condition["reason"],
                                                       condition["message"]),
            )
            continue

        time_difference = current_timestamp - condition["last_transition_time"]
        check_result = list(
            check_levels(
                time_difference,
                levels_upper=condition_levels(params=params, condition=name),
                render_func=render.timespan,
            ))
        result = check_result[0]
        yield Result(
            state=result.state,
            summary=
            f"{condition_detailed_description(condition_name, condition['status'], condition['reason'], condition['message'])} for {result.summary}",
        )


register.check_plugin(
    name="kube_deployment_conditions",
    service_name="Condition",
    discovery_function=discovery,
    check_function=check,
    check_default_parameters=dict(
        available="no_levels",
        progressing="no_levels",
        replicafailure="no_levels",
    ),
    check_ruleset_name="kube_deployment_conditions",
)
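
A hedged aside on the pattern above: check_levels from the v1 agent-based API yields a Result first (plus a Metric when metric_name is given), which is why the check takes check_result[0] and reuses its state and summary. A minimal sketch with assumed values:

from cmk.base.plugins.agent_based.agent_based_api.v1 import check_levels, render

results = list(
    check_levels(
        90.0,  # hypothetical age in seconds
        levels_upper=(60.0, 120.0),  # assumed warn/crit thresholds
        render_func=render.timespan,
    ))
# results[0] is a WARN Result whose summary embeds the rendered timespan,
# e.g. "1 minute 30 seconds (warn/crit at 1 minute 0 seconds/2 minutes 0 seconds)"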
Example #2
        ),
    ]),
])
def test_discover_smart_stats(section, discovered):
    assert list(smart.discover_smart_stats(section)) == discovered

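A hedged note: Result and Metric are plain value objects, so these tests compare expected lists with ==; metric values are normalized to float on construction. For instance:

from cmk.base.plugins.agent_based.agent_based_api.v1 import Metric, Result, State

assert Metric('Power_Cycle_Count', 9) == Metric('Power_Cycle_Count', 9.0)
assert Result(state=State.OK, summary='Power cycles: 9') == Result(
    state=State.OK, summary='Power cycles: 9')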

@pytest.mark.parametrize("item, params, section, result", [
    ("/dev/sda", {
        'Current_Pending_Sector': 0,
        'End-to-End_Error': 0,
        'Reallocated_Sector_Ct': 0,
        'Reported_Uncorrect': 0,
        'UDMA_CRC_Error_Count': 0,
    }, SECTION_SD, [
        Result(state=State.OK, summary='Powered on: 67 days 1 hour'),
        Metric('Power_On_Hours', 1609),
        Result(state=State.OK, summary='Power cycles: 9'),
        Metric('Power_Cycle_Count', 9),
        Result(state=State.OK, summary='Uncorrectable errors: 0'),
        Metric('Reported_Uncorrect', 0),
        Result(state=State.OK, summary='Reallocated sectors: 0'),
        Metric('Reallocated_Sector_Ct', 0),
        Result(state=State.OK, summary='Pending sectors: 0'),
        Metric('Current_Pending_Sector', 0),
        Result(state=State.OK, summary='End-to-End errors: 0'),
        Metric('End-to-End_Error', 0),
        Result(state=State.OK, summary='UDMA CRC errors: 0'),
        Metric('UDMA_CRC_Error_Count', 0),
    ]),
    ("/dev/nvme0n1", {'Critical_Warning': 0}, SECTION_NVME, [
Example #3
    info = list(itertools.chain.from_iterable(generate_inputs()))
    discovered_by_item = {
        s.item: s  # type: ignore[attr-defined]
        for s in ps_utils.discover_ps(
            PS_DISCOVERY_WATO_RULES,  # type: ignore[arg-type]
            ps_section.parse_ps(info),
            None,
            None,
        )
    }
    assert sorted(discovered_by_item.values(), key=lambda s: s.item or "") == sorted(
        PS_DISCOVERED_ITEMS, key=lambda s: s.item or "")


CheckResult = tuple

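A hedged gloss on the expected output below: a Result built with notice=... shows in the service details and is promoted into the one-line summary only when its state is not OK, whereas summary=... always appears. Illustrative values from the list that follows:

from cmk.base.plugins.agent_based.agent_based_api.v1 import Result, State

details_only = Result(state=State.OK, notice="physical: 296 MiB")  # details only
promoted = Result(state=State.WARN, notice="virtual: 1.00 GiB (warn/crit at 1.00 GiB/2.00 GiB)")  # shown in summary
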
check_results = [
    [
        Result(
            state=state.OK,
            summary="Processes: 1",
        ),
        Metric("count", 1, levels=(100000, 100000), boundaries=(0, None)),
        Result(
            state=state.WARN,
            notice="virtual: 1.00 GiB (warn/crit at 1.00 GiB/2.00 GiB)",
        ),
        Metric("vsz", 1050360, levels=(1073741824, 2147483648)),
        Result(
            state=state.OK,
            notice="physical: 296 MiB",
        ),
        Metric("rss", 303252, levels=(1073741824, 2147483648)),
        Result(
            state=state.WARN,
            notice="Percentage of total RAM: 28.92% (warn/crit at 25.00%/50.00%)",
Example #4
        ]],
        {
            'pages_total': 118722,
            'pages_color': 55876,
            'pages_bw': 62846
        },
    ),
])
def test_parse_printer_pages_ricoh(string_table, expected_parsed_data):
    assert parse_printer_pages_ricoh(string_table) == expected_parsed_data


@pytest.mark.parametrize("section,expected_results", [
    ({
        'pages_color': 21693,
        'pages_bw': 54198
    }, [
        Result(state=state.OK, summary='total prints: 75891'),
        Metric('pages_total', 75891.0),
        Result(state=state.OK, summary='b/w: 54198'),
        Metric('pages_bw', 54198.0),
        Result(state=state.OK, summary='color: 21693'),
        Metric('pages_color', 21693.0),
    ]),
])
def test_check_printer_pages_types(section, expected_results):
    assert list(check_printer_pages_types(section)) == expected_results
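
A hedged observation on the case above: the section carries only per-type counters, and the check evidently derives pages_total as their sum when the device reports no total; 54198 + 21693 == 75891 matches the expected summary.

assert 54198 + 21693 == 75891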


_ = __name__ == "__main__" and pytest.main(["-svv", "-T=unit", __file__])
Example #5
     {
         "state_mapping": {
             "Alert": State.CRIT,
             "Ignored": State.UNKNOWN,
             "No Data": State.UNKNOWN,
             "OK": State.OK,
             "Skipped": State.UNKNOWN,
             "Unknown": State.UNKNOWN,
             "Warn": State.WARN,
         },
         "tags_to_show": [],
     },
     [
         Result(
             state=State.CRIT,
             summary="Overall state: Alert",
             details="Devices in env {{env.name}} receive article updates delayed!",
         ),
         Result(
             state=State.OK,
             summary="Datadog thresholds: critical: 180.0",
         ),
     ],
     id="standard case",
 ),
 pytest.param(
     "staging | IDM Gateway Service | Import Error Rate",
     {
         "state_mapping": {
             "Alert": State.CRIT,
             "Ignored": State.UNKNOWN,
Example #6
def test_check():

    yielded_results = list(
        livestatus_status._generate_livestatus_results(
            "heute",
            Parameters(livestatus_status.livestatus_status_default_levels),
            PARSED_STATUS,
            PARSED_SSL,
            {
                "host_checks": [1, 2],
                "service_checks": [1, 2],
                "forks": [1, 2],
                "connections": [1, 2],
                "requests": [1, 2],
                "log_messages": [1, 2],
            },
            581785200,
        ))

    assert yielded_results == [
        Result(state=state.OK, summary='Livestatus version: 2019.05.31'),
        Result(state=state.OK, summary='Host checks: 0.0/s'),
        Metric('host_checks', 7.615869237677187e-05),
        Result(state=state.OK, summary='Service checks: 0.0/s'),
        Metric('service_checks', 0.0002685888198403617),
        Result(state=state.OK, notice='Process creations: -0.0/s'),
        Metric('forks', -3.4376948802370615e-09),
        Result(state=state.OK, notice='Livestatus connects: 0.0/s'),
        Metric('connections', 6.261761224351807e-06),
        Result(state=state.OK, notice='Livestatus requests: 0.0/s'),
        Metric('requests', 8.090614900637924e-06),
        Result(state=state.OK, notice='Log messages: 0.0/s'),
        Metric('log_messages', 1.5985281193102335e-06),
        Result(state=state.OK, notice='Average check latency: 0.000s'),
        Metric('average_latency_generic', 2.23711e-06, levels=(30.0, 60.0)),
        Result(state=state.OK, notice='Average Checkmk latency: 0.000s'),
        Metric('average_latency_cmk', 2.01088e-05, levels=(30.0, 60.0)),
        Result(state=state.OK, notice='Average fetcher latency: 0.000s'),
        Metric('average_latency_fetcher', 2.01088e-05, levels=(30.0, 60.0)),
        Result(state=state.OK, notice='Check helper usage: 1.4%'),
        Metric('helper_usage_generic', 1.42967, levels=(60.0, 90.0)),
        Result(state=state.OK, notice='Checkmk helper usage: 0.044%'),
        Metric('helper_usage_cmk', 0.043827200000000004, levels=(60.0, 90.0)),
        Result(state=state.OK, notice='Fetcher helper usage: 0.044%'),
        Metric('helper_usage_fetcher',
               0.043827200000000004,
               levels=(40.0, 80.0)),
        Result(state=state.OK, notice='Checker helper usage: 0.044%'),
        Metric('helper_usage_checker',
               0.043827200000000004,
               levels=(40.0, 80.0)),
        Result(state=state.OK, notice='Livestatus usage: 0.000000000000%'),
        Metric('livestatus_usage', 3.46e-321, levels=(80.0, 90.0)),
        Result(state=state.OK, notice='Livestatus overflow rate: 0.0/s'),
        Metric('livestatus_overflows_rate', 0.0, levels=(0.01, 0.02)),
        Result(state=state.OK, notice='Hosts: 2.00'),
        Metric('monitored_hosts', 2.0),
        Result(state=state.OK, notice='Services: 513.00'),
        Metric('monitored_services', 513.0),
        Result(state=state.OK, notice='Core version: Checkmk 2019.05.31'),
        Result(
            state=state.OK,
            notice='Site certificate valid until Oct 01 3017',
        ),
        Result(
            state=state.OK,
            notice='Expiring in: 1029 years 363 days',
        ),
        Metric('site_cert_days', 375948.7452314815),
    ]
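
A hedged note on determinism: the literal 581785200 passed above is a frozen "now" (an epoch timestamp in mid-1988), which keeps time-dependent output such as the certificate results reproducible; Oct 01 3017 minus 1988 gives the asserted 1029 years. The same idea in miniature, with assumed names:

def _render_cert_age(valid_until_year, now_year):
    return f"Expiring in: {valid_until_year - now_year} years"

assert _render_cert_age(3017, 1988) == "Expiring in: 1029 years"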
Example #7
from cmk.base.api.agent_based import value_store

import pytest  # type: ignore[import]

KILO = 1024

MEGA = KILO**2

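A quick hedged sanity check on the expected summaries below: 23 MiB of 42 MiB is 23/42, about 0.5476, rendered as "54.8%" regardless of the MEGA scaling:

assert round(23 * MEGA / (42 * MEGA) * 100, 1) == 54.8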

@pytest.mark.parametrize(
    "label,used,total,levels,kwargs,expected",
    [
        # all variants of "no levels"
        ("Longterm", 23 * MEGA, 42 * MEGA, None, {}, [
            Result(
                state=state.OK,
                summary="Longterm: 54.8% - 23.0 MiB of 42.0 MiB",
            ),
        ]),
        ("Longterm", 23 * MEGA, 42 * MEGA, "ignore", {}, [
            Result(
                state=state.OK,
                summary="Longterm: 54.8% - 23.0 MiB of 42.0 MiB",
            ),
        ]),
        ("Longterm", 23 * MEGA, 42 * MEGA, ("ignore", None), {}, [
            Result(
                state=state.OK,
                summary="Longterm: 54.8% - 23.0 MiB of 42.0 MiB",
            ),
        ]),
        ("Longterm", 23 * MEGA, 42 * MEGA, ("ignore", (None, None)), {}, [
Example #8
def test_winperf_if_group_patterns(value_store):
    expected_services = [
        Service(item='Broadcom ABC123 NetXtreme 123 GigE [Client1] 138',
                parameters={
                    'discovered_oper_status': ['1'],
                    'discovered_speed': 1410065408
                }),
        Service(item='Broadcom ABC456 NetXtreme 456 GigE [Client2] 137',
                parameters={
                    'discovered_oper_status': ['1'],
                    'discovered_speed': 1410065408
                }),
        Service(
            item='Broadcom',
            parameters={
                'aggregate': {
                    'member_appearance': 'descr',
                    'inclusion_condition': {
                        'match_desc': [
                            'Broadcom ABC123 NetXtreme 123 GigE \\[Client1\\] 138',
                            'Broadcom ABC456 NetXtreme 456 GigE \\[Client2\\] 137'
                        ]
                    },
                    'exclusion_conditions': []
                },
                'discovered_oper_status': ['1'],
                'discovered_speed': 2820130816.0
            }),
        Service(
            item='isatap',
            parameters={
                'aggregate': {
                    'member_appearance': 'descr',
                    'inclusion_condition': {
                        'match_desc': [
                            'isatap.{A1A1A1A1-A1A1-A1A1-A1A1-A1A1A1A1A1A1}',
                            'isatap.{B1B1B1B1-B1B1-B1B1-B1B1-B1B1B1B1B1B1}',
                            'isatap.{C1C1C1C1-C1C1-C1C1-C1C1-C1C1C1C1C1C1}',
                            'isatap.{D1D1D1D1-D1D1-D1D1-D1D1-D1D1D1D1D1D1}',
                            'isatap.{E1E1E1E1-E1E1-E1E1-E1E1-E1E1E1E1E1E1}',
                            'isatap.{F1F1F1F1-F1F1-F1F1-F1F1-F1F1F1F1F1F1}'
                        ]
                    },
                    'exclusion_conditions': []
                },
                'discovered_oper_status': ['1'],
                'discovered_speed': 600000.0
            }),
    ]
    section = winperf_if.parse_winperf_if(IF_SECTION)
    assert list(
        winperf_if.discover_winperf_if(
            [
                type_defs.Parameters({
                    'discovery_single': (
                        False,
                        {},
                    ),
                    'grouping': (
                        True,
                        {
                            'group_items': [
                                {
                                    'group_name': 'isatap',
                                    'member_appearance': 'descr',
                                },
                            ],
                        },
                    ),
                    'matching_conditions': (
                        False,
                        {
                            'match_desc': [
                                'isatap.{A1A1A1A1-A1A1-A1A1-A1A1-A1A1A1A1A1A1}',
                                'isatap.{B1B1B1B1-B1B1-B1B1-B1B1-B1B1B1B1B1B1}',
                                'isatap.{C1C1C1C1-C1C1-C1C1-C1C1-C1C1C1C1C1C1}',
                                'isatap.{D1D1D1D1-D1D1-D1D1-D1D1-D1D1D1D1D1D1}',
                                'isatap.{E1E1E1E1-E1E1-E1E1-E1E1-E1E1E1E1E1E1}',
                                'isatap.{F1F1F1F1-F1F1-F1F1-F1F1-F1F1F1F1F1F1}',
                            ],
                        },
                    ),
                }),
                type_defs.Parameters({
                    'grouping': (
                        True,
                        {
                            'group_items': [
                                {
                                    'group_name': 'Broadcom',
                                    'member_appearance': 'descr',
                                },
                            ],
                        },
                    ),
                    'matching_conditions': (
                        False,
                        {
                            'match_desc': [
                                'Broadcom ABC123 NetXtreme 123 GigE \\[Client1\\] 138',
                                'Broadcom ABC456 NetXtreme 456 GigE \\[Client2\\] 137',
                            ],
                        },
                    ),
                }),
                type_defs.Parameters({
                    **interfaces.DISCOVERY_DEFAULT_PARAMETERS,
                    'discovery_single': (
                        True,
                        {
                            'item_appearance': 'descr',
                            'pad_portnumbers': True,
                        },
                    ),
                }),
            ],
            section,
        )) == expected_services

    assert [
        result for service in expected_services
        for result in winperf_if.check_winperf_if(
            service.item or "",  # or "" to make mypy happy
            type_defs.Parameters(service.parameters),
            section,
        ) if not isinstance(result, IgnoreResults)
    ] == [
        Result(state=state.OK, summary='[1]'),
        Result(state=state.OK,
               summary='(Connected)',
               details='Operational state: Connected'),
        Result(state=state.OK, summary='Speed: 1.41 GBit/s'),
        Result(state=state.OK, summary='[2]'),
        Result(state=state.OK,
               summary='(Connected)',
               details='Operational state: Connected'),
        Result(state=state.OK, summary='Speed: 1.41 GBit/s'),
        Result(state=state.OK, summary='Teaming'),
        Result(state=state.OK, summary='(up)',
               details='Operational state: up'),
        Result(
            state=state.OK,
            summary=
            ('Members: [Broadcom ABC123 NetXtreme 123 GigE [Client1] 138 (Connected),'
             ' Broadcom ABC456 NetXtreme 456 GigE [Client2] 137 (Connected)]'),
        ),
        Result(state=state.OK, summary='Speed: 2.82 GBit/s'),
        Result(state=state.OK, summary='Teaming'),
        Result(state=state.OK, summary='(up)',
               details='Operational state: up'),
        Result(
            state=state.OK,
            summary=
            ('Members: [isatap.{A1A1A1A1-A1A1-A1A1-A1A1-A1A1A1A1A1A1} (Connected),'
             ' isatap.{B1B1B1B1-B1B1-B1B1-B1B1-B1B1B1B1B1B1} (Connected),'
             ' isatap.{C1C1C1C1-C1C1-C1C1-C1C1-C1C1C1C1C1C1} (Connected),'
             ' isatap.{D1D1D1D1-D1D1-D1D1-D1D1-D1D1D1D1D1D1} (Connected),'
             ' isatap.{E1E1E1E1-E1E1-E1E1-E1E1-E1E1E1E1E1E1} (Connected),'
             ' isatap.{F1F1F1F1-F1F1-F1F1-F1F1-F1F1F1F1F1F1} (Connected)]'),
        ),
        Result(state=state.OK, summary='Speed: 600 kBit/s'),
    ]
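
A hedged observation on the grouped services above: an aggregated interface's discovered speed is the sum of its members' speeds. The two Broadcom members contribute 1410065408 bit/s each, and check_winperf_if renders the sum as "Speed: 2.82 GBit/s" (decimal giga):

assert 2 * 1410065408 == 2820130816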
Example #9
@pytest.mark.parametrize("item, params, expected_results", [
    ("H62 10 - DATA 20", df.FILESYSTEM_DEFAULT_LEVELS, [
        Metric(
            'fs_used',
            84.703125,
            levels=(256.0, 288.0),
            boundaries=(0.0, 320.0),
        ),
        Metric('fs_size', 320.0, boundaries=(0.0, None)),
        Metric(
            'fs_used_percent',
            26.4697265625,
            levels=(80.0, 90.0),
            boundaries=(0.0, 100.0),
        ),
        Result(state=state.OK, summary='26.47% used (84.7 of 320 MiB)'),
        Metric('growth', -4470.553049074118),
        Result(state=state.OK, summary='trend per 1 day 0 hours: +621 TiB'),
        Result(state=state.OK, summary='trend per 1 day 0 hours: +203357489.65%'),
        Metric(
            'trend',
            650743966.868858,
            boundaries=(0.0, 13.333333333333334),
        ),
        Result(state=state.OK, summary='Time left until disk full: 31 milliseconds'),
        Result(state=state.OK, summary='Service: scriptserver'),
        Result(state=state.OK,
               summary='Path: /hana/data/H62/mnt00007/hdb00020/datavolume_0000.dat'),
    ]),
    ("H62 10 - DATA 20", LEVELS_CRIT, [
        Metric(
Example #10
         [u'22731'],
         [u'[uptime_solaris_start]'],
         [
             u'SunOS', u'unknown', u'5.10', u'Generic_147148-26', u'i86pc',
             u'i386', u'i86pc'
         ],
         [u'global'],
         [
             u'4:58pm', u'up', u'6:19,', u'2', u'users,', u'load',
             u'average:', u'0.18,', u'0.06,', u'0.03'
         ],
         [u'unix:0:system_misc:snaptime', u'22737.886916295'],
         [u'[uptime_solaris_end]'],
     ],
     [
         Result(state=State.OK, summary='Up since Apr 15 2018 12:31:09'),
         Result(state=State.OK, summary='Uptime: 6 hours 18 minutes'),
         Metric('uptime', 22731),
     ],
 ),
 (
     [
         [u'1122'],
         [u'[uptime_solaris_start]'],
         [
             u'SunOS', u'unknown', u'5.10', u'Generic_147148-26', u'i86pc',
             u'i386', u'i86pc'
         ],
         [u'global'],
         [
             u'4:23pm', u'up', u'19', u'min(s),', u'2', u'users,', u'load',
Example #11
                group='DAG-NET',
            ),
        ],
        {},
    )


@pytest.mark.parametrize("item, params, results", [
    (
        '3',
        {
            'discovered_oper_status': ['1'],
            'discovered_speed': 10000000000
        },
        [
            Result(state=state.OK, summary='[SLOT 6 Port 1 DAG]'),
            Result(state=state.OK,
                   summary='(Connected)',
                   details='Operational state: Connected'),
            Result(state=state.OK, summary='MAC: A0:36:9F:B0:A3:60'),
            Result(state=state.OK, summary='Speed: 10 GBit/s'),
            Metric('in', 0.0, boundaries=(0.0, 1250000000.0)),
            Metric('inmcast', 0.0),
            Metric('inbcast', 0.0),
            Metric('inucast', 0.0),
            Metric('innucast', 0.0),
            Metric('indisc', 0.0),
            Metric('inerr', 0.0),
            Metric('out', 1073741824.0, boundaries=(0.0, 1250000000.0)),
            Metric('outmcast', 0.0),
            Metric('outbcast', 0.0),
Example #12
    assert list(winperf_processor.discover_winperf_processor_util(SECTION)) == [
        Service(),
    ]

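A hedged sketch of what the first parametrized case below exercises: rate-based checks need a previous sample, so with an empty value store the first run yields IgnoreResults instead of a utilization figure. Names as in the v1 API:

from cmk.base.plugins.agent_based.agent_based_api.v1 import GetRateError, IgnoreResults, get_rate

def _rate_or_init(value_store, key, now, value):
    try:
        return get_rate(value_store, key, now, value)
    except GetRateError:
        # First sample: nothing to compute a rate against yet.
        return IgnoreResults(f"Initialized: {key!r}")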

@pytest.mark.parametrize(
    "value_store, params, result",
    [
        (
            {},
            {},
            [
                IgnoreResults("Initialized: 'util'"),
                IgnoreResults("Initialized: 'user'"),
                IgnoreResults("Initialized: 'privileged'"),
                Result(state=State.OK, notice="Number of processors: 4"),
                Metric("cpus", 4),
            ],
        ),
        (
            VALUE_STORE,
            {},
            [
                Result(state=State.OK, summary="Total CPU: 100.00%"),
                Metric("util", 99.99983122362869, boundaries=(0, 4)),  # boundaries: 4 as in 4 CPUs
                Result(state=State.OK, notice="User: <0.01%"),
                Metric("user", 0.0005063291139240507),
                Result(state=State.OK, notice="Privileged: <0.01%"),
                Metric("privileged", 1.687763713080169e-05),
                Result(state=State.OK, notice="Number of processors: 4"),
                Metric("cpus", 4),
Example #14
        ("ora.cssd", "Clusterware not running"),
        ("ora.crsd", "Cluster resource service daemon not running"),
    ],
)
def test_check_item_not_in_section(section: Section, item: str, summary: str):
    results = list(check(item=item, section=section))
    assert results == [Result(state=State.CRIT, summary=summary)]


def test_check_item_not_in_section_and_cluster_down(section: Section):
    with pytest.raises(IgnoreResultsError):
        list(check(item="foo", section=section))


@pytest.mark.parametrize(
    "item, results",
    [
        (
            "ora.DG_CLUSTER.dg",
            [
                Result(state=State.OK, summary="online"),
                Result(state=State.OK, summary="on host2: online"),
                Result(state=State.CRIT, summary="on oracle_host: off, target state online"),
            ],
        ),
        ("ora.cluster_interconnect.haip", [Result(state=State.OK, summary="local: online")]),
    ],
)
def test_check_item_in_section(section: Section, item: str, results: Sequence[Result]):
    assert results == list(check(item=item, section=section))
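
A hedged note on the cluster-down test above: raising IgnoreResultsError aborts the check and discards the current cycle's data, which is why that case is asserted with pytest.raises rather than by comparing Result objects:

from cmk.base.plugins.agent_based.agent_based_api.v1 import IgnoreResultsError

def _bail_out_when_cluster_down():
    raise IgnoreResultsError("Cluster down, discarding data")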
Example #15
def test_storcli_cache_vault_check_ok(section: mcv.Section) -> None:
    assert list(mcv.check_storcli_cache_vault("/c0", section)) == [
        Result(state=State.OK, summary="Optimal"),
        Result(state=State.OK, summary="Capacitance: 111.00%"),
    ]
Example #16
            PARSED_SSL,
            {
                "host_checks": [1, 2],
                "service_checks": [1, 2],
                "forks": [1, 2],
                "connections": [1, 2],
                "requests": [1, 2],
                "log_messages": [1, 2],
            },
            581785200,
        ))
    assert all(x in yielded_results for x in fetcher_checker_counters)

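A hedged gloss on the list below: boundaries=(0.0, None) on a Metric declares a graphing range (lower bound zero, no upper bound) and is independent of levels, which carry the (warn, crit) pair:

from cmk.base.plugins.agent_based.agent_based_api.v1 import Metric

m = Metric('host_checks', 7.615869237677187e-05, boundaries=(0.0, None))
assert m.boundaries == (0.0, None) and m.levels == (None, None)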

_RESULTS = [
    Result(state=state.OK, summary='Livestatus version: 2019.05.31'),
    Result(state=state.OK, summary='Host checks: 0.0/s'),
    Metric('host_checks', 7.615869237677187e-05, boundaries=(0.0, None)),
    Result(state=state.OK, summary='Service checks: 0.0/s'),
    Metric('service_checks', 0.0002685888198403617, boundaries=(0.0, None)),
    Result(state=state.OK, notice='Process creations: -0.0/s'),
    Metric('forks', -3.4376948802370615e-09, boundaries=(0.0, None)),
    Result(state=state.OK, notice='Livestatus connects: 0.0/s'),
    Metric('connections', 6.261761224351807e-06, boundaries=(0.0, None)),
    Result(state=state.OK, notice='Livestatus requests: 0.0/s'),
    Metric('requests', 8.090614900637924e-06, boundaries=(0.0, None)),
    Result(state=state.OK, notice='Log messages: 0.0/s'),
    Metric('log_messages', 1.5985281193102335e-06, boundaries=(0.0, None)),
    Result(state=state.OK, notice='Average check latency: 0.000s'),
    Metric('average_latency_generic', 2.23711e-06, levels=(30.0, 60.0), boundaries=(0.0, None)),
    Result(state=state.OK, notice='Average Checkmk latency: 0.000s'),
Example #17
 pytest.param(
     {
         'failover_state': 1,
         'primary': 'active',
         'secondary': 'standby',
     },
     Section(
         local_role='primary',
         local_status='9',
         local_status_detail='Active unit',
         failover_link_status='2',
         remote_status='10',
     ),
     [
         Result(
             state=State.OK,
             summary='Device (primary) is the Active unit',
         ),
     ],
     id='Check: local unit == Primary unit == Active unit',
 ),
 pytest.param(
     {
         'failover_state': 1,
         'primary': 'active',
         'secondary': 'standby',
     },
     Section(
         local_role='primary',
         local_status='10',
         local_status_detail='Standby unit',
         failover_link_status='2',
Example #18

@pytest.mark.parametrize(
    "string_table,expected_parsed_data",
    [
        ([[["4"]]], 4),
    ],
)
def test_parse_f5_bigip_cluster_status(string_table, expected_parsed_data):
    assert parse_f5_bigip_cluster_status(string_table) == expected_parsed_data


@pytest.mark.parametrize(
    "arg,result",
    [
        ((def_params, 3), [Result(state=state.OK, summary="Node is active")]),
        ((def_params, 2), [Result(state=state.OK, summary="Node is active 2")]),
        ((def_params, 1), [Result(state=state.OK, summary="Node is active 1")]),
        ((def_params, 0), [Result(state=state.OK, summary="Node is standby")]),
    ],
)
def test_check_f5_bigip_cluster_status(arg, result):
    assert list(check_f5_bigip_cluster_status(arg[0], arg[1])) == result

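A hedged aside: the same table can let pytest unpack the arguments, avoiding the arg[0]/arg[1] indexing, for example:

@pytest.mark.parametrize(
    "params, node_status, result",
    [
        (def_params, 3, [Result(state=state.OK, summary="Node is active")]),
    ],
)
def test_check_f5_bigip_cluster_status_unpacked(params, node_status, result):
    assert list(check_f5_bigip_cluster_status(params, node_status)) == result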

@pytest.mark.parametrize(
    "arg,result",
    [
        ((def_params, 4), [Result(state=state.OK, summary="Node is active")]),
Example #19
    ],
)
def test_parse_kaspersky_av_client(string_table, now, expected_section):
    assert kaspersky_av_client._parse_kaspersky_av_client(
        string_table, now=now) == expected_section


@pytest.mark.parametrize(
    "section,results",
    [
        (
            dict(fullscan_age=2, signature_age=2),
            [
                Result(
                    state=State.WARN,
                    summary=
                    "Last update of signatures: 2 seconds ago (warn/crit at 2 seconds ago/3 seconds ago)",
                ),
                Result(
                    state=State.WARN,
                    summary=
                    "Last fullscan: 2 seconds ago (warn/crit at 2 seconds ago/3 seconds ago)",
                ),
            ],
        ),
        (
            dict(fullscan_age=3, signature_age=3),
            [
                Result(
                    state=State.CRIT,
                    summary=
Example #20
@pytest.mark.parametrize(
    "section_kube_memory_resources,section_kube_performance_memory,expected_result",
    [
        pytest.param(
            Resources(
                request=0.0,
                limit=28120704.0,
                count_unspecified_limits=0,
                count_zeroed_limits=0,
                count_unspecified_requests=0,
                count_total=2,
            ),
            None,
            (
                Result(state=State.OK,
                       summary="Requests: 0 B (2/2 containers with requests)"),
                Metric("kube_memory_request", 0.0, boundaries=(0.0, None)),
                Result(
                    state=State.OK,
                    summary="Limits: 26.8 MiB (2/2 containers with limits)"),
                Metric("kube_memory_limit", 28120704.0,
                       boundaries=(0.0, None)),
                Result(state=State.OK, summary="Allocatable: 34.3 MiB"),
                Metric("kube_memory_allocatable",
                       35917989.0,
                       boundaries=(0.0, None)),
            ),
            id="No performance data",
        ),
        pytest.param(
            Resources(
Example #21
    ])
def test_regression_discovery(monkeypatch, discovery_params, discovery_results):
    patch_discovery_params_retrieval(monkeypatch, discovery_params)
    assert list(ipmi.discover_ipmi(
        SECTION_IPMI,
        SECTION_IPMI_DISCRETE,
    )) == discovery_results


@pytest.mark.parametrize('item, check_results', [
    ('Summary', [
        Metric('ambient_temp', 18.5),
        Result(
            state=state.CRIT,
            summary=
            '146 sensors - 105 OK - 1 CRIT: PS1_Status (ok (Presence detected, Failure detected     <= NOT OK !!)) - 40 skipped',
            details=
            '146 sensors - 105 OK - 1 CRIT: PS1_Status (ok (Presence detected, Failure detected     <= NOT OK !!)) - 40 skipped'
        )
    ]),
    ('CMOS_Battery', [Result(state=state.OK, summary='Status: ok', details='Status: ok')]),
    ('ROMB_Battery', [Result(state=state.OK, summary='Status: ok', details='Status: ok')]),
    ('VCORE', [
        Result(state=state.OK,
               summary='Status: ok (State Deasserted)',
               details='Status: ok (State Deasserted)')
    ]),
    ('1.2V_VDDR', [
        Result(state=state.OK,
               summary='Status: ok (State Deasserted)',
               details='Status: ok (State Deasserted)')
Example #22
        "1072 mount_id3.trend": [LAST_TIME_EPOCH, LAST_TIME_EPOCH, 8989],
    }
    monkeypatch.setattr(hitachi_hnas_volume, "get_value_store",
                        lambda: value_store_patched)
    yield value_store_patched

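A hedged generic form of the fixture pattern above: any plugin module that calls get_value_store() can be pointed at a plain dict for tests:

@pytest.fixture
def patched_value_store(monkeypatch):
    store: dict = {}
    monkeypatch.setattr(hitachi_hnas_volume, "get_value_store", lambda: store)
    return store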

@pytest.mark.parametrize(
    "item,params,section,expected",
    [
        (
            "1024 mount_id1",
            {},
            common_section,
            (
                Result(state=State.OK,
                       summary="no filesystem size information"),
                Result(state=State.OK, summary="Status: mounted"),
                Result(state=State.OK, summary="assigned to EVS 1"),
            ),
        ),
        (
            "1071 mount_id2",
            {},
            common_section,
            (
                Result(state=State.OK,
                       summary="no filesystem size information"),
                Result(state=State.WARN, summary="Status: unformatted"),
                Result(state=State.OK, summary="assigned to EVS 1"),
            ),
        ),
Example #23
@pytest.mark.parametrize(
    "section_kube_memory_resources,section_kube_performance_memory,expected_result",
    [
        pytest.param(
            Resources(
                request=0.0,
                limit=28120704.0,
                count_unspecified_limits=0,
                count_zeroed_limits=0,
                count_unspecified_requests=0,
                count_total=2,
            ),
            None,
            (
                Result(state=State.OK, summary="Request: 0 B (2/2 containers with requests)"),
                Metric("kube_memory_request", 0.0, boundaries=(0.0, None)),
                Result(state=State.OK, summary="Limit: 26.8 MiB (2/2 containers with limits)"),
                Metric("kube_memory_limit", 28120704.0, boundaries=(0.0, None)),
            ),
            id="No performance data",
        ),
        pytest.param(
            Resources(
                request=0.0,
                limit=0.0,
                count_unspecified_limits=0,
                count_zeroed_limits=2,
                count_unspecified_requests=0,
                count_total=2,
            ),
Example #24
    yield value_store_patched

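A hedged arithmetic check on the expected metrics below: the filesystem sizes are tracked in megabytes with a 1024-based conversion, so "Size 64.3 GB" becomes 64.3 * 1024 == 65843.2, and the default 80%/90% levels on that size give (52674.56, 59258.88):

assert 64.3 * 1024 == 65843.2
assert (round(65843.2 * 0.8, 2), round(65843.2 * 0.9, 2)) == (52674.56, 59258.88)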

@pytest.mark.parametrize(
    "item, info, expected_result",
    [
        (
            "HXE 90 HXE - Log",
            [
                ["[[HXE 90 HXE]]"],
                ["Data", "OK", "Size 64.3 GB, Used 10.1 GB, Free 85 %"],
                ["Log", "OK", "Size 64.3 GB, Used 10.1 GB, Free 85 %"],
                ["Trace", "OK", "Size 64.3 GB, Used 10.1 GB, Free 85 %"],
            ],
            [
                Result(state=State.OK, summary="Status: OK"),
                Metric(
                    "fs_used",
                    10342.400000000001,
                    levels=(52674.56, 59258.88),
                    boundaries=(0.0, 65843.2),
                ),
                Metric("fs_size", 65843.2, boundaries=(0.0, None)),
                Metric(
                    "fs_used_percent",
                    15.707620528771386,
                    levels=(80.0, 90.0),
                    boundaries=(0.0, 100.0),
                ),
                Result(state=State.OK,
                       summary="15.71% used (10.1 of 64.3 GiB)"),
Example #25
def test_discover_oracle_performance(fix_register, string_table,
                                     expected_result):
    check_plugin = fix_register.check_plugins[CheckPluginName(
        "oracle_performance")]
    section = parse_oracle_performance(string_table)
    assert sorted(check_plugin.discovery_function(section)) == expected_result


@pytest.mark.parametrize(
    "string_table, item, expected_result",
    [
        (
            _AGENT_OUTPUT_1,
            "TWH",
            [
                Result(state=State.OK, summary="DB Time: 0.00 1/s"),
                Metric("oracle_db_time", 0.0),
                Result(state=State.OK, summary="DB CPU: 0.00 1/s"),
                Metric("oracle_db_cpu", 0.0),
                Result(state=State.OK, summary="DB Non-Idle Wait: 0.00 1/s"),
                Metric("oracle_db_wait_time", 0.0),
                Result(state=State.OK, summary="Buffer hit ratio: 98.1%"),
                Metric("oracle_buffer_hit_ratio", 98.096392315184),
                Result(state=State.OK,
                       summary="Library cache hit ratio: 99.3%"),
                Metric("oracle_library_cache_hit_ratio", 99.32706545096245),
                Metric("oracle_buffer_busy_wait", 0.0),
                Metric("oracle_consistent_gets", 0.0),
                Metric("oracle_db_block_change", 0.0),
                Metric("oracle_db_block_gets", 0.0),
                Metric("oracle_free_buffer_wait", 0.0),
Example #26
def check_waiting(params: Mapping[str, int],
                  state: ContainerWaitingState) -> CheckResult:
    summary = f"Status: Waiting ({state.reason}: {state.detail})"
    yield Result(state=State.OK, summary=summary)
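
A hedged usage sketch for the check above, with an assumed stand-in for ContainerWaitingState:

class _WaitingState:
    reason = "ImagePullBackOff"
    detail = "Back-off pulling image"

results = list(check_waiting({}, _WaitingState()))
assert results[0].summary == "Status: Waiting (ImagePullBackOff: Back-off pulling image)"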
Example #27
    if item not in section_gcp_service_cloud_sql:
        return
    metrics = {
        "up": gcp.MetricSpec("cloudsql.googleapis.com/database/up", str, dtype="int"),
    }
    timeseries = section_gcp_service_cloud_sql[item].rows
    yield from gcp.generic_check(metrics, timeseries, {})

    metric_type = "cloudsql.googleapis.com/database/state"
    if (metric := next((r for r in timeseries if r.ts.metric.type == metric_type), None)) is None:
        yield Result(state=State.UNKNOWN, summary="No data available")
        return
    gcp_state = metric.ts.points[0].value.string_value
    state = State(params[gcp_state])
    summary = f"State: {gcp_state}"
    yield Result(state=state, summary=summary)

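A hedged miniature of the state-mapping pattern above: the default parameters store plain ints (see the register call below), and State(...) turns them back into monitoring states:

params = {"RUNNING": int(State.OK), "SUSPEND": int(State.WARN)}
assert State(params["SUSPEND"]) is State.WARN
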

register.check_plugin(
    name="gcp_sql_status",
    sections=["gcp_service_cloud_sql", "gcp_assets"],
    service_name="GCP Cloud SQL: %s",
    check_ruleset_name="gcp_sql_status",
    discovery_function=discover,
    check_function=check_gcp_sql_status,
    check_default_parameters={
        "RUNNING": int(State.OK),
        "SUSPEND": int(State.WARN),
        "RUNNABLE": int(State.OK),
        "PENDING_CREATE": int(State.UNKNOWN),
        "MAINTENANCE": int(State.UNKNOWN),
Example #28
def check_running(params: Mapping[str, int],
                  state: ContainerRunningState) -> CheckResult:
    start_time_timestamp = state.start_time
    time_delta = time() - start_time_timestamp
    summary = f"Status: Running for {render.timespan(time_delta)}"
    yield Result(state=State.OK, summary=summary)
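
A hedged note: the snippet relies on a module-level "from time import time", and render.timespan condenses the age in seconds to the two largest units, consistent with the uptime example earlier:

assert render.timespan(22731) == "6 hours 18 minutes"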
Example #29
def test_subset_patterns():

    section_ps = ps_section.parse_ps(
        splitter("""(user,0,0,0.5) main
(user,0,0,0.4) main_dev
(user,0,0,0.1) main_dev
(user,0,0,0.5) main_test"""))

    # Boundary in match is necessary otherwise main instance accumulates all
    inv_params: List[Dict] = [{
        'default_params': {
            'cpu_rescale_max': True,
            'levels': (1, 1, 99999, 99999)
        },
        'match': '~(main.*)\\b',
        'descr': '%s',
    }, {}]

    discovered = [
        Service(
            item='main',
            parameters={
                'cpu_rescale_max': True,
                'levels': (1, 1, 99999, 99999),
                'process': '~(main.*)\\b',
                'match_groups': ('main',),
                'user': None,
                'cgroup': (None, False),
            },
        ),
        Service(
            item='main_dev',
            parameters={
                'cpu_rescale_max': True,
                'levels': (1, 1, 99999, 99999),
                'process': '~(main.*)\\b',
                'match_groups': ('main_dev',),
                'user': None,
                'cgroup': (None, False),
            },
        ),
        Service(
            item='main_test',
            parameters={
                'cpu_rescale_max': True,
                'levels': (1, 1, 99999, 99999),
                'process': '~(main.*)\\b',
                'match_groups': ('main_test',),
                'user': None,
                'cgroup': (None, False),
            },
        ),
    ]

    test_discovered = ps_utils.discover_ps(inv_params, section_ps, None, None)  # type: ignore[arg-type]
    assert {s.item: s for s in test_discovered} == {s.item: s for s in discovered}  # type: ignore[attr-defined]

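    # Grounded in the splitter data above: one 'main', two 'main_dev'
    # processes and one 'main_test', hence the expected counts [1, 2, 1].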
    for service, count in zip(discovered, [1, 2, 1]):
        assert isinstance(service.item, str)
        with value_store.context(CheckPluginName("ps"), "unit-test"):
            output = list(ps_utils.check_ps_common(
                label="Processes",
                item=service.item,
                params=service.parameters,  # type: ignore[arg-type]
                process_lines=[
                    (None, psi, cmd_line) for (psi, cmd_line) in section_ps[1]],
                cpu_cores=1,
                total_ram=None,
            ))
        assert output[0] == Result(state=state.OK, summary="Processes: %s" % count)
Example #30
)
def test_discovery_wlan_controller(section: Dict[str, WlanController],
                                   expected_result: List[Service]):
    services = discovery_wlan_controller(section)

    assert list(services) == expected_result

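A hedged note on the ("wism23", WLAN_CONTROLLERS_SECTION, []) case below: metadata checks typically yield nothing for an item missing from the section, so the expectation is an empty list rather than an UNKNOWN Result. Sketch with assumed names:

def _check_metadata_sketch(item, section):
    if (controller := section.get(item)) is None:
        return  # unknown item: yield nothing, matching the [] expectation
    yield Result(state=State.OK, notice=f"Name: {item}")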

@pytest.mark.parametrize(
    "item, section, expected_result",
    [
        (
            "wism21",
            WLAN_CONTROLLERS_SECTION,
            [
                Result(state=State.OK, notice="Name: wism21"),
                Result(state=State.OK, summary="Type: Cisco WiSM2 Controller"),
                Result(state=State.OK, summary="Software version: 8.0.152.12"),
                Result(state=State.OK, notice="IP address: 10.5.1.11"),
                Result(state=State.OK, summary="Location: secgate 1:1"),
                Result(state=State.OK, summary="Group name: mobile-01"),
                Result(state=State.OK,
                       summary="Mobility group name: mobile-01"),
            ],
        ),
        ("wism23", WLAN_CONTROLLERS_SECTION, []),
    ],
)
def test_check_wlan_controller_metadata(item: str,
                                        section: Dict[str, WlanController],
                                        expected_result: List[CheckResult]):