Example 1
def run_test_on_checks(check, subcheck, dataset, info_arg, immu, write):
    """Run check for test case listed in dataset"""
    test_cases = getattr(dataset, 'checks', {}).get(subcheck, [])
    check_func = check.info.get("check_function")
    check_plugin_name = CheckPluginName(maincheckify(check.name))

    for item, params, results_expected_raw in test_cases:

        print("Dataset item %r in check %r" % (item, check.name))
        immu.register(params, 'params')

        with current_service(
                Service(
                    item=item,
                    check_plugin_name=check_plugin_name,
                    description="unit test description",
                    parameters={},
                )):
            result = CheckResult(check.run_check(item, params, info_arg))

        immu.test(' after check (%s): ' % check_func.__name__)

        result_expected = CheckResult(results_expected_raw)

        if write:
            new_entry = (item, params, result.raw_repr())
            dataset.update_check_result(subcheck, new_entry)
        else:
            assertCheckResultsEqual(result, result_expected)
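The dataset objects consumed by this runner come from generictests dataset modules. A minimal sketch of what such a module might contain, assuming the usual layout of a checkname, agent info, and a checks dict keyed by subcheck (all values below are illustrative, not taken from a real dataset):

# Hypothetical generictests dataset module (illustrative values only).
checkname = 'example_check'

info = [
    ['item_a', '10'],
    ['item_b', '20'],
]

checks = {
    # subcheck -> list of (item, params, expected raw results)
    '': [
        ('item_a', {}, [(0, 'Value: 10', [('value', 10, None, None)])]),
        ('item_b', {}, [(0, 'Value: 20', [('value', 20, None, None)])]),
    ],
}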
Example 2
def test_check_memory(check_manager, params, meminfo, expected):
    check_memory = check_manager.get_check("mem.used").context["check_memory"]
    copy_info = meminfo.copy()
    result = check_memory(params, meminfo)

    assertCheckResultsEqual(CheckResult(result), CheckResult(expected))
    assert copy_info == meminfo
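The params, meminfo and expected arguments are normally injected by a parametrize decorator that is not shown on this page; a hedged sketch of what such a parametrization could look like (names and values are made up for illustration):

import pytest

# Hypothetical parametrization; the real test module defines its own cases.
@pytest.mark.parametrize("params, meminfo, expected", [
    (
        {"levels": (150.0, 200.0)},
        {"MemTotal": 4096, "MemFree": 1024, "SwapTotal": 1024, "SwapFree": 1024},
        [(0, "...", [])],
    ),
])
def test_check_memory_sketch(check_manager, params, meminfo, expected):
    ...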
Example 3
def test_io_check(check_manager):
    item_1st = 'VMFS_01'
    params = {'flex_levels': 'irrelevant'}
    check = check_manager.get_check("hp_msa_volume.io")
    parsed = {
        u'VMFS_01': {
            u'durable-id': u'V3',
            u'data-read-numeric': u'23719999539712',
            u'data-written-numeric': u'18093374647808',
            u'virtual-disk-name': u'A',
            u'raidtype': u'RAID0',
        },
        u'VMFS_02': {
            u'durable-id': u'V4',
            u'data-read-numeric': u'49943891507200',
            u'data-written-numeric': u'7384656100352',
            u'virtual-disk-name': u'A',
            u'raidtype': u'RAID0',
        }
    }
    _, read, written = check.run_check(item_1st, params, parsed)
    assertCheckResultsEqual(
        CheckResult(read),
        CheckResult((0, 'Read: 0.00 B/s', [('disk_read_throughput', 0.0, None, None)])))
    assertCheckResultsEqual(
        CheckResult(written),
        CheckResult((0, 'Write: 0.00 B/s', [('disk_write_throughput', 0.0, None, None)])))
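Each raw value wrapped in CheckResult above follows the legacy check API convention: a (state, infotext, perfdata) tuple, where perfdata entries are (metric, value, warn, crit), optionally extended by (min, max). Restating the expected read result with that reading (an annotation, not code from the original test):

expected_read = (
    0,                                             # state: 0 = OK
    'Read: 0.00 B/s',                              # infotext
    [('disk_read_throughput', 0.0, None, None)],   # perfdata: (metric, value, warn, crit)
)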
Example 4
def test_df_check_with_parse(check_manager, item, params, info,
                             expected_result):
    check = check_manager.get_check("df")

    actual = CheckResult(check.run_check(item, params, check.run_parse(info)))
    expected = CheckResult(expected_result)
    assertCheckResultsEqual(actual, expected)
Example 5
def test_df_check(check_manager):
    item_1st = 'VMFS_01'
    params = {'flex_levels': 'irrelevant'}
    check = check_manager.get_check("hp_msa_volume.df")
    parsed = {
        u'VMFS_01': {
            u'durable-id': u'V3',
            u'virtual-disk-name': u'A',
            u'total-size-numeric': u'4296482816',
            u'allocated-size-numeric': u'2484011008',
            u'raidtype': u'RAID0',
        },
        u'VMFS_02': {
            u'durable-id': u'V4',
            u'virtual-disk-name': u'A',
            u'total-size-numeric': u'4296286208',
            u'allocated-size-numeric': u'3925712896',
            u'raidtype': u'RAID0',
        }
    }
    expected_result = (0, '57.81% used (1.16 of 2.00 TB), trend: 0.00 B / 24 hours', [
        ('fs_used', 1212896, 1678313.6, 1888102.8, 0, 2097892),
        ('fs_size', 2097892),
        ('fs_used_percent', 57.81498761614039),
        ('growth', 0.0),
        ('trend', 0, None, None, 0, 87412.16666666667),
    ])
    _, trend_result = check.run_check(item_1st, params, parsed)
    assertCheckResultsEqual(CheckResult(trend_result), CheckResult(expected_result))
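Note that this variant mocks neither the clock nor the item state, so there is presumably no previous sample for the trend computation, and growth and trend come out as 0. The next example runs the same check with a frozen time and a mocked item state and gets a non-zero trend for the same data.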
Example 6
def test_df_check():
    item_1st = 'VMFS_01'
    params = {'flex_levels': 'irrelevant'}
    check = Check("hp_msa_volume.df")
    parsed = {
        u'VMFS_01': {
            u'durable-id': u'V3',
            u'virtual-disk-name': u'A',
            u'total-size-numeric': u'4296482816',
            u'allocated-size-numeric': u'2484011008',
            u'raidtype': u'RAID0',
        },
        u'VMFS_02': {
            u'durable-id': u'V4',
            u'virtual-disk-name': u'A',
            u'total-size-numeric': u'4296286208',
            u'allocated-size-numeric': u'3925712896',
            u'raidtype': u'RAID0',
        }
    }
    expected_result = (0, '57.81% used (1.16 of 2.00 TB), trend: +2.43 TB / 24 hours', [
        ('fs_used', 1212896, 1678313.6, 1888102.8, 0, 2097892),
        ('fs_size', 2097892),
        ('fs_used_percent', 57.81498761614039),
        ('growth', 1329829.766497462),
        ('trend', 2551581.1594836353, None, None, 0, 87412.16666666667),
    ])

    with freezegun.freeze_time("2020-07-31 07:00:00"), MockItemState((1596100000, 42)):
        _, trend_result = check.run_check(item_1st, params, parsed)

    assertCheckResultsEqual(CheckResult(trend_result), CheckResult(expected_result))
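The frozen clock and the mocked item state make the trend figures reproducible. Assuming the state tuple (1596100000, 42) represents the previous (timestamp, used-MB) sample and that growth is reported in MB per 24 hours, the expected 'growth' value can be reproduced like this (a sketch under those assumptions, not code from the test):

frozen_ts = 1596178800               # 2020-07-31 07:00:00 UTC
prev_ts, prev_used = 1596100000, 42  # mocked item state
used_now = 1212896                   # 'fs_used' from the expected perfdata

elapsed = frozen_ts - prev_ts        # 78800 s
growth_per_24h = (used_now - prev_used) * 86400.0 / elapsed
print(growth_per_24h)                # ~1329829.77, matching the expected 'growth'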
Example 7
def test_check_single_queue(raw_queue, levels_length, expected_result):
    check_single_queue = _get_from_context("_check_single_queue")
    queue = _get_from_context("Queue")
    assertCheckResultsEqual(
        CheckResult(check_single_queue(queue(*raw_queue), levels_length)),
        CheckResult(expected_result),
    )
Example 8
def test_check_temperature_called(test_case):
    check = Check('acme_temp')
    check_temperature = check.context['check_temperature']
    time = dt.datetime(2014, 1, 1, 0, 0, 0)

    state = {
        'temp.foo.delta': (unix_ts(time), test_case.reading),
        'temp.foo.trend': (0, 0)
    }

    with MockItemState(state):
        with freezegun.freeze_time(time + dt.timedelta(seconds=test_case.seconds_elapsed)):
            # Assuming atmospheric pressure...
            result = check_temperature(
                test_case.reading + test_case.growth,
                {
                    'device_level_handling': 'dev',
                    'trend_compute': test_case.wato_dict,
                },
                'foo',
                dev_unit='c',
                dev_levels=(100, 100),  # don't boil
                dev_levels_lower=(0, 0),  # don't freeze over
            )
            assertCheckResultsEqual(CheckResult(result), CheckResult(test_case.expected))
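The unix_ts helper used for the mocked state is defined elsewhere in the test module; a plausible stand-in, assuming it simply converts a naive UTC datetime to a UNIX timestamp:

import calendar
import datetime as dt

def unix_ts(moment):
    """Hypothetical helper: naive UTC datetime -> UNIX timestamp."""
    return calendar.timegm(moment.timetuple())

unix_ts(dt.datetime(2014, 1, 1, 0, 0, 0))  # 1388534400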
Example 9
def test_df_check_groups_with_parse(add_params, expected_result):
    check = Check('df')
    params = df_params
    params.update(add_params)

    actual = CheckResult(check.run_check("my-group", params, parse_df(info_df_groups)))
    expected = CheckResult(expected_result)
    assertCheckResultsEqual(actual, expected)
Example 10
def test_k8s_replicas(info, expected):
    check = Check("k8s_replicas")
    parsed = parse_json(info)
    actual = check.run_check(None, {}, parsed)

    assertCheckResultsEqual(
        CheckResult(actual),
        CheckResult(expected),
    )
Example 11
def test_diskstat_dict_warns_and_crits_about_read_and_write_ios(
        check_manager, item, params, disks, expected, monkeypatch):
    check = check_manager.get_check("diskstat")
    check_diskstat_dict = check.context["check_diskstat_dict"]
    with monkeypatch.context() as m:
        m.setattr('time.time', lambda: 0.0)
        actual_result = CheckResult(check_diskstat_dict(item, params, disks))
    expected_result = CheckResult(expected)
    assertCheckResultsEqual(actual_result, expected_result)
Example 12
def test_k8s_replicas(check_manager, info, expected):
    check = check_manager.get_check("k8s_replicas")
    parsed = check.run_parse(info)
    actual = check.run_check(None, {}, parsed)

    assertCheckResultsEqual(
        CheckResult(actual),
        CheckResult(expected),
    )
Example 13
def test_uptime_solaris_inputs(check_manager, info, reference):
    check = check_manager.get_check("uptime")

    parsed = check.run_parse(info)

    # This time freeze has no bearing on the uptime value under test. It is
    # only needed so that the check output always renders the same infotext;
    # the real assertions are on state and perfdata.
    with on_time('2018-04-15 16:50', 'CET'):
        result = CheckResult(check.run_check(None, {}, parsed))
    assertCheckResultsEqual(result, CheckResult(reference))
Example 14
def test_check_nullmailer_mailq(raw_queues, expected_result):
    dummy_item = ""
    params = _get_from_context("nullmailer_mailq_default_levels")
    check_nullmailer_mailq = _get_from_context("check_nullmailer_mailq")
    queue = _get_from_context("Queue")
    assertCheckResultsEqual(
        CheckResult(
            check_nullmailer_mailq(
                dummy_item, params,
                [queue(*raw_queue) for raw_queue in raw_queues])),
        CheckResult(expected_result),
    )
Example 15
def test_if_check(check_manager, monkeypatch, item, params, result):
    check = check_manager.get_check('lnx_if')
    assert check.run_discovery(parsed_change(0)) == DISCOVERY

    monkeypatch.setattr('time.time', lambda: 0)
    with pytest.raises(MKCounterWrapped):
        CheckResult(check.run_check(item, params, parsed_change(0)))

    monkeypatch.setattr('time.time', lambda: 5)
    output = check.run_check(item, params, parsed_change(4000000))

    assertCheckResultsEqual(CheckResult(output), CheckResult(result))
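This is the usual two-phase pattern for counter-based checks: the first run only primes the counters (and is expected to raise MKCounterWrapped), the second run five seconds later has a counter delta to derive a rate from. Schematically, and only as an illustration since the actual counters live inside parsed_change:

t0, counter0 = 0, 0            # first run: state is stored, MKCounterWrapped raised
t1, counter1 = 5, 4_000_000    # second run, 5 s later, counter has advanced
rate = (counter1 - counter0) / (t1 - t0)   # 800000.0 per second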
Example 16
def check_listed_result(check, list_entry, info_arg, immu):
    """Run check for all results listed in dataset"""
    item, params, results_expected_raw = list_entry
    print("Dataset item %r in check %r" % (item, check.name))

    immu.register(params, 'params')
    result_raw = check.run_check(item, params, info_arg)
    check_func = check.info.get("check_function")
    immu.test(' after check (%s): ' % check_func.__name__)

    result = CheckResult(result_raw)
    result_expected = CheckResult(results_expected_raw)
    assertCheckResultsEqual(result, result_expected)
Example 17
def test_local_check(check_manager, monkeypatch, item, info):
    monkeypatch.setattr('time.time', lambda: 1556005721)
    check = check_manager.get_check('local')

    parsed = check.run_parse(info)
    assert parsed[item][0].cached == (420.0, 140.0, 300.0)

    result = CheckResult(check.run_check(item, {}, parsed))
    expected = CheckResult([
        (0,
         "Cache generated 7 m ago, cache interval: 5 m, elapsed cache lifespan: 140%"
         ),
        (0, "On node node_1: This Check is outdated", [("V", 1.0)]),
    ])
    assertCheckResultsEqual(result, expected)
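The cached tuple asserted above is consistent with an (age-seconds, lifespan-percent, interval-seconds) reading of the cache metadata; checking the arithmetic behind the expected infotext under that assumption:

age, lifespan, interval = 420.0, 140.0, 300.0   # parsed[item][0].cached
assert age / 60 == 7                     # "Cache generated 7 m ago"
assert interval / 60 == 5                # "cache interval: 5 m"
assert age / interval * 100 == lifespan  # "elapsed cache lifespan: 140%"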
Example 18
def test_crashreport(check_manager, crashdata):
    try:
        generictests.run(check_manager, crashdata)
        check = check_manager.get_check(crashdata.full_checkname)
        #FIXME
        #if crashdata.is_discovery:
        #    if crashdata.parsed:
        #        raw_result = check.run_discovery(crashdata.parsed)
        #    else:
        #        raw_result = check.run_discovery(crashdata.info)
        #    print(DiscoveryResult(raw_result))
        #    return

        if 'item' in crashdata.vars:
            item = crashdata.vars['item']
            params = crashdata.vars.get('params', {})
            if crashdata.parsed:
                raw_result = check.run_check(item, params, crashdata.parsed)
            else:
                raw_result = check.run_check(item, params, crashdata.info)
            print(CheckResult(raw_result))
    except:
        pprint.pprint(crashdata.__dict__)
        crashdata.write()
        raise
Example 19
def test_check_win_license(capture, result):
    check = Check("win_license")
    output = check.run_check(None, result.parameters
                             or check.default_parameters(),
                             check.run_parse(splitter(capture)))

    assertCheckResultsEqual(CheckResult(output), result.check_output)
Example 20
def test_subset_patterns(check_manager):

    check = check_manager.get_check("ps")
    check.set_check_api_utils_globals()  # needed for host name

    parsed = check.context['parse_ps'](
        splitter("""(user,0,0,0.5) main
(user,0,0,0.4) main_dev
(user,0,0,0.1) main_dev
(user,0,0,0.5) main_test"""))[1]

    # The word boundary in the match is necessary, otherwise the 'main' instance accumulates all processes
    wato_rule = [({
        'default_params': {
            'cpu_rescale_max': True,
            'levels': (1, 1, 99999, 99999)
        },
        'match': '~(main.*)\\b',
        'descr': '%s'
    }, [], ["@all"], {})]

    discovered = [
        ('main', {
            'cpu_rescale_max': True,
            'levels': (1, 1, 99999, 99999),
            'process': '~(main.*)\\b',
            'match_groups': ('main',),
            'user': None,
            'cgroup': (None, False),
        }),
        ('main_dev', {
            'cpu_rescale_max': True,
            'levels': (1, 1, 99999, 99999),
            'process': '~(main.*)\\b',
            'match_groups': ('main_dev',),
            'user': None,
            'cgroup': (None, False),
        }),
        ('main_test', {
            'cpu_rescale_max': True,
            'levels': (1, 1, 99999, 99999),
            'process': '~(main.*)\\b',
            'match_groups': ('main_test',),
            'user': None,
            'cgroup': (None, False),
        }),
    ]

    assert check.context["inventory_ps_common"](wato_rule, parsed) == discovered

    def counted_reference(count):
        return CheckResult([
            (0, "%s process%s" % (count, '' if count == 1 else 'es'), [("count", count, 100000,
                                                                        100000, 0, None)]),
            (0, "0.5% CPU", [("pcpu", 0.5, None, None, None, None)]),
        ])

    for (item, params), count in zip(discovered, [1, 2, 1]):
        output = CheckResult(check.context["check_ps_common"](item, params, parsed, cpu_cores=1))
        assertCheckResultsEqual(output, counted_reference(count))
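The capture group of the discovery pattern becomes the item name via the '%s' description; a quick illustration of what '~(main.*)\b' captures for each process name from the agent output (the leading '~' marks the pattern as a regex in the rule, it is not part of the regex itself):

import re

for name in ('main', 'main_dev', 'main_test'):
    match = re.match(r'(main.*)\b', name)
    print(match.group(1))   # 'main', 'main_dev', 'main_test' -> one service per distinct name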
Example 21
def test_check_diskstat_line(monkeypatch, args, expected_result):
    monkeypatch.setattr(cmk.base.check_legacy_includes.diskstat, 'get_rate',
                        get_rate)
    monkeypatch.setattr(cmk.base.check_legacy_includes.diskstat, 'get_average',
                        get_average)
    actual_result = CheckResult(check_diskstat_line(*args))  # type: ignore[name-defined] # pylint: disable=undefined-variable
    assertCheckResultsEqual(actual_result, expected_result)
Example 22
def test_check_ps_common_cpu(check_manager, monkeypatch, data):
    check = check_manager.get_check("ps")

    def time_info(agent_info, check_time, cputime, cpu_cores):
        with on_time(datetime.datetime.utcfromtimestamp(check_time), "CET"):
            parsed = check.context['parse_ps'](splitter(agent_info.format(cputime)))[1]

            return CheckResult(check.context["check_ps_common"](
                inv_item[0], inv_item[1], parsed, cpu_cores=cpu_cores))

    inv_item = (
        "test",
        {
            "process": "~test",
            "user": None,
            "levels": (1, 1, 99999, 99999)  # from factory defaults
        })
    if data.cpu_rescale_max is not None:
        inv_item[1].update({"cpu_rescale_max": data.cpu_rescale_max})

    # Initialize counters
    time_info(data.agent_info, 0, 0, data.cpu_cores)
    # Check CPU utilization
    output = time_info(data.agent_info, 60, data.cputime, data.cpu_cores)

    reference = CheckResult([
        (0, "1 process", [("count", 1, 100000, 100000, 0)]),
        (0, "105.00 kB virtual", [("vsz", 105, None, None, None, None)]),
        (0, "30.00 kB physical", [("rss", 30, None, None, None, None)]),
        check.context["cpu_check"](data.exp_load, inv_item[0], inv_item[1]),
        (0, "running for 239 m", []),
    ])

    assertCheckResultsEqual(output, reference)
Example 23
def test_statgrab_cpu_check(info, mockstate, expected_result):

    check = Check("statgrab_cpu")

    # set up mocking of `get_item_state`
    with mock_item_state(mockstate):
        result = CheckResult(check.run_check(None, {}, info))
    assertCheckResultsEqual(result, expected_result)
Example 24
def test_df_check_with_parse(check_manager, item, params, info,
                             expected_result):
    check = check_manager.get_check("df")

    if params == "default":
        params = check.default_parameters()

    result = CheckResult(check.run_check(item, params, check.run_parse(info)))
Example 25
def test_check_diskstat_generic_summary_clutster(monkeypatch, info,
                                                 expected_result):
    monkeypatch.setattr(cmk.base.check_legacy_includes.diskstat, 'get_rate',
                        get_rate)
    monkeypatch.setattr(cmk.base.check_legacy_includes.diskstat, 'get_average',
                        get_average)
    actual_result = CheckResult(check_diskstat_generic("SUMMARY", {}, 0, info))  # type: ignore[name-defined] # pylint: disable=undefined-variable
    assertCheckResultsEqual(actual_result, expected_result)
Example 26
def check_discovered_result(check, discovery_result, info_arg, immu):
    """Run the check on all discovered items with the default parameters.
    We cannot validate the results, but at least make sure we don't crash.
    """
    print("Check %r in check %r" % (discovery_result, check.name))

    item = discovery_result.item

    params = get_merged_parameters(check, discovery_result.default_params)
    immu.register(params, 'params')

    raw_checkresult = check.run_check(item, params, info_arg)
    check_func = check.info.get("check_function")
    immu.test(' after check (%s): ' % check_func.__name__)

    cr = CheckResult(raw_checkresult)

    return (item, params, cr.raw_repr())
Example 27
def test_local_check(check_manager, monkeypatch, item, info):
    monkeypatch.setattr('time.time', lambda: 1556005721)
    check = check_manager.get_check('local')

    parsed = check.run_parse(info)
    assert parsed[item][0].expired == 120.

    with pytest.raises(MKCounterWrapped):
        CheckResult(check.run_check(item, {}, parsed))
Example 28
def test_statgrab_cpu_check_error(info, mockstate):

    check = Check("statgrab_cpu")

    with mock_item_state(mockstate):
        # The mocked values are designed to trigger an exception;
        # assert that it is actually raised:
        with assertMKCounterWrapped('Too short time difference since last check'):
            CheckResult(check.run_check(None, {}, info))
Example 29
def test_cpu_util_single_process_levels(check_manager, monkeypatch, cpu_cores):
    """Test CPU utilization per single process.
- Check that the number-of-cores weighting is active
- Check that single-process CPU utilization is reported only for warn/crit states"""

    check = check_manager.get_check("ps")

    params: Dict[str, Any] = {
        'process': '~.*firefox',
        'process_info': "text",
        'cpu_rescale_max': True,
        'levels': (1, 1, 99999, 99999),
        'single_cpulevels': (45.0, 80.0),
    }

    def run_check_ps_common_with_elapsed_time(check_time, cputime):
        with on_time(check_time, "CET"):
            agent_info = """(on,2275004,434008,00:00:49/26:58,25576) firefox
(on,1869920,359836,00:01:23/6:57,25664) firefox
(on,7962644,229660,00:00:10/26:56,25758) firefox
(on,1523536,83064,00:{:02}:00/26:55,25898) firefox"""
            parsed = check.context['parse_ps'](splitter(agent_info.format(cputime)))[1]

            return CheckResult(check.context["check_ps_common"](
                'firefox', params, parsed, cpu_cores=cpu_cores))

    # CPU utilization is a counter, initialize it
    run_check_ps_common_with_elapsed_time(0, 0)
    # CPU utilization is a counter: 60 s later, one process has consumed 2 min of CPU
    output = run_check_ps_common_with_elapsed_time(60, 2)

    cpu_util = 200.0 / cpu_cores
    cpu_util_s = check.context['get_percent_human_readable'](cpu_util)
    single_msg = 'firefox with PID 25898 CPU: %s (warn/crit at 45.0%%/80.0%%)' % cpu_util_s
    reference = [
        (0, "Processes: 4", [("count", 4, 100000, 100000, 0)]),
        (0, "virtual: 13.00 GB", [("vsz", 13631104, None, None, None, None)]),
        (0, "physical: 1.06 GB", [("rss", 1106568, None, None, None, None)]),
        (0, "CPU: %s" % cpu_util_s, [('pcpu', cpu_util, None, None, None, None)]),
        (0, 'youngest running for: 6 m', []),
        (0, 'oldest running for: 26 m', []),
        (0, "\r\n".join([
            '\nname firefox, user on, virtual size 2275004kB, resident size 434008kB, creation time 1970-01-01 00:34:02, pid 25576, cpu usage 0.0%',
            'name firefox, user on, virtual size 1869920kB, resident size 359836kB, creation time 1970-01-01 00:54:03, pid 25664, cpu usage 0.0%',
            'name firefox, user on, virtual size 7962644kB, resident size 229660kB, creation time 1970-01-01 00:34:04, pid 25758, cpu usage 0.0%',
            'name firefox, user on, virtual size 1523536kB, resident size 83064kB, creation time 1970-01-01 00:34:05, pid 25898, cpu usage %.1f%%\r\n'
            % cpu_util,
        ]))
    ]

    if cpu_util > params['single_cpulevels'][1]:
        reference.insert(4, (2, single_msg, []))
    elif cpu_util > params['single_cpulevels'][0]:
        reference.insert(4, (1, single_msg, []))

    assertCheckResultsEqual(output, CheckResult(reference))
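The 200.0 in cpu_util follows directly from the fake agent data: between the two runs, the process with PID 25898 goes from 00:00:00 to 00:02:00 of CPU time, i.e. 120 CPU-seconds in 60 wall-clock seconds, and with cpu_rescale_max enabled the percentage is (by assumption here) divided by the core count:

cpu_seconds = 2 * 60                 # '00:02:00' consumed by PID 25898
wall_seconds = 60                    # second run happens 60 s after the first
raw_util = 100.0 * cpu_seconds / wall_seconds   # 200.0 %
# cpu_util = raw_util / cpu_cores    # rescaled across cores (cpu_rescale_max=True)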