Ejemplo n.º 1
0
    def _get_static_check_entries(self, host_config):
        # type: (config.HostConfig) -> List[Service]
        """Return the services defined via static_checks rules for this host.

        The list is reversed so that earlier rules take precedence over
        later ones when the same (check type, item) combination occurs.
        """
        services = []  # type: List[Service]
        for _checkgroup_name, check_plugin_name, item, params in host_config.static_checks:
            # For dictionary based checks, merge in any factory-default keys
            # that the rule's parameters do not override.
            # TODO: Isn't this done during checking for all checks in more generic code?
            if isinstance(params, dict) and check_plugin_name in config.check_info:
                def_levels_varname = config.check_info[check_plugin_name].get(
                    "default_levels_variable")
                if def_levels_varname:
                    defaults = config.factory_settings.get(def_levels_varname, {})
                    for key, value in defaults.items():
                        params.setdefault(key, value)

            descr = config.service_description(host_config.hostname, check_plugin_name, item)
            services.append(Service(check_plugin_name, item, descr, params))

        # Note: We need to reverse the order of the static_checks. This is
        # because users assume that earlier rules have precedence over later
        # ones. For static checks that is important if there are two rules for
        # a host with the same combination of check type and item.
        return list(reversed(services))
Ejemplo n.º 2
0
def _get_static_check_entries(
    host_config: config.HostConfig, ) -> Iterator[Service]:
    """Yield the services configured via static checks for *host_config*.

    The collected entries are returned in reverse order, so that earlier
    rules win over later ones for identical (check type, item) pairs.
    """
    collected: List[Service] = []
    for _checkgroup_name, check_plugin_name_str, item, params in host_config.static_checks:
        # TODO (mo): centralize maincheckify: CMK-4295
        check_plugin_name = CheckPluginName(
            maincheckify(check_plugin_name_str))

        # Time-specific parameters are stripped before computing the
        # effective parameters and re-applied afterwards.
        if config.has_timespecific_params(params):
            timespec_params = [params]
            params = {}
        else:
            timespec_params = []

        new_params = config.compute_check_parameters(
            host_config.hostname,
            check_plugin_name,
            item,
            params,
            for_static_checks=True,
        )

        params = (config.set_timespecific_param_list(timespec_params, new_params)
                  if timespec_params else new_params)

        descr = config.service_description(host_config.hostname,
                                           check_plugin_name, item)
        collected.append(Service(check_plugin_name, item, descr, params))

    # Note: We need to reverse the order of the static_checks. This is
    # because users assume that earlier rules have precedence over later
    # ones. For static checks that is important if there are two rules for
    # a host with the same combination of check type and item.
    return reversed(collected)
Ejemplo n.º 3
0
def _enriched_discovered_services(
    host_name: HostName,
    check_plugin_name: CheckPluginName,
    plugins_services: checking_classes.DiscoveryResult,
) -> Generator[Service, None, None]:
    """Turn a plugin's discovery results into internal Service objects.

    Entries for which no service description can be computed are logged
    and skipped.
    """
    for discovered in plugins_services:
        description = config.service_description(host_name, check_plugin_name,
                                                 discovered.item)
        # make sanity check
        if not description:
            console.error(
                f"{host_name}: {check_plugin_name} returned empty service description - ignoring it.\n"
            )
            continue

        # Convert from APIs ServiceLabel to internal ServiceLabel
        labels = DiscoveredServiceLabels(*(ServiceLabel(*pair)
                                           for pair in discovered.labels))

        yield Service(
            check_plugin_name=check_plugin_name,
            item=discovered.item,
            description=description,
            parameters=unwrap_parameters(discovered.parameters),
            service_labels=labels,
        )
Ejemplo n.º 4
0
def test_get_cmk_passive_service_attributes(monkeypatch, hostname, result):
    """Verify the passive service attributes computed for the CPU load service."""
    scenario = Scenario().add_host("localhost")
    scenario.add_host("blub")
    scenario.set_option(
        "extra_service_conf",
        {
            "contact_groups": [("ding", ["localhost"], ["CPU load$"])],
            "check_interval": [
                (40.0, ["blub"], ["Check_MK$"]),
                (33.0, ["localhost"], ["CPU load$"]),
            ],
        },
    )
    config_cache = scenario.apply(monkeypatch)
    host_config = config_cache.get_host_config(hostname)
    check_mk_attrs = core_config.get_service_attributes(
        hostname, "Check_MK", config_cache)

    service = Service(CheckPluginName("cpu_loads"), None, "CPU load", {})
    service_spec = core_config.get_cmk_passive_service_attributes(
        config_cache, host_config, service, check_mk_attrs)
    assert service_spec == result
Ejemplo n.º 5
0
    def _get_clustered_services(self, hostname, skip_autochecks):
        # type: (str, bool) -> CheckTable
        """Build the check table of a cluster host from its nodes' services."""
        check_table = {}  # type: CheckTable
        for node in self._host_config.nodes or []:
            # TODO: Cleanup this to work exactly like the logic above (for a single host)
            node_config = self._config_cache.get_host_config(node)
            candidates = list(self._get_static_check_entries(node_config))
            if not skip_autochecks:
                candidates.extend(self._config_cache.get_autochecks_of(node))

            for svc in candidates:
                # Only adopt node services that are clustered onto this host.
                owner = self._config_cache.host_of_clustered_service(
                    node, svc.description)
                if owner != hostname:
                    continue

                # Parameters are recomputed in the context of the cluster host.
                cluster_params = config.compute_check_parameters(
                    hostname, svc.check_plugin_name, svc.item, svc.parameters)
                clustered_service = Service(svc.check_plugin_name, svc.item,
                                            svc.description, cluster_params,
                                            svc.service_labels)
                check_table.update(self._handle_service(clustered_service))
        return check_table
Ejemplo n.º 6
0
def test_get_cmk_passive_service_attributes(monkeypatch, hostname, result):
    """Verify passive service attributes for the legacy cpu.loads service."""
    CheckManager().load(["cpu"])

    scenario = Scenario().add_host("localhost")
    scenario.add_host("blub")
    scenario.set_option(
        "extra_service_conf", {
            "contact_groups": [
                (u'ding', ['localhost'], ["CPU load$"]),
            ],
            "check_interval": [
                (40.0, ['blub'], ["Check_MK$"]),
                (33.0, ['localhost'], ["CPU load$"]),
            ],
        })
    config_cache = scenario.apply(monkeypatch)
    host_config = config_cache.get_host_config(hostname)
    check_mk_attrs = core_config.get_service_attributes(
        hostname, "Check_MK", config_cache)

    service = Service("cpu.loads", None, "CPU load", {})
    assert core_config.get_cmk_passive_service_attributes(
        config_cache, host_config, service, check_mk_attrs) == result
Ejemplo n.º 7
0

# TODO: This misses a lot of cases
# - different get_check_table arguments
@pytest.mark.usefixtures("load_all_agent_based_plugins")
@pytest.mark.parametrize(
    "hostname,expected_result",
    [
        ("empty-host", {}),
        # Skip the autochecks automatically for ping hosts
        ("ping-host", {}),
        ("no-autochecks", {
            (CheckPluginName('smart_temp'), '/dev/sda'):
            Service(
                check_plugin_name=CheckPluginName("smart_temp"),
                item=u"/dev/sda",
                parameters={'levels': (35, 40)},
                description=u'Temperature SMART /dev/sda',
            ),
        }),
        # Static checks overwrite the autocheck definitions
        ("autocheck-overwrite", {
            (CheckPluginName('smart_temp'), '/dev/sda'):
            Service(
                check_plugin_name=CheckPluginName("smart_temp"),
                item=u"/dev/sda",
                parameters={'levels': (35, 40)},
                description=u'Temperature SMART /dev/sda',
            ),
            (CheckPluginName('smart_temp'), '/dev/sdb'):
            Service(
                check_plugin_name=CheckPluginName('smart_temp'),
Ejemplo n.º 8
0
    def _read_raw_autochecks_of(self, hostname):
        # type: (HostName) -> List[Service]
        """Read automatically discovered checks of one host.

        Returns an empty list when the autochecks file does not exist or
        cannot be parsed (unless debug mode is enabled, in which case
        parse errors are re-raised).
        """
        basedir = cmk.utils.paths.autochecks_dir
        filepath = basedir + '/' + hostname + '.mk'

        result = []  # type: List[Service]
        if not os.path.exists(filepath):
            return result

        check_config = config.get_check_variables()
        try:
            cmk.base.console.vverbose("Loading autochecks from %s\n", filepath)
            # Fix: close the file handle deterministically. The previous
            # bare open(...).read() leaked the handle until garbage
            # collection.
            with open(filepath) as f:
                raw_content = f.read().decode("utf-8")
            # SECURITY NOTE: eval() is needed to resolve check variables
            # referenced by the autochecks file. It must only ever be
            # applied to site-local files, never to untrusted input.
            autochecks_raw = eval(raw_content, check_config,
                                  check_config)  # type: List[Dict]
        except SyntaxError as e:
            cmk.base.console.verbose("Syntax error in file %s: %s\n",
                                     filepath,
                                     e,
                                     stream=sys.stderr)
            if cmk.utils.debug.enabled():
                raise
            return result
        except Exception as e:
            cmk.base.console.verbose("Error in file %s:\n%s\n", filepath, e, stream=sys.stderr)
            if cmk.utils.debug.enabled():
                raise
            return result

        for entry in autochecks_raw:
            # Tuple entries are the pre-1.6 autocheck format and must be
            # migrated by cmk-update-config first.
            if isinstance(entry, tuple):
                raise MKGeneralException(
                    "Invalid check entry '%r' of host '%s' (%s) found. This "
                    "entry is in pre Checkmk 1.6 format and needs to be converted. This is "
                    "normally done by \"cmk-update-config -v\" during \"omd update\". Please "
                    "execute \"cmk-update-config -v\" for convertig the old configuration." %
                    (entry, hostname, filepath))

            labels = DiscoveredServiceLabels()
            for label_id, label_value in entry["service_labels"].items():
                labels.add_label(ServiceLabel(label_id, label_value))

            # With Check_MK 1.2.7i3 items are now defined to be unicode strings. Convert
            # items from existing autocheck files for compatibility. TODO remove this one day
            item = entry["item"]
            if isinstance(item, str):
                item = convert_to_unicode(item)

            if not isinstance(entry["check_plugin_name"], six.string_types):
                raise MKGeneralException("Invalid entry '%r' in check table of host '%s': "
                                         "The check type must be a string." % (entry, hostname))

            check_plugin_name = str(entry["check_plugin_name"])

            # Entries whose service description cannot be computed (e.g.
            # the check plugin no longer exists) are silently skipped.
            try:
                description = config.service_description(hostname, check_plugin_name, item)
            except Exception:
                continue  # ignore

            result.append(
                Service(
                    check_plugin_name=check_plugin_name,
                    item=item,
                    description=description,
                    parameters=entry["parameters"],
                    service_labels=labels,
                ))

        return result
Ejemplo n.º 9
0
    def _read_raw_autochecks_uncached(
        self,
        hostname: HostName,
        service_description: GetServiceDescription,
    ) -> List[Service]:
        """Read automatically discovered checks of one host.

        Parse errors are logged and yield an empty list, unless debug mode
        is enabled, in which case they are re-raised.
        """
        path = _autochecks_path_for(hostname)
        try:
            autochecks_raw = _load_raw_autochecks(
                path=path,
                check_variables=None,
            )
        except SyntaxError as e:
            console.verbose("Syntax error in file %s: %s\n",
                            path,
                            e,
                            stream=sys.stderr)
            if cmk.utils.debug.enabled():
                raise
            return []
        except Exception as e:
            console.verbose("Error in file %s:\n%s\n",
                            path,
                            e,
                            stream=sys.stderr)
            if cmk.utils.debug.enabled():
                raise
            return []

        services: List[Service] = []
        for entry in autochecks_raw:
            # Tuple entries are the pre-1.6 autocheck format; they must be
            # migrated by cmk-update-config before they can be read here.
            if isinstance(entry, tuple):
                raise MKGeneralException(
                    "Invalid check entry '%r' of host '%s' (%s) found. This "
                    "entry is in pre Checkmk 1.6 format and needs to be converted. This is "
                    "normally done by \"cmk-update-config -v\" during \"omd update\". Please "
                    "execute \"cmk-update-config -v\" for convertig the old configuration."
                    % (entry, hostname, path))

            labels = DiscoveredServiceLabels()
            for label_id, label_value in entry["service_labels"].items():
                labels.add_label(ServiceLabel(label_id, label_value))

            # With Check_MK 1.2.7i3 items are now defined to be unicode strings. Convert
            # items from existing autocheck files for compatibility. TODO remove this one day
            item = entry["item"]

            if not isinstance(entry["check_plugin_name"], str):
                raise MKGeneralException(
                    "Invalid entry '%r' in check table of host '%s': "
                    "The check type must be a string." % (entry, hostname))

            check_plugin_name_str = str(entry["check_plugin_name"])
            # TODO (mo): centralize maincheckify: CMK-4295
            check_plugin_name = CheckPluginName(
                maincheckify(check_plugin_name_str))
            # Entries whose description cannot be computed (e.g. the plugin
            # no longer exists) are silently skipped.
            try:
                description = service_description(hostname, check_plugin_name,
                                                  item)
            except Exception:
                continue  # ignore

            # NOTE(review): the description above is computed from the
            # CheckPluginName object, but the Service is built with the raw
            # string name — looks like a transitional state of CMK-4295;
            # confirm this asymmetry is intended.
            services.append(
                Service(
                    check_plugin_name=check_plugin_name_str,
                    item=item,
                    description=description,
                    parameters=entry["parameters"],
                    service_labels=labels,
                ))

        return services
Ejemplo n.º 10
0
def _service(plugin_name: str, item: str) -> Service:
    """Test helper: build a Service with empty description and parameters."""
    plugin = CheckPluginName(plugin_name)
    return Service(plugin, item, "", {})
Ejemplo n.º 11
0
    def _read_raw_autochecks_uncached(
        self,
        hostname: HostName,
        service_description: GetServiceDescription,
    ) -> List[Service]:
        """Read automatically discovered checks of one host.

        Parse errors are logged and yield an empty list, unless debug mode
        is enabled, in which case they are re-raised. Entries in pre-1.6 or
        pre-2.0 format raise MKGeneralException, pointing the user at
        cmk-update-config.
        """
        path = _autochecks_path_for(hostname)
        try:
            autochecks_raw = _load_raw_autochecks(
                path=path,
                check_variables=None,
            )
        except SyntaxError as e:
            logger.exception("Syntax error in file %s: %s", path, e)
            if cmk.utils.debug.enabled():
                raise
            return []
        except Exception as e:
            logger.exception("Error in file %s:\n%s", path, e)
            if cmk.utils.debug.enabled():
                raise
            return []

        services: List[Service] = []
        for entry in autochecks_raw:
            # Pre-1.6 entries are tuples, not dicts, so the subscript below
            # raises TypeError — which is mapped to a migration hint.
            try:
                item = entry["item"]
            except TypeError:  # pre 1.6 tuple!
                raise MKGeneralException(
                    "Invalid check entry '%r' of host '%s' (%s) found. This "
                    "entry is in pre Checkmk 1.6 format and needs to be converted. This is "
                    "normally done by \"cmk-update-config -v\" during \"omd update\". Please "
                    "execute \"cmk-update-config -v\" for convertig the old configuration."
                    % (entry, hostname, path))

            # Any failure to build a CheckPluginName (or a non-str item) is
            # treated as a pre-2.0 format entry needing migration.
            try:
                plugin_name = CheckPluginName(
                    maincheckify(entry["check_plugin_name"]))
                assert item is None or isinstance(item, str)
            except Exception:
                raise MKGeneralException(
                    "Invalid check entry '%r' of host '%s' (%s) found. This "
                    "entry is in pre Checkmk 2.0 format and needs to be converted. This is "
                    "normally done by \"cmk-update-config -v\" during \"omd update\". Please "
                    "execute \"cmk-update-config -v\" for convertig the old configuration."
                    % (entry, hostname, path))

            labels = DiscoveredServiceLabels()
            for label_id, label_value in entry["service_labels"].items():
                labels.add_label(ServiceLabel(label_id, label_value))

            # Entries whose description cannot be computed (e.g. the plugin
            # no longer exists) are silently skipped.
            try:
                description = service_description(hostname, plugin_name, item)
            except Exception:
                continue  # ignore

            services.append(
                Service(
                    check_plugin_name=plugin_name,
                    item=item,
                    description=description,
                    parameters=entry["parameters"],
                    service_labels=labels,
                ))

        return services
Ejemplo n.º 12
0
def test_get_check_table(
    monkeypatch: MonkeyPatch, hostname_str: str, expected_result: HostCheckTable
) -> None:
    """Compare the computed check table of *hostname_str* with *expected_result*.

    Autochecks are stubbed via monkeypatch instead of reading autocheck
    files; static checks and clustering rules are set up in the Scenario.
    """
    hostname = HostName(hostname_str)
    # Per-host stubbed autocheck services, keyed by host name.
    autochecks = {
        "ping-host": [
            Service(
                CheckPluginName("smart_temp"),
                "bla",
                "Temperature SMART bla",
                {},
            )
        ],
        "autocheck-overwrite": [
            Service(
                CheckPluginName("smart_temp"),
                "/dev/sda",
                "Temperature SMART /dev/sda",
                {"is_autocheck": True},
            ),
            Service(
                CheckPluginName("smart_temp"),
                "/dev/sdb",
                "Temperature SMART /dev/sdb",
                {"is_autocheck": True},
            ),
        ],
        "ignore-not-existing-checks": [
            Service(
                CheckPluginName("bla_blub"),
                "ITEM",
                "Blub ITEM",
                {},
            ),
        ],
        "node1": [
            Service(
                CheckPluginName("smart_temp"),
                "auto-clustered",
                "Temperature SMART auto-clustered",
                {},
            ),
            Service(
                CheckPluginName("smart_temp"),
                "auto-not-clustered",
                "Temperature SMART auto-not-clustered",
                {},
            ),
        ],
    }

    ts = Scenario().add_host(hostname, tags={"criticality": "test"})
    ts.add_host("ping-host", tags={"agent": "no-agent"})
    ts.add_host("node1")
    ts.add_cluster("cluster1", nodes=["node1"])
    ts.set_option(
        "static_checks",
        {
            "temperature": [
                (("smart.temp", "/dev/sda", {}), [], ["no-autochecks", "autocheck-overwrite"]),
                (("blub.bla", "ITEM", {}), [], ["ignore-not-existing-checks"]),
                (("smart.temp", "ITEM1", {}), [], ["ignore-disabled-rules"], {"disabled": True}),
                (("smart.temp", "ITEM2", {}), [], ["ignore-disabled-rules"]),
                (("smart.temp", "/dev/sda", {"rule": 1}), [], ["static-check-overwrite"]),
                (("smart.temp", "/dev/sda", {"rule": 2}), [], ["static-check-overwrite"]),
                (("smart.temp", "static-node1", {}), [], ["node1"]),
                (("smart.temp", "static-cluster", {}), [], ["cluster1"]),
            ]
        },
    )
    ts.set_ruleset(
        "clustered_services",
        [
            ([], ["node1"], ["Temperature SMART auto-clustered$"]),
        ],
    )
    config_cache = ts.apply(monkeypatch)
    # Replace autocheck file loading with the stub table defined above.
    monkeypatch.setattr(config_cache, "get_autochecks_of", lambda h: autochecks.get(h, []))

    assert check_table.get_check_table(hostname) == expected_result
Ejemplo n.º 13
0
def test_get_check_table_of_static_check(
    monkeypatch: MonkeyPatch, hostname_str: str, expected_result: List[ServiceID]
) -> None:
    """Check that static check rules produce the expected service IDs.

    Covers list-valued (time-specific), dict-valued and None parameters
    for the df check on three different hosts.
    """
    hostname = HostName(hostname_str)
    # Per-host stubbed services, keyed by host name.
    static_checks = {
        "df_host": [
            Service(
                CheckPluginName("df"),
                "/snap/core/9066",
                "Filesystem /snap/core/9066",
                [
                    {"tp_values": [("24X7", {"inodes_levels": None})], "tp_default_value": {}},
                    {
                        "trend_range": 24,
                        "show_levels": "onmagic",
                        "inodes_levels": (10.0, 5.0),
                        "magic_normsize": 20,
                        "show_inodes": "onlow",
                        "levels": (80.0, 90.0),
                        "show_reserved": False,
                        "levels_low": (50.0, 60.0),
                        "trend_perfdata": True,
                    },
                ],
            ),
        ],
        "df_host_1": [
            Service(
                CheckPluginName("df"),
                "/snap/core/9067",
                "Filesystem /snap/core/9067",
                {
                    "trend_range": 24,
                    "show_levels": "onmagic",
                    "inodes_levels": (10.0, 5.0),
                    "magic_normsize": 20,
                    "show_inodes": "onlow",
                    "levels": (80.0, 90.0),
                    "tp_default_value": {"levels": (87.0, 90.0)},
                    "show_reserved": False,
                    "tp_values": [("24X7", {"inodes_levels": None})],
                    "levels_low": (50.0, 60.0),
                    "trend_perfdata": True,
                },
            )
        ],
        "df_host_2": [
            Service(CheckPluginName("df"), "/snap/core/9068", "Filesystem /snap/core/9068", None)
        ],
    }

    ts = Scenario().add_host(hostname, tags={"criticality": "test"})
    ts.add_host("df_host")
    ts.add_host("df_host_1")
    ts.add_host("df_host_2")
    ts.set_option(
        "static_checks",
        {
            "filesystem": [
                (
                    (
                        "df",
                        "/snap/core/9066",
                        [
                            {
                                "tp_values": [("24X7", {"inodes_levels": None})],
                                "tp_default_value": {},
                            },
                            {
                                "trend_range": 24,
                                "show_levels": "onmagic",
                                "inodes_levels": (10.0, 5.0),
                                "magic_normsize": 20,
                                "show_inodes": "onlow",
                                "levels": (80.0, 90.0),
                                "show_reserved": False,
                                "levels_low": (50.0, 60.0),
                                "trend_perfdata": True,
                            },
                        ],
                    ),
                    [],
                    ["df_host"],
                ),
                (
                    (
                        "df",
                        "/snap/core/9067",
                        [
                            {
                                "tp_values": [("24X7", {"inodes_levels": None})],
                                "tp_default_value": {},
                            },
                            {
                                "trend_range": 24,
                                "show_levels": "onmagic",
                                "inodes_levels": (10.0, 5.0),
                                "magic_normsize": 20,
                                "show_inodes": "onlow",
                                "levels": (80.0, 90.0),
                                "show_reserved": False,
                                "levels_low": (50.0, 60.0),
                                "trend_perfdata": True,
                            },
                        ],
                    ),
                    [],
                    ["df_host_1"],
                ),
                (("df", "/snap/core/9068", None), [], ["df_host_2"]),
            ],
        },
    )

    config_cache = ts.apply(monkeypatch)
    # Replace autocheck file loading with the stub table defined above.
    monkeypatch.setattr(config_cache, "get_autochecks_of", lambda h: static_checks.get(h, []))

    assert list(check_table.get_check_table(hostname).keys()) == expected_result
Ejemplo n.º 14
0
def _get_aggregated_result(
    *,
    parsed_sections_broker: ParsedSectionsBroker,
    hostname: HostName,
    ipaddress: Optional[HostAddress],
    service: Service,
    used_params: LegacyCheckParameters,
) -> AggregatedResult:
    """Run a legacy check function for *service* and wrap the outcome.

    Returns CHECK_NOT_IMPLEMENTED when no legacy plugin or check function
    is registered, RECEIVED_NO_DATA when no section content is available,
    and otherwise the sanitized result of calling the check function.
    Crashes of the check function are converted into a crash-dump result
    (state 3) unless debug mode is enabled.
    """
    legacy_check_plugin_name = config.legacy_check_plugin_names.get(service.check_plugin_name)
    if legacy_check_plugin_name is None:
        return AggregatedResult(
            submit=True,
            data_received=True,
            result=CHECK_NOT_IMPLEMENTED,
            cache_info=None,
        )

    check_function = config.check_info[legacy_check_plugin_name].get("check_function")
    if check_function is None:
        return AggregatedResult(
            submit=True,
            data_received=True,
            result=CHECK_NOT_IMPLEMENTED,
            cache_info=None,
        )

    # Legacy names are "<section>.<subcheck>"; section info is keyed by the
    # first part.
    section_name = legacy_check_plugin_name.split('.')[0]
    main_check_info = config.check_info.get(section_name, {})

    section_content = None
    multi_host_sections = _MultiHostSections(parsed_sections_broker)
    # Decide whether the data comes from the management board or the host.
    mgmt_board_info = main_check_info.get("management_board") or LEGACY_HOST_PRECEDENCE
    source_type = SourceType.MANAGEMENT if mgmt_board_info == LEGACY_MGMT_ONLY else SourceType.HOST
    try:
        section_content = multi_host_sections.get_section_content(
            HostKey(hostname, ipaddress, source_type),
            mgmt_board_info,
            section_name,
            for_discovery=False,
            cluster_node_keys=config.get_config_cache().get_clustered_service_node_keys(
                hostname,
                source_type,
                service.description,
            ),
            check_legacy_info=config.check_info,
        )

        if section_content is None:  # No data for this check type
            return AggregatedResult(
                submit=False,
                data_received=False,
                result=RECEIVED_NO_DATA,
                cache_info=None,
            )

        # Call the actual check function
        item_state.reset_wrapped_counters()

        raw_result = check_function(service.item, used_params, section_content)
        result = _sanitize_check_result(raw_result)
        item_state.raise_counter_wrap()

    except item_state.MKCounterWrapped as exc:
        # handle check implementations that do not yet support the
        # handling of wrapped counters via exception on their own.
        # Do not submit any check result in that case:
        return AggregatedResult(
            submit=False,
            data_received=True,
            result=(0, f"Cannot compute check result: {exc}\n", []),
            cache_info=None,
        )

    except MKTimeout:
        # Timeouts abort the whole checking cycle; never swallow them here.
        raise

    except Exception:
        if cmk.utils.debug.enabled():
            raise
        # Any other crash of the check function becomes a crash-dump
        # result with state UNKNOWN (3).
        result = 3, cmk.base.crash_reporting.create_check_crash_dump(
            host_name=hostname,
            service_name=service.description,
            plugin_name=service.check_plugin_name,
            plugin_kwargs={
                "item": service.item,
                "params": used_params,
                "section_content": section_content
            },
            is_manual=service.id() in check_table.get_check_table(hostname, skip_autochecks=True),
        ), []

    return AggregatedResult(
        submit=True,
        data_received=True,
        result=result,
        cache_info=multi_host_sections.legacy_determine_cache_info(SectionName(section_name)),
    )
Ejemplo n.º 15
0
# TODO: This misses a lot of cases
# - different get_check_table arguments
@pytest.mark.usefixtures("fix_register")
@pytest.mark.parametrize(
    "hostname_str,expected_result",
    [
        ("empty-host", {}),
        # Skip the autochecks automatically for ping hosts
        ("ping-host", {}),
        (
            "no-autochecks",
            {
                (CheckPluginName("smart_temp"), "/dev/sda"): Service(
                    check_plugin_name=CheckPluginName("smart_temp"),
                    item="/dev/sda",
                    parameters={"levels": (35, 40)},
                    description="Temperature SMART /dev/sda",
                ),
            },
        ),
        # Static checks overwrite the autocheck definitions
        (
            "autocheck-overwrite",
            {
                (CheckPluginName("smart_temp"), "/dev/sda"): Service(
                    check_plugin_name=CheckPluginName("smart_temp"),
                    item="/dev/sda",
                    parameters={"levels": (35, 40)},
                    description="Temperature SMART /dev/sda",
                ),
                (CheckPluginName("smart_temp"), "/dev/sdb"): Service(
Ejemplo n.º 16
0
import cmk.base.check_table as check_table
from cmk.base.check_utils import Service


# TODO: This misses a lot of cases
# - different get_check_table arguments
@pytest.mark.parametrize(
    "hostname,expected_result",
    [
        ("empty-host", {}),
        # Skip the autochecks automatically for ping hosts
        ("ping-host", {}),
        ("no-autochecks", {
            ('smart.temp', '/dev/sda'): Service(
                check_plugin_name="smart.temp",
                item=u"/dev/sda",
                parameters={'levels': (35, 40)},
                description=u'Temperature SMART /dev/sda',
            ),
        }),
        # Static checks overwrite the autocheck definitions
        ("autocheck-overwrite", {
            ('smart.temp', '/dev/sda'): Service(
                check_plugin_name="smart.temp",
                item=u"/dev/sda",
                parameters={'levels': (35, 40)},
                description=u'Temperature SMART /dev/sda',
            ),
            ('smart.temp', '/dev/sdb'): Service(
                check_plugin_name='smart.temp',
                item=u'/dev/sdb',
                parameters={'is_autocheck': True},
Ejemplo n.º 17
0
def get_aggregated_result(
    parsed_sections_broker: ParsedSectionsBroker,
    host_config: config.HostConfig,
    ipaddress: Optional[HostAddress],
    service: Service,
    plugin: Optional[checking_classes.CheckPlugin],
    params_function: Callable[[], Parameters],
    *,
    value_store_manager: item_state.ValueStoreManager,
) -> AggregatedResult:
    """Run the check function and aggregate the subresults

    This function is also called during discovery.

    Args:
        parsed_sections_broker: Access to the parsed section data.
        host_config: Configuration of the host the service belongs to.
        ipaddress: Address used to key the section lookup (may be None).
        service: The service (plugin name, item, parameters) being checked.
        plugin: The check plugin; ``None`` yields a "not implemented" result.
        params_function: Lazily evaluates the effective check parameters.
        value_store_manager: Provides the per-service value store namespace.

    Returns:
        An ``AggregatedResult`` carrying: whether to submit to the core,
        whether data was received, the (possibly fallback) check result,
        and cache info of the sections used.
    """
    if plugin is None:
        # No plugin implementation available -> report, but still submit.
        return AggregatedResult(
            submit=True,
            data_received=True,
            result=CHECK_NOT_IMPLEMENTED,
            cache_info=None,
        )

    # Clusters use the dedicated cluster check function of the plugin.
    check_function = (plugin.cluster_check_function
                      if host_config.is_cluster else plugin.check_function)

    source_type = (SourceType.MANAGEMENT
                   if service.check_plugin_name.is_management_name() else SourceType.HOST)

    config_cache = config.get_config_cache()

    # Initialized before the try block so the crash-dump handler below can
    # always reference it, even if the lookup itself raises.
    kwargs = {}
    try:
        kwargs = parsed_sections_broker.get_section_cluster_kwargs(
            config_cache.get_clustered_service_node_keys(
                host_config.hostname,
                source_type,
                service.description,
            ) or [],
            plugin.sections,
        ) if host_config.is_cluster else parsed_sections_broker.get_section_kwargs(
            HostKey(host_config.hostname, ipaddress, source_type),
            plugin.sections,
        )

        if not kwargs and not service.check_plugin_name.is_management_name():
            # in 1.6 some plugins were discovered for management boards, but with
            # the regular host plugins name. In this case retry with the source type
            # forced to MANAGEMENT:
            kwargs = parsed_sections_broker.get_section_cluster_kwargs(
                config_cache.get_clustered_service_node_keys(
                    host_config.hostname,
                    SourceType.MANAGEMENT,
                    service.description,
                ) or [],
                plugin.sections,
            ) if host_config.is_cluster else parsed_sections_broker.get_section_kwargs(
                HostKey(host_config.hostname, ipaddress, SourceType.MANAGEMENT),
                plugin.sections,
            )

        if not kwargs:  # no data found
            return AggregatedResult(
                submit=False,
                data_received=False,
                result=RECEIVED_NO_DATA,
                cache_info=None,
            )

        if service.item is not None:
            kwargs["item"] = service.item

        # Only pass "params" if the plugin declares defaults; plugins without
        # default parameters take no params argument.
        if plugin.check_default_parameters is not None:
            kwargs["params"] = params_function()

        # Run the check with host/service context set and the value store
        # scoped to this service.
        with plugin_contexts.current_host(host_config.hostname), \
            plugin_contexts.current_service(service), \
            value_store_manager.namespace(service.id()):
            result = _aggregate_results(check_function(**kwargs))

    except (item_state.MKCounterWrapped, checking_classes.IgnoreResultsError) as e:
        # Counter not yet initialized / plugin asked to ignore: report OK
        # without submitting to the core.
        msg = str(e) or "No service summary available"
        return AggregatedResult(
            submit=False,
            data_received=True,
            result=(0, msg, []),
            cache_info=None,
        )

    except MKTimeout:
        # Timeouts must propagate; they are handled by the caller.
        raise

    except Exception:
        if cmk.utils.debug.enabled():
            raise
        # Any other failure becomes an UNKNOWN result with a crash dump.
        table = check_table.get_check_table(host_config.hostname, skip_autochecks=True)
        result = 3, cmk.base.crash_reporting.create_check_crash_dump(
            host_name=host_config.hostname,
            service_name=service.description,
            plugin_name=service.check_plugin_name,
            plugin_kwargs=kwargs,
            is_manual=service.id() in table,
        ), []

    return AggregatedResult(
        submit=True,
        data_received=True,
        result=result,
        cache_info=parsed_sections_broker.get_cache_info(plugin.sections),
    )
Ejemplo n.º 18
0
        (u"[]", []),
        (u"", []),
        (u"@", []),
        (u"[abc123]", []),
        # Dict: Allow non string items
        (
            u"""[
  {'check_plugin_name': 'df', 'item': u'123', 'parameters': {}, 'service_labels': {}},
]""",
            [
                Service(
                    CheckPluginName('df'), '123', u"", {
                        'inodes_levels': (10.0, 5.0),
                        'levels': (80.0, 90.0),
                        'levels_low': (50.0, 60.0),
                        'magic_normsize': 20,
                        'show_inodes': 'onlow',
                        'show_levels': 'onmagic',
                        'show_reserved': False,
                        'trend_perfdata': True,
                        'trend_range': 24
                    }),
            ],
        ),
        # Dict: Exception on invalid check type
        (
            u"""[
  {'check_plugin_name': 123, 'item': 'abc', 'parameters': {}, 'service_labels': {}},
]""",
            MKGeneralException,
        ),
        # Dict: Exception on name reference behaves like SyntaxError
Ejemplo n.º 19
0
def test_get_check_table(monkeypatch, hostname, expected_result):
    """Resolve the check table for various host scenarios.

    ``hostname`` and ``expected_result`` come from a
    ``pytest.mark.parametrize`` decorator outside this excerpt — TODO
    confirm against the full test module.  Each case exercises one aspect
    of combining autochecks, static checks and clustering rules.
    """
    # Fake autochecks per host name; patched into the config cache below so
    # no autocheck files need to exist on disk.
    autochecks = {
        "ping-host": [
            Service(
                CheckPluginName("smart_temp"),
                "bla",
                u'Temperature SMART bla',
                {},
            )
        ],
        "autocheck-overwrite": [
            Service(
                CheckPluginName('smart_temp'),
                '/dev/sda',
                u'Temperature SMART /dev/sda',
                {"is_autocheck": True},
            ),
            Service(
                CheckPluginName('smart_temp'),
                '/dev/sdb',
                u'Temperature SMART /dev/sdb',
                {"is_autocheck": True},
            ),
        ],
        "ignore-not-existing-checks": [
            Service(
                CheckPluginName("bla_blub"),
                "ITEM",
                u'Blub ITEM',
                {},
            ),
        ],
        "node1": [
            Service(
                CheckPluginName("smart_temp"),
                "auto-clustered",
                u"Temperature SMART auto-clustered",
                {},
            ),
            Service(
                CheckPluginName("smart_temp"),
                "auto-not-clustered",
                u'Temperature SMART auto-not-clustered',
                {},
            )
        ],
    }

    # Build the test scenario: the parametrized host plus the fixed hosts
    # referenced by the static check rules below.
    ts = Scenario().add_host(hostname, tags={"criticality": "test"})
    ts.add_host("ping-host", tags={"agent": "no-agent"})
    ts.add_host("node1")
    ts.add_cluster("cluster1", nodes=["node1"])
    ts.set_option(
        "static_checks",
        {
            "temperature": [
                (('smart.temp', '/dev/sda', {}), [],
                 ["no-autochecks", "autocheck-overwrite"]),
                (('blub.bla', 'ITEM', {}), [], ["ignore-not-existing-checks"]),
                (('smart.temp', 'ITEM1', {}), [], ["ignore-disabled-rules"], {
                    "disabled": True
                }),
                (('smart.temp', 'ITEM2', {}), [], ["ignore-disabled-rules"]),
                (('smart.temp', '/dev/sda', {
                    "rule": 1
                }), [], ["static-check-overwrite"]),
                (('smart.temp', '/dev/sda', {
                    "rule": 2
                }), [], ["static-check-overwrite"]),
                (('smart.temp', 'static-node1', {}), [], ["node1"]),
                (('smart.temp', 'static-cluster', {}), [], ["cluster1"]),
            ]
        },
    )
    # Only services matching this description pattern are clustered away
    # from node1.
    ts.set_ruleset("clustered_services", [
        ([], ['node1'], [u'Temperature SMART auto-clustered$']),
    ])
    config_cache = ts.apply(monkeypatch)
    monkeypatch.setattr(config_cache, "get_autochecks_of",
                        lambda h: autochecks.get(h, []))

    assert check_table.get_check_table(hostname) == expected_result
Ejemplo n.º 20
0
def get_aggregated_result(
    multi_host_sections: MultiHostSections,
    host_config: config.HostConfig,
    ipaddress: Optional[HostAddress],
    service: Service,
    plugin: Optional[checking_classes.CheckPlugin],
    params_function: Callable[[], Parameters],
) -> Tuple[bool, bool, ServiceCheckResult]:
    """Execute the check function of *service* and aggregate its subresults.

    This function is also called during discovery.

    Returns a triple:
       bool: should the result be submitted to the core
       bool: did we receive data for the plugin
       ServiceCheckResult: the aggregated result as returned by the plugin,
           or a fallback
    """
    if plugin is None:
        return False, True, CHECK_NOT_IMPLEMENTED

    if host_config.is_cluster:
        check_function = plugin.cluster_check_function
    else:
        check_function = plugin.check_function

    if service.check_plugin_name.is_management_name():
        source_type = SourceType.MANAGEMENT
    else:
        source_type = SourceType.HOST

    config_cache = config.get_config_cache()

    def _collect_section_kwargs(effective_source_type):
        # Fetch the parsed section data for the given source type, honouring
        # clustering: clusters aggregate the sections of their nodes.
        if host_config.is_cluster:
            return multi_host_sections.get_section_cluster_kwargs(
                config_cache.get_clustered_service_node_keys(
                    host_config.hostname,
                    effective_source_type,
                    service.description,
                    ip_lookup.lookup_ip_address,
                ) or [],
                plugin.sections,
            )
        return multi_host_sections.get_section_kwargs(
            HostKey(host_config.hostname, ipaddress, effective_source_type),
            plugin.sections,
        )

    kwargs = {}
    try:
        kwargs = _collect_section_kwargs(source_type)

        if not kwargs and not service.check_plugin_name.is_management_name():
            # in 1.6 some plugins were discovered for management boards, but
            # with the regular host plugins name. In this case retry with the
            # source type forced to MANAGEMENT:
            kwargs = _collect_section_kwargs(SourceType.MANAGEMENT)

        if not kwargs:  # no data found
            return False, False, RECEIVED_NO_DATA

        if service.item is not None:
            kwargs["item"] = service.item

        if plugin.check_default_parameters is not None:
            kwargs["params"] = params_function()

        with value_store.context(plugin.name, service.item):
            result = _aggregate_results(check_function(**kwargs))

    except (item_state.MKCounterWrapped, checking_classes.IgnoreResultsError) as e:
        msg = str(e) or "No service summary available"
        return False, True, (0, msg, [])

    except MKTimeout:
        raise

    except Exception:
        if cmk.utils.debug.enabled():
            raise
        result = 3, cmk.base.crash_reporting.create_check_crash_dump(
            host_config.hostname,
            service.check_plugin_name,
            kwargs,
            is_manual_check(host_config.hostname, service.id()),
            service.description,
        ), []

    return True, True, result
Ejemplo n.º 21
0
def test_get_check_table_of_static_check(monkeypatch, hostname,
                                         expected_result):
    """Check table resolution for statically configured (enforced) services.

    ``hostname`` and ``expected_result`` come from a
    ``pytest.mark.parametrize`` decorator outside this excerpt — TODO
    confirm against the full test module.  Covers time-period parameter
    lists, a plain parameter dict and ``None`` parameters.
    """
    # Fake autochecks per host; each mirrors one of the static check rules
    # configured below so the static definition must win.
    static_checks = {
        "df_host": [
            Service(CheckPluginName('df'), '/snap/core/9066',
                    u'Filesystem /snap/core/9066',
                    [{
                        'tp_values': [('24X7', {
                            'inodes_levels': None
                        })],
                        'tp_default_value': {}
                    }, {
                        'trend_range': 24,
                        'show_levels': 'onmagic',
                        'inodes_levels': (10.0, 5.0),
                        'magic_normsize': 20,
                        'show_inodes': 'onlow',
                        'levels': (80.0, 90.0),
                        'show_reserved': False,
                        'levels_low': (50.0, 60.0),
                        'trend_perfdata': True
                    }]),
        ],
        "df_host_1": [
            Service(
                CheckPluginName('df'), '/snap/core/9067',
                u'Filesystem /snap/core/9067', {
                    'trend_range': 24,
                    'show_levels': 'onmagic',
                    'inodes_levels': (10.0, 5.0),
                    'magic_normsize': 20,
                    'show_inodes': 'onlow',
                    'levels': (80.0, 90.0),
                    'tp_default_value': {
                        'levels': (87.0, 90.0)
                    },
                    'show_reserved': False,
                    'tp_values': [('24X7', {
                        'inodes_levels': None
                    })],
                    'levels_low': (50.0, 60.0),
                    'trend_perfdata': True
                })
        ],
        "df_host_2": [
            Service(CheckPluginName('df'), '/snap/core/9068',
                    u'Filesystem /snap/core/9068', None)
        ],
    }

    ts = Scenario().add_host(hostname, tags={"criticality": "test"})
    ts.add_host("df_host")
    ts.add_host("df_host_1")
    ts.add_host("df_host_2")
    ts.set_option(
        "static_checks",
        {
            "filesystem": [
                (('df', '/snap/core/9066', [{
                    'tp_values': [('24X7', {
                        'inodes_levels': None
                    })],
                    'tp_default_value': {}
                }, {
                    'trend_range': 24,
                    'show_levels': 'onmagic',
                    'inodes_levels': (10.0, 5.0),
                    'magic_normsize': 20,
                    'show_inodes': 'onlow',
                    'levels': (80.0, 90.0),
                    'show_reserved': False,
                    'levels_low': (50.0, 60.0),
                    'trend_perfdata': True
                }]), [], ["df_host"]),
                (('df', '/snap/core/9067', [{
                    'tp_values': [('24X7', {
                        'inodes_levels': None
                    })],
                    'tp_default_value': {}
                }, {
                    'trend_range': 24,
                    'show_levels': 'onmagic',
                    'inodes_levels': (10.0, 5.0),
                    'magic_normsize': 20,
                    'show_inodes': 'onlow',
                    'levels': (80.0, 90.0),
                    'show_reserved': False,
                    'levels_low': (50.0, 60.0),
                    'trend_perfdata': True
                }]), [], ["df_host_1"]),
                (('df', '/snap/core/9068', None), [], ["df_host_2"]),
            ],
        },
    )

    config_cache = ts.apply(monkeypatch)
    monkeypatch.setattr(config_cache, "get_autochecks_of",
                        lambda h: static_checks.get(h, []))

    # Only the service IDs (keys) are compared here, not the parameters.
    assert list(
        check_table.get_check_table(hostname).keys()) == expected_result
Ejemplo n.º 22
0
        ("@", []),
        ("[abc123]", []),
        # Dict: Allow non string items
        (
            """[
  {'check_plugin_name': 'df', 'item': u'123', 'parameters': {}, 'service_labels': {}},
]""",
            [
                Service(
                    CheckPluginName("df"),
                    "123",
                    "",
                    {
                        "inodes_levels": (10.0, 5.0),
                        "levels": (80.0, 90.0),
                        "levels_low": (50.0, 60.0),
                        "magic_normsize": 20,
                        "show_inodes": "onlow",
                        "show_levels": "onmagic",
                        "show_reserved": False,
                        "trend_perfdata": True,
                        "trend_range": 24,
                    },
                ),
            ],
        ),
        # Dict: Exception on invalid check type
        (
            """[
  {'check_plugin_name': 123, 'item': 'abc', 'parameters': {}, 'service_labels': {}},
]""",
            MKGeneralException,
Ejemplo n.º 23
0
        (u"[]", []),
        (u"", []),
        (u"@", []),
        (u"[abc123]", []),
        # Dict: Allow non string items
        (
            u"""[
  {'check_plugin_name': 'df', 'item': 123, 'parameters': {}, 'service_labels': {}},
]""",
            [
                Service(
                    'df', 123, u"", {
                        'inodes_levels': (10.0, 5.0),
                        'levels': (80.0, 90.0),
                        'levels_low': (50.0, 60.0),
                        'magic_normsize': 20,
                        'show_inodes': 'onlow',
                        'show_levels': 'onmagic',
                        'show_reserved': False,
                        'trend_perfdata': True,
                        'trend_range': 24
                    }),
            ],
        ),
        # Dict: Exception on invalid check type
        (
            u"""[
  {'check_plugin_name': 123, 'item': 'abc', 'parameters': {}, 'service_labels': {}},
]""",
            MKGeneralException,
        ),
        # Dict: Regular processing
Ejemplo n.º 24
0
def get_aggregated_result(
    parsed_sections_broker: ParsedSectionsBroker,
    host_config: config.HostConfig,
    ipaddress: Optional[HostAddress],
    service: Service,
    plugin: Optional[checking_classes.CheckPlugin],
    timespecific_parameters: Union[LegacyCheckParameters,
                                   TimespecificParameters],
    *,
    value_store_manager: value_store.ValueStoreManager,
    persist_value_store_changes: bool,
) -> AggregatedResult:
    """Run the check function and aggregate the subresults

    This function is also called during discovery.

    Args:
        parsed_sections_broker: Access to the parsed section data.
        host_config: Configuration of the host the service belongs to.
        ipaddress: Address used to key the section lookup (may be None).
        service: The service (plugin name, item, parameters) being checked.
        plugin: The check plugin; ``None`` yields a "not implemented" result.
        timespecific_parameters: Raw parameters, possibly (partially)
            time-specific; resolved via
            ``_final_read_only_check_parameters``.
        value_store_manager: Provides the per-service value store namespace.
        persist_value_store_changes: Forwarded to the cluster check wrapper.

    Returns:
        An ``AggregatedResult`` carrying: whether to submit to the core,
        whether data was received, the (possibly fallback) check result,
        and cache info of the sections used.
    """
    if plugin is None:
        return AggregatedResult(
            submit=True,
            data_received=True,
            result=ServiceCheckResult.check_not_implemented(),
            cache_info=None,
        )

    config_cache = config.get_config_cache()
    # Clusters run the plugin through a wrapper implementing the configured
    # cluster mode; plain hosts call the plugin's check function directly.
    check_function = (_cluster_modes.get_cluster_check_function(
        *config_cache.get_clustered_service_configuration(
            host_config.hostname,
            service.description,
        ),
        plugin=plugin,
        service_id=service.id(),
        persist_value_store_changes=persist_value_store_changes,
    ) if host_config.is_cluster else plugin.check_function)

    # Section lookup (including the pre-2.0 management board fallback) is
    # handled by the helper; error_result is the fallback if nothing came in.
    section_kws, error_result = _get_monitoring_data_kwargs_handle_pre20_services(
        parsed_sections_broker,
        host_config,
        config_cache,
        ipaddress,
        service,
        plugin.sections,
    )
    if not section_kws:  # no data found
        return AggregatedResult(
            submit=False,
            data_received=False,
            result=error_result,
            cache_info=None,
        )

    # Only pass "item"/"params" when the plugin takes them.
    item_kw = {} if service.item is None else {"item": service.item}
    params_kw = ({} if plugin.check_default_parameters is None else {
        "params":
        _final_read_only_check_parameters(timespecific_parameters)
    })

    try:
        # Run the check with host/service context set and the value store
        # scoped to this service.
        with plugin_contexts.current_host(
                host_config.hostname), plugin_contexts.current_service(
                    service), value_store_manager.namespace(service.id()):
            result = _aggregate_results(
                check_function(
                    **item_kw,
                    **params_kw,
                    **section_kws,
                ))

    except (item_state.MKCounterWrapped,
            checking_classes.IgnoreResultsError) as e:
        # Counter not yet initialized / plugin asked to ignore: report OK
        # without submitting to the core.
        msg = str(e) or "No service summary available"
        return AggregatedResult(
            submit=False,
            data_received=True,
            result=ServiceCheckResult(output=msg),
            cache_info=None,
        )
    except MKTimeout:
        # Timeouts must propagate; they are handled by the caller.
        raise
    except Exception:
        if cmk.utils.debug.enabled():
            raise
        # Any other failure becomes an UNKNOWN result with a crash dump.
        table = check_table.get_check_table(host_config.hostname,
                                            skip_autochecks=True)
        result = ServiceCheckResult(
            3,
            cmk.base.crash_reporting.create_check_crash_dump(
                host_name=host_config.hostname,
                service_name=service.description,
                plugin_name=service.check_plugin_name,
                plugin_kwargs={
                    **item_kw,
                    **params_kw,
                    **section_kws
                },
                is_manual=service.id() in table,
            ),
        )

    return AggregatedResult(
        submit=True,
        data_received=True,
        result=result,
        cache_info=parsed_sections_broker.get_cache_info(plugin.sections),
    )
Ejemplo n.º 25
0
def test_parse_diskstat_predictive(mocker: MockerFixture):
    """Regression test for SUP-5924: diskstat check with predictive levels.

    Predictive level computation is mocked out; the test verifies the full
    result/metric sequence of ``check_diskstat`` on a second sample.
    """
    # SUP-5924
    # Raw agent output: timestamp line, per-device counter rows, and the
    # dmsetup mapping section.
    DATA = [
        ["1617784511"],
        [
            "259",
            "0",
            "nvme0n1",
            "131855",
            "42275",
            "8019303",
            "34515",
            "386089",
            "166344",
            "13331634",
            "138121",
            "0",
            "185784",
            "177210",
            "0",
            "0",
            "0",
            "0",
            "41445",
            "4574",
        ],
        [
            "53",
            "0",
            "dm-0",
            "172574",
            "0",
            "7980626",
            "74812",
            "548159",
            "0",
            "12443656",
            "706944",
            "0",
            "189576",
            "781756",
            "0",
            "0",
            "0",
            "0",
            "0",
            "0",
        ],
        [
            "53",
            "1",
            "dm-1",
            "171320",
            "0",
            "7710074",
            "74172",
            "546564",
            "0",
            "12514416",
            "674352",
            "0",
            "186452",
            "748524",
            "0",
            "0",
            "0",
            "0",
            "0",
            "0",
        ],
        [
            "53",
            "2",
            "dm-2",
            "194",
            "0",
            "8616",
            "68",
            "0",
            "0",
            "0",
            "0",
            "0",
            "72",
            "68",
            "0",
            "0",
            "0",
            "0",
            "0",
            "0",
        ],
        ["[dmsetup_info]"],
        ["vme0n1p3_crypt", "253:0"],
        ["buntu--vg-swap_1", "253:2", "ubuntu-vg", "swap_1"],
        ["buntu--vg-root", "253:1", "ubuntu-vg", "root"],
    ]

    # Check parameters: "read" uses predictive levels, the rest fixed levels.
    PARAMS = {
        "average": 300,
        "latency": (80.0, 160.0),
        "read": {
            "horizon": 90,
            "levels_lower": ("absolute", (2.0, 4.0)),
            "levels_upper": ("relative", (10.0, 20.0)),
            "levels_upper_min": (10.0, 15.0),
            "period": "wday",
        },
        "read_ios": (400.0, 600.0),
        "read_latency": (80.0, 160.0),
        "read_wait": (30.0, 50.0),
        "utilization": (80.0, 90.0),
        "write": (50.0, 100.0),
        "write_ios": (300.0, 400.0),
        "write_latency": (80.0, 160.0),
        "write_wait": (30.0, 50.0),
    }

    # Replace the prediction backend with fixed fake levels.
    mocker.patch(
        "cmk.base.check_api._prediction.get_levels", return_value=(None, (2.1, 4.1, None, None))
    )
    # A dummy service is needed only to establish the plugin context below.
    dummy_service: Service[LegacyCheckParameters] = Service(
        CheckPluginName("unittest_sd"),
        parameters={},
        item="item-nvme0n1",
        description="unittest_sd_description",
    )
    with plugin_contexts.current_host("unittest-hn"), plugin_contexts.current_service(
        dummy_service
    ):

        # First call raises — presumably because rate counters need two
        # samples before values can be computed (TODO confirm).
        with pytest.raises(IgnoreResultsError):
            list(diskstat.check_diskstat("nvme0n1", PARAMS, diskstat.parse_diskstat(DATA), None))
        # Advance the timestamp by one second to simulate the second sample.
        DATA[0][0] = "1617784512"
        assert list(
            diskstat.check_diskstat(
                "nvme0n1",
                PARAMS,
                diskstat.parse_diskstat(DATA),
                None,
            )
        ) == [
            Result(state=state.OK, notice="All values averaged over 5 minutes 0 seconds"),
            Result(state=state.OK, notice="Utilization: 0%"),
            Metric("disk_utilization", 0.0, levels=(0.8, 0.9)),
            Result(state=state.OK, summary="Read: 0.00 B/s (no reference for prediction yet)"),
            Metric("disk_read_throughput", 0.0, levels=(2.1, 4.1)),  # fake levels are quite low
            Result(state=state.OK, summary="Write: 0.00 B/s"),
            Metric("disk_write_throughput", 0.0, levels=(50000000.0, 100000000.0)),
            Result(state=state.OK, notice="Average wait: 0 seconds"),
            Metric("disk_average_wait", 0.0),
            Result(state=state.OK, notice="Average read wait: 0 seconds"),
            Metric("disk_average_read_wait", 0.0, levels=(0.03, 0.05)),
            Result(state=state.OK, notice="Average write wait: 0 seconds"),
            Metric("disk_average_write_wait", 0.0, levels=(0.03, 0.05)),
            Result(state=state.OK, notice="Average queue length: 0.00"),
            Metric("disk_queue_length", 0.0),
            Result(state=state.OK, notice="Read operations: 0.00/s"),
            Metric("disk_read_ios", 0.0, levels=(400.0, 600.0)),
            Result(state=state.OK, notice="Write operations: 0.00/s"),
            Metric("disk_write_ios", 0.0, levels=(300.0, 400.0)),
            Result(state=state.OK, summary="Latency: 0 seconds"),
            Metric("disk_latency", 0.0, levels=(0.08, 0.16)),
            Metric("disk_average_read_request_size", 0.0),
            Metric("disk_average_request_size", 0.0),
            Metric("disk_average_write_request_size", 0.0),
        ]
Ejemplo n.º 26
0
def get_aggregated_result(
    parsed_sections_broker: ParsedSectionsBroker,
    host_config: config.HostConfig,
    ipaddress: Optional[HostAddress],
    service: Service,
    plugin: Optional[checking_classes.CheckPlugin],
    # misleading name: these are params that *may* be *partially* time specific
    timespecific_parameters: LegacyCheckParameters,
    *,
    value_store_manager: value_store.ValueStoreManager,
    persist_value_store_changes: bool,
) -> AggregatedResult:
    """Run the check function and aggregate the subresults

    This function is also called during discovery.

    Returns an ``AggregatedResult`` carrying: whether to submit to the core,
    whether data was received, the (possibly fallback) check result, and
    cache info of the sections used.
    """
    if plugin is None:
        return AggregatedResult(
            submit=True,
            data_received=True,
            result=CHECK_NOT_IMPLEMENTED,
            cache_info=None,
        )

    config_cache = config.get_config_cache()
    # Clusters run the plugin through a wrapper implementing the configured
    # cluster mode; plain hosts call the plugin's check function directly.
    check_function = (
        _cluster_modes.get_cluster_check_function(
            *config_cache.get_clustered_service_configuration(
                host_config.hostname,
                service.description,
            ),
            plugin=plugin,
            service_id=service.id(),
            persist_value_store_changes=persist_value_store_changes,
        )
        if host_config.is_cluster
        else plugin.check_function
    )
    source_type = (
        SourceType.MANAGEMENT if service.check_plugin_name.is_management_name() else SourceType.HOST
    )
    # Initialized before the try block so the crash-dump handler below can
    # always reference it, even when the exception is raised before the first
    # assignment inside the try.  (The previous ``globals().get("kwargs", {})``
    # could never find the function-local name and therefore always dumped an
    # empty mapping, losing the diagnostic kwargs.)
    kwargs = {}
    try:
        kwargs = (
            get_section_cluster_kwargs(
                parsed_sections_broker,
                config_cache.get_clustered_service_node_keys(
                    host_config.hostname,
                    source_type,
                    service.description,
                )
                or [],
                plugin.sections,
            )
            if host_config.is_cluster
            else get_section_kwargs(
                parsed_sections_broker,
                HostKey(host_config.hostname, ipaddress, source_type),
                plugin.sections,
            )
        )
        if not kwargs and not service.check_plugin_name.is_management_name():
            # in 1.6 some plugins were discovered for management boards, but with
            # the regular host plugins name. In this case retry with the source type
            # forced to MANAGEMENT:
            kwargs = (
                get_section_cluster_kwargs(
                    parsed_sections_broker,
                    config_cache.get_clustered_service_node_keys(
                        host_config.hostname,
                        SourceType.MANAGEMENT,
                        service.description,
                    )
                    or [],
                    plugin.sections,
                )
                if host_config.is_cluster
                else get_section_kwargs(
                    parsed_sections_broker,
                    HostKey(host_config.hostname, ipaddress, SourceType.MANAGEMENT),
                    plugin.sections,
                )
            )
        if not kwargs:  # no data found
            return AggregatedResult(
                submit=False,
                data_received=False,
                result=RECEIVED_NO_DATA,
                cache_info=None,
            )

        # Only pass "item"/"params" when the plugin takes them.
        kwargs = {
            **kwargs,
            **({} if service.item is None else {"item": service.item}),
            **(
                {}
                if plugin.check_default_parameters is None
                else {"params": _final_read_only_check_parameters(timespecific_parameters)}
            ),
        }
        # Run the check with host/service context set and the value store
        # scoped to this service.
        with plugin_contexts.current_host(host_config.hostname), plugin_contexts.current_service(
            service
        ), value_store_manager.namespace(service.id()):
            result = _aggregate_results(check_function(**kwargs))

    except (item_state.MKCounterWrapped, checking_classes.IgnoreResultsError) as e:
        # Counter not yet initialized / plugin asked to ignore: report OK
        # without submitting to the core.
        msg = str(e) or "No service summary available"
        return AggregatedResult(
            submit=False,
            data_received=True,
            result=(0, msg, []),
            cache_info=None,
        )
    except MKTimeout:
        # Timeouts must propagate; they are handled by the caller.
        raise
    except Exception:
        if cmk.utils.debug.enabled():
            raise
        # Any other failure becomes an UNKNOWN result with a crash dump that
        # includes whatever kwargs had been assembled before the crash.
        table = check_table.get_check_table(host_config.hostname, skip_autochecks=True)
        result = (
            3,
            cmk.base.crash_reporting.create_check_crash_dump(
                host_name=host_config.hostname,
                service_name=service.description,
                plugin_name=service.check_plugin_name,
                plugin_kwargs=kwargs,
                is_manual=service.id() in table,
            ),
            [],
        )

    return AggregatedResult(
        submit=True,
        data_received=True,
        result=result,
        cache_info=parsed_sections_broker.get_cache_info(plugin.sections),
    )
Ejemplo n.º 27
0
def _execute_check_legacy_mode(multi_host_sections: MultiHostSections, hostname: HostName,
                               ipaddress: Optional[HostAddress], service: Service) -> bool:
    """Execute a legacy-API check plugin for *service* and submit its result.

    Returns True when the service is handled for this cycle (a result was
    submitted, or the check deliberately produced none), False when no section
    data was available for this check type.
    """
    legacy_check_plugin_name = config.legacy_check_plugin_names.get(service.check_plugin_name)
    if legacy_check_plugin_name is None:
        _submit_check_result(hostname, service.description, CHECK_NOT_IMPLEMENTED, None)
        return True

    check_function = config.check_info[legacy_check_plugin_name].get("check_function")
    if check_function is None:
        _submit_check_result(hostname, service.description, CHECK_NOT_IMPLEMENTED, None)
        return True

    # Make a bit of context information globally available, so that functions
    # called by checks know this context. check_api_utils.set_service has
    # already been called.
    item_state.set_item_state_prefix(str(service.check_plugin_name), service.item)

    section_name = legacy_check_plugin_name.split('.')[0]

    # Initialize both names *before* the try block: the crash dump built in
    # the generic exception handler below references them, and they would be
    # unbound (-> UnboundLocalError inside the handler) if get_section_content()
    # or legacy_determine_check_params() raised.
    section_content = None
    used_params = None
    mgmt_board_info = config.get_management_board_precedence(section_name, config.check_info)
    source_type = SourceType.MANAGEMENT if mgmt_board_info == LEGACY_MGMT_ONLY else SourceType.HOST
    try:
        section_content = multi_host_sections.get_section_content(
            HostKey(hostname, ipaddress, source_type),
            mgmt_board_info,
            section_name,
            for_discovery=False,
            cluster_node_keys=config.get_config_cache().get_clustered_service_node_keys(
                hostname,
                source_type,
                service.description,
                ip_lookup.lookup_ip_address,
            ),
            check_legacy_info=config.check_info,
        )

        # TODO: Move this to a helper function
        if section_content is None:  # No data for this check type
            return False

        # Call the actual check function
        item_state.reset_wrapped_counters()

        used_params = legacy_determine_check_params(service.parameters)
        raw_result = check_function(service.item, used_params, section_content)
        result = sanitize_check_result(raw_result)
        item_state.raise_counter_wrap()

    except item_state.MKCounterWrapped as e:
        # handle check implementations that do not yet support the
        # handling of wrapped counters via exception on their own.
        # Do not submit any check result in that case:
        console.verbose("%-20s PEND - Cannot compute check result: %s\n",
                        ensure_str(service.description), e)
        # Don't submit to core - we're done.
        return True

    except MKTimeout:
        raise

    except Exception:
        if cmk.utils.debug.enabled():
            raise
        # Turn any unexpected crash of the check into an UNKNOWN result with
        # a crash dump, unless we are in debug mode (re-raised above).
        result = 3, cmk.base.crash_reporting.create_check_crash_dump(
            hostname,
            service.check_plugin_name,
            {
                "item": service.item,
                "params": used_params,
                "section_content": section_content
            },
            is_manual_check(hostname, service.id()),
            service.description,
        ), []

    _submit_check_result(
        hostname,
        service.description,
        result,
        _legacy_determine_cache_info(multi_host_sections, SectionName(section_name)),
    )
    return True
Ejemplo n.º 28
0
def test_parse_diskstat_predictive(value_store: MutableMapping[str, Any],
                                   mocker: MockerFixture):
    """Regression test (SUP-5924): diskstat check with predictive levels.

    First invocation has no rate reference yet and must raise
    IgnoreResultsError; the second invocation (one second later) yields a
    full result set with the mocked predictive levels applied.
    """
    agent_output = [
        ['1617784511'],
        [
            '259', '0', 'nvme0n1', '131855', '42275', '8019303', '34515',
            '386089', '166344', '13331634', '138121', '0', '185784', '177210',
            '0', '0', '0', '0', '41445', '4574'
        ],
        [
            '53', '0', 'dm-0', '172574', '0', '7980626', '74812', '548159',
            '0', '12443656', '706944', '0', '189576', '781756', '0', '0', '0',
            '0', '0', '0'
        ],
        [
            '53', '1', 'dm-1', '171320', '0', '7710074', '74172', '546564',
            '0', '12514416', '674352', '0', '186452', '748524', '0', '0', '0',
            '0', '0', '0'
        ],
        [
            '53', '2', 'dm-2', '194', '0', '8616', '68', '0', '0', '0', '0',
            '0', '72', '68', '0', '0', '0', '0', '0', '0'
        ],
        ['[dmsetup_info]'],
        ['vme0n1p3_crypt', '253:0'],
        ['buntu--vg-swap_1', '253:2', 'ubuntu-vg', 'swap_1'],
        ['buntu--vg-root', '253:1', 'ubuntu-vg', 'root'],
    ]

    check_params = {
        'average': 300,
        'latency': (80.0, 160.0),
        'read': {
            'horizon': 90,
            'levels_lower': ('absolute', (2.0, 4.0)),
            'levels_upper': ('relative', (10.0, 20.0)),
            'levels_upper_min': (10.0, 15.0),
            'period': 'wday'
        },
        'read_ios': (400.0, 600.0),
        'read_latency': (80.0, 160.0),
        'read_wait': (30.0, 50.0),
        'utilization': (80.0, 90.0),
        'write': (50.0, 100.0),
        'write_ios': (300.0, 400.0),
        'write_latency': (80.0, 160.0),
        'write_wait': (30.0, 50.0)
    }

    # Short-circuit the prediction machinery with fixed fake levels.
    mocker.patch("cmk.base.check_api._prediction.get_levels",
                 return_value=(None, (2.1, 4.1, None, None)))

    svc = Service(
        CheckPluginName("unittest_sd"),
        parameters={},
        item="item-nvme0n1",
        description="unittest_sd_description",
    )

    expected = [
        Result(state=state.OK,
               notice='All values averaged over 5 minutes 0 seconds'),
        Result(state=state.OK, notice='Utilization: 0%'),
        Metric('disk_utilization', 0.0, levels=(0.8, 0.9)),
        Result(state=state.OK,
               summary='Read: 0.00 B/s (no reference for prediction yet)'),
        # fake levels are quite low
        Metric('disk_read_throughput', 0.0, levels=(2.1, 4.1)),
        Result(state=state.OK, summary='Write: 0.00 B/s'),
        Metric('disk_write_throughput', 0.0,
               levels=(50000000.0, 100000000.0)),
        Result(state=state.OK, notice='Average wait: 0 seconds'),
        Metric('disk_average_wait', 0.0),
        Result(state=state.OK, notice='Average read wait: 0 seconds'),
        Metric('disk_average_read_wait', 0.0, levels=(0.03, 0.05)),
        Result(state=state.OK, notice='Average write wait: 0 seconds'),
        Metric('disk_average_write_wait', 0.0, levels=(0.03, 0.05)),
        Result(state=state.OK, notice='Average queue length: 0.00'),
        Metric('disk_queue_length', 0.0),
        Result(state=state.OK, notice='Read operations: 0.00/s'),
        Metric('disk_read_ios', 0.0, levels=(400.0, 600.0)),
        Result(state=state.OK, notice='Write operations: 0.00/s'),
        Metric('disk_write_ios', 0.0, levels=(300.0, 400.0)),
        Result(state=state.OK, summary='Latency: 0 seconds'),
        Metric('disk_latency', 0.0, levels=(0.08, 0.16)),
        Metric('disk_average_read_request_size', 0.0),
        Metric('disk_average_request_size', 0.0),
        Metric('disk_average_write_request_size', 0.0),
    ]

    with plugin_contexts.current_host(
            "unittest-hn"), plugin_contexts.current_service(svc):
        # First run: no previous counters -> check must bail out.
        with pytest.raises(IgnoreResultsError):
            list(
                diskstat.check_diskstat("nvme0n1", check_params,
                                        diskstat.parse_diskstat(agent_output),
                                        None))

        # Second run, one second later: full result set expected.
        agent_output[0][0] = '1617784512'
        actual = list(
            diskstat.check_diskstat(
                "nvme0n1",
                check_params,
                diskstat.parse_diskstat(agent_output),
                None,
            ))
        assert actual == expected