def _transform_ignored_checks_to_maincheckified_list(self, all_rulesets):
    """Rewrite every "ignored_checks" rule value to a list of maincheckified
    plugin names; a bare string value becomes a one-element list."""
    ruleset = all_rulesets.get("ignored_checks")
    if ruleset.is_empty():
        return

    for _folder, _index, rule in ruleset.get_rules():
        raw_value = rule.value
        if isinstance(raw_value, str):
            raw_value = [raw_value]
        rule.value = [maincheckify(name) for name in raw_value]
def _get_static_check_entries(
    config_cache: config.ConfigCache,
    host_config: config.HostConfig,
) -> Iterator[Service]:
    """Build Service objects for the statically configured checks of a host.

    Effective parameters are computed on the node that actually runs the
    (possibly clustered) service, with the rule's params wrapped as
    TimespecificParameters.
    """
    entries: List[Service] = []
    for _checkgroup_name, check_plugin_name_str, item, params in host_config.static_checks:
        # TODO (mo): centralize maincheckify: CMK-4295
        # in this case: move it to the transform of the static services rule.
        check_plugin_name = CheckPluginName(maincheckify(check_plugin_name_str))
        descr = config.service_description(host_config.hostname, check_plugin_name, item)

        new_parameters = config.compute_check_parameters(
            config_cache.host_of_clustered_service(host_config.hostname, descr),
            check_plugin_name,
            item,
            {},
            configured_parameters=TimespecificParameters((params,)),
        )

        entries.append(Service(check_plugin_name, item, descr, new_parameters))

    # Note: We need to reverse the order of the static_checks. This is
    # because users assume that earlier rules have precedence over later
    # ones. For static checks that is important if there are two rules for
    # a host with the same combination of check type and item.
    return reversed(entries)
def create_inventory_plugin_from_legacy(
    inventory_plugin_name: str,
    inventory_info_dict: Dict[str, Any],
    extra_sections_count: int,
) -> InventoryPlugin:
    """Auto-migrate a legacy inventory_info entry to a new-API InventoryPlugin."""
    if inventory_info_dict.get('depends_on'):
        # TODO (mo): this affects only inv_cisco_vlans. For now, we silently
        # skip this, until we migrate inv_cisco_vlans to the new api; then
        # raise NotImplementedError("cannot auto-migrate plugins with dependencies")
        pass

    new_inventory_name = maincheckify(inventory_plugin_name)

    legacy_inventory_function = inventory_info_dict["inv_function"]
    # Whether the legacy function accepts a params argument decides both the
    # default parameters and the ruleset name below.
    has_parameters = _function_has_params(legacy_inventory_function)

    inventory_function = _create_inventory_function(
        legacy_inventory_function,
        has_parameters,
        extra_sections_count,
    )

    return create_inventory_plugin(
        name=new_inventory_name,
        # the section is the part of the legacy name before the first dot
        sections=[inventory_plugin_name.split('.', 1)[0]],
        inventory_function=inventory_function,
        inventory_default_parameters={} if has_parameters else None,
        inventory_ruleset_name=inventory_plugin_name if has_parameters else None,
        module=None,
    )
def create_inventory_plugin_from_legacy(
    inventory_plugin_name: str,
    inventory_info_dict: Dict[str, Any],
    extra_sections_count: int,
) -> InventoryPlugin:
    """Create a new-API InventoryPlugin from a legacy inventory_info entry.

    Raises NotImplementedError for plugins that declare section dependencies,
    which cannot be migrated automatically.
    """
    if inventory_info_dict.get('depends_on'):
        raise NotImplementedError("cannot auto-migrate plugins with dependencies")

    legacy_function = inventory_info_dict["inv_function"]
    takes_params = _function_has_params(legacy_function)

    return create_inventory_plugin(
        name=maincheckify(inventory_plugin_name),
        sections=[inventory_plugin_name.split('.', 1)[0]],
        inventory_function=_create_inventory_function(
            legacy_function,
            takes_params,
            extra_sections_count,
        ),
        inventory_default_parameters={} if takes_params else None,
        inventory_ruleset_name=inventory_plugin_name if takes_params else None,
        module=None,
    )
def run_test_on_checks(check, subcheck, dataset, info_arg, immu, write):
    """Run check for test case listed in dataset"""
    test_cases = getattr(dataset, "checks", {}).get(subcheck, [])
    check_func = check.info.get("check_function")
    check_plugin_name = CheckPluginName(maincheckify(check.name))

    for item, params, results_expected_raw in test_cases:
        print("Dataset item %r in check %r" % (item, check.name))
        # Register params so we can later verify the check did not mutate them.
        immu.register(params, "params")

        # The check function may query the currently executed service, so a
        # dummy service context is set up for the call.
        with current_service(
                Service(
                    item=item,
                    check_plugin_name=check_plugin_name,
                    description="unit test description",
                    parameters={},
                )):
            result = CheckResult(check.run_check(item, params, info_arg))
        immu.test(" after check (%s): " % check_func.__name__)

        result_expected = CheckResult(results_expected_raw)

        if write:
            # Regenerate the expected result in the dataset instead of asserting.
            new_entry = (item, params, result.raw_repr())
            dataset.update_check_result(subcheck, new_entry)
        else:
            assertCheckResultsEqual(result, result_expected)
def execute_check(multi_host_sections: MultiHostSections, host_config: config.HostConfig,
                  ipaddress: Optional[HostAddress], service: Service) -> bool:
    """Run one check on a host and submit its result to the core.

    Returns True if data for the plugin was received, False otherwise.
    """
    # TODO (mo): centralize maincheckify: CMK-4295
    plugin_name = CheckPluginName(maincheckify(service.check_plugin_name))
    plugin = config.get_registered_check_plugin(plugin_name)

    # check if we must use legacy mode. remove this block entirely one day
    if (plugin is not None and host_config.is_cluster and
            plugin.cluster_check_function.__name__ == CLUSTER_LEGACY_MODE_FROM_HELL):
        return _execute_check_legacy_mode(
            multi_host_sections,
            host_config.hostname,
            ipaddress,
            service,
        )

    submit, data_received, result = get_aggregated_result(
        multi_host_sections,
        host_config,
        ipaddress,
        service,
        plugin,
        # parameters are evaluated lazily at check execution time
        lambda: determine_check_params(service.parameters),
    )

    if submit:
        _submit_check_result(
            host_config.hostname,
            service.description,
            result,
            multi_host_sections.get_cache_info(plugin.sections) if plugin else None,
        )
    elif data_received:
        # data arrived but nothing is submitted -> report the service as pending
        console.verbose("%-20s PEND - %s\n", ensure_str(service.description), result[1])

    return data_received
def _make_piggy_nodes(
        host_config: HostConfig) -> List[Tuple[HostName, Optional[HostAddress], "DataSources"]]:
    """Abstract clusters/nodes/hosts

    Builds one (hostname, ipaddress, DataSources) triple per cluster node.
    """
    assert host_config.nodes is not None

    import cmk.base.data_sources.abstract as abstract  # pylint: disable=import-outside-toplevel
    abstract.DataSource.set_may_use_cache_file()

    nodes = []
    for hostname in host_config.nodes:
        ipaddress = ip_lookup.lookup_ip_address(hostname)

        # Only the checks that are clustered matter for the nodes here.
        check_names = check_table.get_needed_check_names(
            hostname,
            remove_duplicates=True,
            filter_mode="only_clustered",
        )
        selected_raw_sections = config.get_relevant_raw_sections(
            # TODO (mo): centralize maincheckify: CMK-4295
            CheckPluginName(maincheckify(n)) for n in check_names)

        sources = DataSources(
            hostname,
            ipaddress,
            sources=make_sources(
                host_config,
                ipaddress,
                selected_raw_sections=selected_raw_sections,
            ),
        )
        nodes.append((hostname, ipaddress, sources))
    return nodes
def _get_static_check_entries(host_config: config.HostConfig,) -> Iterator[Service]:
    """Build Service objects for the statically configured checks of a host.

    Time-specific parameters are held aside while the effective parameters are
    computed, then re-attached afterwards.
    """
    entries: List[Service] = []
    for _checkgroup_name, check_plugin_name_str, item, params in host_config.static_checks:
        # TODO (mo): centralize maincheckify: CMK-4295
        check_plugin_name = CheckPluginName(maincheckify(check_plugin_name_str))

        if config.has_timespecific_params(params):
            timespec_params = [params]
            params = {}
        else:
            timespec_params = []

        new_params = config.compute_check_parameters(
            host_config.hostname,
            check_plugin_name,
            item,
            params,
            for_static_checks=True,
        )

        if timespec_params:
            # re-attach the time-specific parameter set on top of the computed ones
            params = config.set_timespecific_param_list(timespec_params, new_params)
        else:
            params = new_params

        descr = config.service_description(host_config.hostname, check_plugin_name, item)
        entries.append(Service(check_plugin_name, item, descr, params))

    # Note: We need to reverse the order of the static_checks. This is
    # because users assume that earlier rules have precedence over later
    # ones. For static checks that is important if there are two rules for
    # a host with the same combination of check type and item.
    return reversed(entries)
def _read_raw_autochecks_uncached(
    self,
    hostname: HostName,
) -> Sequence[_AutocheckService]:
    """Read automatically discovered checks of one host"""
    path = _autochecks_path_for(hostname)
    try:
        autochecks_raw = _load_raw_autochecks(
            path=path,
            check_variables=None,
        )
    except SyntaxError as e:
        # Broken autochecks file: re-raise in debug mode, otherwise behave as
        # if there were no autochecks at all.
        logger.exception("Syntax error in file %s: %s", path, e)
        if cmk.utils.debug.enabled():
            raise
        return []
    except Exception as e:
        logger.exception("Error in file %s:\n%s", path, e)
        if cmk.utils.debug.enabled():
            raise
        return []

    services = []
    for entry in autochecks_raw:
        try:
            item = entry["item"]
        except TypeError:  # pre 1.6 tuple!
            # NOTE(review): "convertig" is a typo in this user-visible message;
            # kept verbatim here since a doc-only change must not alter it.
            raise MKGeneralException(
                "Invalid check entry '%r' of host '%s' (%s) found. This "
                "entry is in pre Checkmk 1.6 format and needs to be converted. This is "
                'normally done by "cmk-update-config -v" during "omd update". Please '
                'execute "cmk-update-config -v" for convertig the old configuration.' %
                (entry, hostname, path))

        try:
            # Pre 2.0 plugin names may contain dots; maincheckify maps them to
            # the new-API form before constructing the CheckPluginName.
            plugin_name = CheckPluginName(maincheckify(entry["check_plugin_name"]))
            assert item is None or isinstance(item, str)
        except Exception:
            raise MKGeneralException(
                "Invalid check entry '%r' of host '%s' (%s) found. This "
                "entry is in pre Checkmk 2.0 format and needs to be converted. This is "
                'normally done by "cmk-update-config -v" during "omd update". Please '
                'execute "cmk-update-config -v" for convertig the old configuration.' %
                (entry, hostname, path))

        labels = DiscoveredServiceLabels()
        for label_id, label_value in entry["service_labels"].items():
            labels.add_label(ServiceLabel(label_id, label_value))

        services.append(
            _AutocheckService(
                check_plugin_name=plugin_name,
                item=item,
                discovered_parameters=entry["parameters"],
                service_labels=labels,
            ))

    return services
def resolve_legacy_name(plugin_name: CheckPluginName) -> str:
    """Get legacy plugin name back"""
    # TODO (mo): remove this with CMK-4295. Function is only needed during transition
    wanted = str(plugin_name)
    # nothing found -> it may be a new plugin, which is OK: fall back to the
    # stringified new name.
    return next(
        (legacy_name for legacy_name in config.check_info if maincheckify(legacy_name) == wanted),
        wanted,
    )
def _find_legacy_check_name(
    new_check_plugin_name_str: str,
    potential_legacy_names: Iterable[str],
) -> Optional[str]:
    """Return the first legacy name whose maincheckified form matches the
    given new-API name, or None if there is no such candidate."""
    matches = (candidate for candidate in potential_legacy_names
               if maincheckify(candidate) == new_check_plugin_name_str)
    return next(matches, None)
def _get_filtered_services( host_name: HostName, belongs_to_cluster: bool, config_cache: config.ConfigCache, only_check_plugins: Optional[Set[CheckPluginName]] = None, ) -> List[Service]: services = check_table.get_precompiled_check_table( host_name, remove_duplicates=True, filter_mode="include_clustered" if belongs_to_cluster else None, ) # When check types are specified via command line, enforce them. Otherwise use the # list of checks defined by the check table. if only_check_plugins is None: only_check_plugins = { # TODO (mo): make service.check_plugin_name a CheckPluginName instance and thus # TODO (mo): centralize maincheckify: CMK-4295 CheckPluginName(maincheckify(service.check_plugin_name)) for service in services } def _is_not_of_host(service): return host_name != config_cache.host_of_clustered_service(host_name, service.description) # Filter out check types which are not used on the node if belongs_to_cluster: removed_plugins = { plugin for plugin in only_check_plugins if all( _is_not_of_host(service) for service in services # TODO (mo): centralize maincheckify: CMK-4295 if CheckPluginName(maincheckify(service.check_plugin_name)) == plugin) } only_check_plugins -= removed_plugins return [ service for service in services if ( # TODO (mo): centralize maincheckify: CMK-4295 CheckPluginName(maincheckify(service.check_plugin_name)) in only_check_plugins and not (belongs_to_cluster and _is_not_of_host(service)) and not service_outside_check_period(config_cache, host_name, service.description)) ]
def create_check_plugin_from_legacy(check_plugin_name: str, check_info_dict: Dict[str, Any],
                                    forbidden_names: List[CheckPluginName]) -> CheckPlugin:
    """Auto-migrate a legacy check_info entry to a new-API CheckPlugin.

    Raises NotImplementedError for features that cannot be migrated
    automatically (extra_sections, node_info).
    """
    if check_info_dict.get('extra_sections'):
        raise NotImplementedError("[%s]: cannot auto-migrate plugins with extra sections" %
                                  check_plugin_name)

    if check_info_dict.get("node_info"):
        # We refuse to tranform these. The requirement of adding the node info
        # makes rewriting of the base code too difficult.
        # Affected Plugins must be migrated manually after CMK-4240 is done.
        raise NotImplementedError("[%s]: cannot auto-migrate plugins with node info" %
                                  check_plugin_name)

    # make sure we haven't missed something important:
    unconsidered_keys = set(check_info_dict) - CONSIDERED_KEYS
    assert not unconsidered_keys, ("Unconsidered key(s) in check_info[%r]: %r" %
                                   (check_plugin_name, unconsidered_keys))

    new_check_name = maincheckify(check_plugin_name)

    check_default_parameters = _create_wrapped_parameters(check_plugin_name, check_info_dict)

    discovery_function = _create_discovery_function(check_plugin_name, check_info_dict)

    check_ruleset_name = check_info_dict.get("group")
    if check_ruleset_name is None and check_default_parameters is not None:
        # default parameters without a ruleset: attach the dummy ruleset so the
        # parameters remain configurable
        check_ruleset_name = DUMMY_RULESET_NAME
    check_function = _create_check_function(
        check_plugin_name,
        check_info_dict,
        check_ruleset_name,
    )

    return create_check_plugin(
        name=new_check_name,
        # the section is the part of the legacy name before the first dot
        sections=[check_plugin_name.split('.', 1)[0]],
        service_name=check_info_dict['service_description'],
        discovery_function=discovery_function,
        discovery_default_parameters=None,  # legacy madness!
        discovery_ruleset_name=None,
        check_function=check_function,
        check_default_parameters=check_default_parameters,
        check_ruleset_name=check_ruleset_name,
        cluster_check_function=_create_cluster_legacy_mode_from_hell(check_function),
        forbidden_names=forbidden_names,
    )
def man_page_path(name: str) -> Optional[Path]:
    """Return the path of the man page for *name*, or None if none is found.

    Hidden files ("." prefix), editor backups ("~" suffix) and the empty
    string are rejected up front.
    """
    # Bug fix: the original tested name[0] / name[-1], which raises IndexError
    # for an empty name. startswith/endswith are safe on "".
    if not name or name.startswith(".") or name.endswith("~"):
        return None

    for basedir in [
            cmk.utils.paths.local_check_manpages_dir,
            Path(cmk.utils.paths.check_manpages_dir),
    ]:
        # check plugins pre 1.7 could have dots in them. be nice and find those.
        p = basedir / (name if name.startswith("check-mk") else maincheckify(name))
        if p.exists():
            return p
    return None
def create_check_plugin_from_legacy(
    check_plugin_name: str,
    check_info_dict: Dict[str, Any],
) -> CheckPlugin:
    """Auto-migrate a legacy check_info entry to a new-API CheckPlugin.

    Raises NotImplementedError for features that cannot be migrated
    automatically (extra_sections, node_info).
    """
    if check_info_dict.get('extra_sections'):
        raise NotImplementedError("[%s]: cannot auto-migrate plugins with extra sections" %
                                  check_plugin_name)

    if check_info_dict.get("node_info"):
        # We refuse to tranform these. The requirement of adding the node info
        # makes rewriting of the base code too difficult.
        # Affected Plugins must be migrated manually after CMK-4240 is done.
        raise NotImplementedError("[%s]: cannot auto-migrate plugins with node info" %
                                  check_plugin_name)

    # make sure we haven't missed something important:
    unconsidered_keys = set(check_info_dict) - CONSIDERED_KEYS
    assert not unconsidered_keys, ("Unconsidered key(s) in check_info[%r]: %r" %
                                   (check_plugin_name, unconsidered_keys))

    new_check_name = maincheckify(check_plugin_name)

    check_default_parameters = _create_wrapped_parameters(check_plugin_name, check_info_dict)

    discovery_function = _create_discovery_function(check_plugin_name, check_info_dict)

    check_ruleset_name = check_info_dict.get("group")
    if check_ruleset_name is None and check_default_parameters is not None:
        # default parameters without a ruleset: attach the dummy ruleset so the
        # parameters remain configurable
        check_ruleset_name = DUMMY_RULESET_NAME
    check_function = _create_check_function(
        check_plugin_name,
        check_info_dict,
        check_ruleset_name,
    )

    return create_check_plugin(
        name=new_check_name,
        # the section is the part of the legacy name before the first dot
        sections=[check_plugin_name.split('.', 1)[0]],
        service_name=check_info_dict['service_description'],
        discovery_function=discovery_function,
        discovery_default_parameters=None,  # legacy madness!
        discovery_ruleset_name=None,
        check_function=check_function,
        check_default_parameters=check_default_parameters,
        check_ruleset_name=check_ruleset_name,
        cluster_check_function=_create_cluster_legacy_mode_from_hell(check_function),
        # Legacy check plugins may return an item even if the service description
        # does not contain a '%s'. In this case the old check API assumes an implicit,
        # trailing '%s'. Therefore, we disable this validation for legacy check plugins.
        # Once all check plugins are migrated to the new API this flag can be removed.
        validate_item=False,
    )
def man_page_path(name: str, man_page_dirs: Optional[Iterable[Path]] = None) -> Optional[Path]:
    """Look up the man page file for *name* in the man page directories.

    Returns the first existing candidate path, or None if the name is not a
    valid basename or no man page exists.
    """
    if not _is_valid_basename(name):
        return None

    dirs = _get_man_page_dirs() if man_page_dirs is None else man_page_dirs

    # check plugins pre 1.7 could have dots in them. be nice and find those.
    filename = name if name.startswith("check-mk") else maincheckify(name)

    for directory in dirs:
        candidate = directory / filename
        if candidate.exists():
            return candidate
    return None
def _parse_autocheck_entry(
    hostname: HostName,
    entry: Union[Tuple, Dict],
    service_description: GetServiceDescription,
) -> Optional[Service]:
    """Parse one raw autocheck entry into a Service.

    Returns None when the service description cannot be computed (the entry
    is silently ignored in that case).
    """
    if isinstance(entry, tuple):
        check_plugin_name, item, parameters = _parse_pre_16_tuple_autocheck_entry(entry)
        dict_service_labels = {}
    elif isinstance(entry, dict):
        check_plugin_name, item, parameters, dict_service_labels = \
            _parse_dict_autocheck_entry(entry)
    else:
        raise Exception("Invalid autocheck: Wrong type: %r" % entry)

    if not isinstance(check_plugin_name, str):
        raise Exception("Invalid autocheck: Wrong check plugin type: %r" % check_plugin_name)

    if isinstance(item, (int, float)):
        # NOTE: We exclude complex here. :-)
        item = str(int(item))
    elif not isinstance(item, (str, type(None))):
        raise Exception("Invalid autocheck: Wrong item type: %r" % item)

    try:
        # Pre 1.7 check plugins had dots in the check plugin name. With the new check API in
        # 1.7 they are replaced by '_', renaming e.g. 'cpu.loads' to 'cpu_loads'.
        plugin_name = CheckPluginName(maincheckify(check_plugin_name))
    except Exception:
        raise Exception("Invalid autocheck: Wrong check plugin name: %r" % check_plugin_name)

    try:
        description = service_description(hostname, plugin_name, item)
    except Exception:
        return None  # ignore

    return Service(
        check_plugin_name=plugin_name,
        item=item,
        description=description,
        parameters=parameters,
        service_labels=_parse_discovered_service_label_from_dict(dict_service_labels),
    )
def _get_service(self, item: Optional[str]):
    """Construct a dummy Service object for this check and *item*."""
    from cmk.utils.type_defs import CheckPluginName
    from cmk.utils.check_utils import maincheckify
    from cmk.base.check_utils import Service

    template = self.info["service_description"]
    assert template, '%r is missing a service_description' % self.name

    if item is None:
        description = template
    else:
        assert "%s" in template, ("Missing '%%s' formatter in service description of %r" %
                                  self.name)
        description = template % item

    return Service(
        item=item,
        check_plugin_name=CheckPluginName(maincheckify(self.name)),
        description=description,
        parameters={},
    )
def _parse_autocheck_entry(
    hostname: HostName,
    entry: Union[Tuple, Dict],
    service_description: GetServiceDescription,
) -> Optional[Service]:
    """Parse one raw autocheck entry into a Service.

    Returns None when the service description cannot be computed (the entry
    is silently ignored in that case).
    """
    if isinstance(entry, tuple):
        check_plugin_name, item, parameters = _parse_pre_16_tuple_autocheck_entry(entry)
        dict_service_labels = {}
    elif isinstance(entry, dict):
        check_plugin_name, item, parameters, dict_service_labels = \
            _parse_dict_autocheck_entry(entry)
    else:
        raise Exception("Invalid autocheck: Wrong type: %r" % entry)

    if not isinstance(check_plugin_name, str):
        raise Exception("Invalid autocheck: Wrong check plugin type: %r" % check_plugin_name)

    if isinstance(item, (int, float)):
        # NOTE: We exclude complex here. :-)
        item = str(int(item))
    elif not isinstance(item, (str, type(None))):
        raise Exception("Invalid autocheck: Wrong item type: %r" % item)

    try:
        # TODO (mo): centralize maincheckify: CMK-4295
        description = service_description(
            hostname,
            CheckPluginName(maincheckify(check_plugin_name)),
            item,
        )
    except Exception:
        return None  # ignore

    # NOTE: the Service keeps the legacy (str) plugin name, while the
    # description above was computed from the new-API name.
    return Service(
        check_plugin_name=check_plugin_name,
        item=item,
        description=description,
        parameters=parameters,
        service_labels=_parse_discovered_service_label_from_dict(dict_service_labels),
    )
def _do_all_checks_on_host(
    services: List[Service],
    sources: data_sources.DataSources,
    host_config: config.HostConfig,
    ipaddress: Optional[HostAddress],
    only_check_plugins: Optional[Set[CheckPluginName]] = None,
) -> Tuple[int, List[CheckPluginName]]:
    """Execute all given services on the host.

    Returns the number of successful checks and the sorted plugin names for
    which no data was received.

    NOTE(review): only_check_plugins is accepted but not used in this body —
    presumably kept for caller compatibility; TODO confirm.
    """
    hostname: HostName = host_config.hostname

    num_success = 0
    plugins_missing_data: Set[CheckPluginName] = set()

    check_api_utils.set_hostname(hostname)

    # Gather the data from the sources
    nodes = sources.make_nodes(host_config)
    multi_host_sections = sources.get_host_sections(
        nodes, max_cachefile_age=host_config.max_cachefile_age)

    for service in services:
        success = execute_check(multi_host_sections, host_config, ipaddress, service)
        if success:
            num_success += 1
        else:
            # TODO (mo): centralize maincheckify: CMK-4295
            plugins_missing_data.add(CheckPluginName(maincheckify(service.check_plugin_name)))

    import cmk.base.inventory as inventory  # pylint: disable=import-outside-toplevel
    inventory.do_inventory_actions_during_checking_for(
        sources,
        multi_host_sections,
        host_config,
        ipaddress,
    )

    return num_success, sorted(plugins_missing_data)
def _read_raw_autochecks_uncached(
    self,
    hostname: HostName,
    service_description: GetServiceDescription,
) -> List[Service]:
    """Read automatically discovered checks of one host"""
    path = _autochecks_path_for(hostname)
    try:
        autochecks_raw = _load_raw_autochecks(
            path=path,
            check_variables=None,
        )
    except SyntaxError as e:
        # Broken autochecks file: re-raise in debug mode, otherwise behave as
        # if there were no autochecks at all.
        console.verbose("Syntax error in file %s: %s\n", path, e, stream=sys.stderr)
        if cmk.utils.debug.enabled():
            raise
        return []
    except Exception as e:
        console.verbose("Error in file %s:\n%s\n", path, e, stream=sys.stderr)
        if cmk.utils.debug.enabled():
            raise
        return []

    services: List[Service] = []
    for entry in autochecks_raw:
        if isinstance(entry, tuple):
            raise MKGeneralException(
                "Invalid check entry '%r' of host '%s' (%s) found. This "
                "entry is in pre Checkmk 1.6 format and needs to be converted. This is "
                "normally done by \"cmk-update-config -v\" during \"omd update\". Please "
                "execute \"cmk-update-config -v\" for convertig the old configuration." %
                (entry, hostname, path))

        labels = DiscoveredServiceLabels()
        for label_id, label_value in entry["service_labels"].items():
            labels.add_label(ServiceLabel(label_id, label_value))

        # With Check_MK 1.2.7i3 items are now defined to be unicode strings. Convert
        # items from existing autocheck files for compatibility. TODO remove this one day
        item = entry["item"]

        if not isinstance(entry["check_plugin_name"], str):
            raise MKGeneralException("Invalid entry '%r' in check table of host '%s': "
                                     "The check type must be a string." % (entry, hostname))

        check_plugin_name_str = str(entry["check_plugin_name"])
        # TODO (mo): centralize maincheckify: CMK-4295
        check_plugin_name = CheckPluginName(maincheckify(check_plugin_name_str))

        try:
            description = service_description(hostname, check_plugin_name, item)
        except Exception:
            continue  # ignore

        services.append(
            Service(
                # NOTE: the legacy (str) name is stored here, not the
                # CheckPluginName computed above.
                check_plugin_name=check_plugin_name_str,
                item=item,
                description=description,
                parameters=entry["parameters"],
                service_labels=labels,
            ))

    return services
def _read_raw_autochecks_uncached(
    self,
    hostname: HostName,
    service_description: GetServiceDescription,
) -> List[Service]:
    """Read automatically discovered checks of one host"""
    path = _autochecks_path_for(hostname)
    try:
        autochecks_raw = _load_raw_autochecks(
            path=path,
            check_variables=None,
        )
    except SyntaxError as e:
        # Broken autochecks file: re-raise in debug mode, otherwise behave as
        # if there were no autochecks at all.
        console.verbose("Syntax error in file %s: %s\n", path, e, stream=sys.stderr)
        if cmk.utils.debug.enabled():
            raise
        return []
    except Exception as e:
        console.verbose("Error in file %s:\n%s\n", path, e, stream=sys.stderr)
        if cmk.utils.debug.enabled():
            raise
        return []

    services: List[Service] = []
    for entry in autochecks_raw:
        try:
            item = entry["item"]
        except TypeError:  # pre 1.6 tuple!
            raise MKGeneralException(
                "Invalid check entry '%r' of host '%s' (%s) found. This "
                "entry is in pre Checkmk 1.6 format and needs to be converted. This is "
                "normally done by \"cmk-update-config -v\" during \"omd update\". Please "
                "execute \"cmk-update-config -v\" for convertig the old configuration." %
                (entry, hostname, path))

        try:
            # Pre 1.7 plugin names may contain dots; maincheckify maps them to
            # the new-API form before constructing the CheckPluginName.
            plugin_name = CheckPluginName(maincheckify(entry["check_plugin_name"]))
            assert item is None or isinstance(item, str)
        except Exception:
            raise MKGeneralException(
                "Invalid check entry '%r' of host '%s' (%s) found. This "
                "entry is in pre Checkmk 1.7 format and needs to be converted. This is "
                "normally done by \"cmk-update-config -v\" during \"omd update\". Please "
                "execute \"cmk-update-config -v\" for convertig the old configuration." %
                (entry, hostname, path))

        labels = DiscoveredServiceLabels()
        for label_id, label_value in entry["service_labels"].items():
            labels.add_label(ServiceLabel(label_id, label_value))

        try:
            description = service_description(hostname, plugin_name, item)
        except Exception:
            continue  # ignore

        services.append(
            Service(
                check_plugin_name=plugin_name,
                item=item,
                description=description,
                parameters=entry["parameters"],
                service_labels=labels,
            ))

    return services
def do_check(
    hostname: HostName,
    ipaddress: Optional[HostAddress],
    only_check_plugin_names: Optional[Set[CheckPluginName]] = None
) -> Tuple[int, List[ServiceDetails], List[ServiceAdditionalDetails], List[str]]:
    """Run all checks on a host and return the aggregated Check_MK service result.

    Returns (status, infotexts, long_infotexts, perfdata) for the
    "Check_MK" service of the host.
    """
    cpu_tracking.start("busy")
    console.verbose("Check_MK version %s\n", cmk_version.__version__)

    config_cache = config.get_config_cache()
    host_config = config_cache.get_host_config(hostname)

    exit_spec = host_config.exit_code_spec()

    status: ServiceState = 0
    infotexts: List[ServiceDetails] = []
    long_infotexts: List[ServiceAdditionalDetails] = []
    perfdata: List[str] = []
    try:
        # In case of keepalive we always have an ipaddress (can be 0.0.0.0 or :: when
        # address is unknown). When called as non keepalive ipaddress may be None or
        # is already an address (2nd argument)
        if ipaddress is None and not host_config.is_cluster:
            ipaddress = ip_lookup.lookup_ip_address(hostname)

        item_state.load(hostname)

        services = _get_filtered_services(
            host_name=hostname,
            belongs_to_cluster=len(config_cache.clusters_of(hostname)) > 0,
            config_cache=config_cache,
            only_check_plugins=only_check_plugin_names,
        )

        # see which raw sections we may need
        selected_raw_sections = config.get_relevant_raw_sections(
            CheckPluginName(maincheckify(s.check_plugin_name)) for s in services)

        sources = data_sources.DataSources(
            hostname,
            ipaddress,
            sources=data_sources.make_sources(
                host_config,
                ipaddress,
                selected_raw_sections=selected_raw_sections,
            ),
        )
        num_success, plugins_missing_data = _do_all_checks_on_host(
            services,
            sources,
            host_config,
            ipaddress,
            only_check_plugin_names,
        )

        if _submit_to_core:
            item_state.save(hostname)

        # Summarize the state of each data source into the Check_MK service.
        for source in sources:
            source_state, source_output, source_perfdata = source.get_summary_result_for_checking()
            if source_output != "":
                status = max(status, source_state)
                infotexts.append("[%s] %s" % (source.id(), source_output))
                perfdata.extend([_convert_perf_data(p) for p in source_perfdata])

        if plugins_missing_data:
            missing_data_status, missing_data_infotext = _check_plugins_missing_data(
                plugins_missing_data,
                exit_spec,
                bool(num_success),
            )
            status = max(status, missing_data_status)
            infotexts.append(missing_data_infotext)

        cpu_tracking.end()
        phase_times = cpu_tracking.get_times()
        total_times = phase_times["TOTAL"]
        run_time = total_times[4]

        infotexts.append("execution time %.1f sec" % run_time)
        if config.check_mk_perfdata_with_times:
            perfdata += [
                "execution_time=%.3f" % run_time,
                "user_time=%.3f" % total_times[0],
                "system_time=%.3f" % total_times[1],
                "children_user_time=%.3f" % total_times[2],
                "children_system_time=%.3f" % total_times[3],
            ]

            for phase, times in phase_times.items():
                if phase in ["agent", "snmp", "ds"]:
                    t = times[4] - sum(times[:4])  # real time - CPU time
                    perfdata.append("cmk_time_%s=%.3f" % (phase, t))
        else:
            perfdata.append("execution_time=%.3f" % run_time)

        return status, infotexts, long_infotexts, perfdata
    finally:
        if _checkresult_file_fd is not None:
            _close_checkresult_file()

        # "ipaddress is not None": At least when working with a cluster host it seems the ipaddress
        # may be None. This needs to be understood in detail and cleaned up. As the InlineSNMP
        # stats feature is a very rarely used debugging feature, the analyzation and fix is
        # postponed now.
        if config.record_inline_snmp_stats \
           and ipaddress is not None \
           and host_config.snmp_config(ipaddress).is_inline_snmp_host:
            inline.snmp_stats_save()
def _is_checkname_valid(self, checkname: CheckPluginNameStr) -> bool:
    """Return True if a check plugin with this (legacy) name is registered."""
    # TODO (mo): centralize maincheckify: CMK-4295
    return config.get_registered_check_plugin(
        CheckPluginName(maincheckify(checkname))) is not None
def test_all_keys_migrated():
    """Every "check_mk-" key must already use the new (maincheckified) name."""
    prefix = "check_mk-"
    for key in check_metrics:
        if not key.startswith(prefix):
            continue
        plugin_part = key[len(prefix):]
        assert plugin_part == maincheckify(plugin_part)
def _parse_pre_20_check_plugin_name(raw_name: object) -> str: try: assert isinstance(raw_name, str) return maincheckify(raw_name) except AssertionError: raise TypeError(f"Invalid autocheck: Check plugin type: {raw_name!r}")