def check_result_generator(*args, **kwargs):
    assert not args, "pass arguments as keywords to check function"
    if "params" in kwargs:
        kwargs["params"] = unwrap_parameters(kwargs["params"])

    item_state.reset_wrapped_counters()  # not supported by the new API!

    subresults = sig_function(**kwargs)
    if isinstance(subresults, tuple):  # just one result
        subresults = [subresults]

    # Once we have seen a newline in *any* subresult,
    # all remaining output is sent to the details page!
    # I'm not saying that is good, but we stay compatible.
    opt_newline = ""
    for subresult in subresults:
        opt_newline = yield from _create_new_result(opt_newline, *subresult)

    item_state.raise_counter_wrap()
def check_result_generator(*args, **kwargs):
    assert not args, "pass arguments as keywords to check function"
    assert "params" in kwargs, "'params' is missing in kwargs: %r" % (kwargs,)
    parameters = kwargs["params"]
    if isinstance(parameters, Parameters):
        # In the new API check_functions will be passed an immutable mapping
        # instead of a dict. However, we have way too many 'if isinstance(params, dict)'
        # call sites to introduce this into legacy code, so use the plain dict.
        parameters = copy.deepcopy(parameters._data)
    kwargs["params"] = unwrap_parameters(parameters)

    item_state.reset_wrapped_counters()  # not supported by the new API!

    try:
        subresults = sig_function(**kwargs)
    except TypeError:
        # this handles a very weird case, in which check plugins do not have an '%s'
        # in their description (^= no item) but do in fact discover an empty string.
        # We cannot just append "%s" to the service description, because in that case
        # our tests complain about the ruleset not being for plugins with item :-(
        # Just retry without item:
        subresults = sig_function(**{k: v for k, v in kwargs.items() if k != "item"})

    if subresults is None:
        return

    if isinstance(subresults, tuple):  # just one result
        subresults = [subresults]

    # Once we have seen a newline in *any* subresult,
    # all remaining output is sent to the details page!
    # I'm not saying that is good, but we stay compatible.
    is_details = False
    for subresult in subresults:
        is_details = yield from _create_new_result(is_details, *subresult)

    item_state.raise_counter_wrap()
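# Illustration only, not part of the original module: a hypothetical legacy-style
# check function showing the return shapes that the check_result_generator()
# wrappers above normalize. A legacy check function may return a single tuple or
# an iterable of tuples, each of the form (state, infotext) or
# (state, infotext, perfdata).
def _example_legacy_check(item, params, section):
    # One subresult with performance data ...
    yield 0, "everything fine", [("bytes_used", 42)]
    # ... and one without; once a newline has been seen, the remaining text
    # goes to the long/details output, as the comments above describe.
    yield 1, "usage above warn level\nsee details for a breakdown"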
def _execute_check_legacy_mode(multi_host_sections: MultiHostSections, hostname: HostName,
                               ipaddress: Optional[HostAddress], service: Service) -> bool:
    legacy_check_plugin_name = config.legacy_check_plugin_names.get(service.check_plugin_name)
    if legacy_check_plugin_name is None:
        _submit_check_result(hostname, service.description, CHECK_NOT_IMPLEMENTED, None)
        return True

    check_function = config.check_info[legacy_check_plugin_name].get("check_function")
    if check_function is None:
        _submit_check_result(hostname, service.description, CHECK_NOT_IMPLEMENTED, None)
        return True

    # Make a bit of context information globally available, so that functions
    # called by checks know this context. check_api_utils.set_service has
    # already been called.
    item_state.set_item_state_prefix(str(service.check_plugin_name), service.item)

    section_name = legacy_check_plugin_name.split('.')[0]

    section_content = None
    mgmt_board_info = config.get_management_board_precedence(section_name, config.check_info)
    source_type = SourceType.MANAGEMENT if mgmt_board_info == LEGACY_MGMT_ONLY else SourceType.HOST
    try:
        section_content = multi_host_sections.get_section_content(
            HostKey(hostname, ipaddress, source_type),
            mgmt_board_info,
            section_name,
            for_discovery=False,
            cluster_node_keys=config.get_config_cache().get_clustered_service_node_keys(
                hostname,
                source_type,
                service.description,
                ip_lookup.lookup_ip_address,
            ),
            check_legacy_info=config.check_info,
        )

        # TODO: Move this to a helper function
        if section_content is None:  # No data for this check type
            return False

        # Call the actual check function
        item_state.reset_wrapped_counters()
        used_params = legacy_determine_check_params(service.parameters)
        raw_result = check_function(service.item, used_params, section_content)
        result = sanitize_check_result(raw_result)
        item_state.raise_counter_wrap()

    except item_state.MKCounterWrapped as e:
        # handle check implementations that do not yet support the
        # handling of wrapped counters via exception on their own.
        # Do not submit any check result in that case:
        console.verbose("%-20s PEND - Cannot compute check result: %s\n",
                        ensure_str(service.description), e)
        # Don't submit to core - we're done.
        return True

    except MKTimeout:
        raise

    except Exception:
        if cmk.utils.debug.enabled():
            raise
        result = 3, cmk.base.crash_reporting.create_check_crash_dump(
            hostname,
            service.check_plugin_name,
            {
                "item": service.item,
                "params": used_params,
                "section_content": section_content
            },
            is_manual_check(hostname, service.id()),
            service.description,
        ), []

    _submit_check_result(
        hostname,
        service.description,
        result,
        _legacy_determine_cache_info(multi_host_sections, SectionName(section_name)),
    )
    return True
def execute_check(config_cache, multi_host_sections, hostname, ipaddress, check_plugin_name, item,
                  params, description):
    # type: (config.ConfigCache, data_sources.MultiHostSections, HostName, Optional[HostAddress], CheckPluginName, Item, CheckParameters, ServiceName) -> Optional[bool]
    # Make a bit of context information globally available, so that functions
    # called by checks know this context
    check_api_utils.set_service(check_plugin_name, description)
    item_state.set_item_state_prefix(check_plugin_name, item)

    # Skip checks that are not in their check period
    period = config_cache.check_period_of_service(hostname, description)
    if period is not None:
        if not cmk.base.core.check_timeperiod(period):
            console.verbose("Skipping service %s: currently not in timeperiod %s.\n",
                            six.ensure_str(description), period)
            return None
        console.vverbose("Service %s: timeperiod %s is currently active.\n",
                         six.ensure_str(description), period)

    section_name = cmk.base.check_utils.section_name_of(check_plugin_name)

    dont_submit = False
    section_content = None
    try:
        # TODO: There is duplicate code with discovery._execute_discovery(). Find a common place!
        try:
            section_content = multi_host_sections.get_section_content(
                hostname,
                ipaddress,
                section_name,
                for_discovery=False,
                service_description=description)
        except MKParseFunctionError as e:
            x = e.exc_info()
            # re-raise the original exception to not destroy the trace. This may raise a MKCounterWrapped
            # exception which needs to lead to a skipped check instead of a crash
            # TODO CMK-3729, PEP-3109
            new_exception = x[0](x[1])
            new_exception.__traceback__ = x[2]  # type: ignore[attr-defined]
            raise new_exception

        # TODO: Move this to a helper function
        if section_content is None:  # No data for this check type
            return False

        # In case of SNMP checks but missing agent response, skip this check.
        # TODO: This feature predates the 'parse_function', and is not needed anymore.
        # # Special checks which still need to be called even with empty data
        # # may declare this.
        if not section_content and cmk.base.check_utils.is_snmp_check(check_plugin_name) \
           and not config.check_info[check_plugin_name]["handle_empty_info"]:
            return False

        check_function = config.check_info[check_plugin_name].get("check_function")
        if check_function is None:
            check_function = lambda item, params, section_content: (
                3, 'UNKNOWN - Check not implemented')

        # Call the actual check function
        item_state.reset_wrapped_counters()
        raw_result = check_function(item, determine_check_params(params), section_content)
        result = sanitize_check_result(raw_result,
                                       cmk.base.check_utils.is_snmp_check(check_plugin_name))
        item_state.raise_counter_wrap()

    except item_state.MKCounterWrapped as e:
        # handle check implementations that do not yet support the
        # handling of wrapped counters via exception on their own.
        # Do not submit any check result in that case:
        console.verbose("%-20s PEND - Cannot compute check result: %s\n",
                        six.ensure_str(description), e)
        dont_submit = True

    except MKTimeout:
        raise

    except Exception as e:
        if cmk.utils.debug.enabled():
            raise
        result = 3, cmk.base.crash_reporting.create_check_crash_dump(
            hostname, check_plugin_name, item,
            is_manual_check(hostname, check_plugin_name, item), params, description,
            section_content), []

    if not dont_submit:
        # Now add information about the age of the data in the agent
        # sections. This is in data_sources.g_agent_cache_info. For clusters we
        # use the oldest of the timestamps, of course.
        oldest_cached_at = None
        largest_interval = None

        def minn(a, b):
            # type: (Optional[int], Optional[int]) -> Optional[int]
            if a is None:
                return b
            if b is None:
                return a
            return min(a, b)

        for host_sections in multi_host_sections.get_host_sections().values():
            section_entries = host_sections.cache_info
            if section_name in section_entries:
                cached_at, cache_interval = section_entries[section_name]
                oldest_cached_at = minn(oldest_cached_at, cached_at)
                # max() cannot cope with None, so guard the first assignment
                largest_interval = (cache_interval if largest_interval is None else
                                    max(largest_interval, cache_interval))

        _submit_check_result(hostname,
                             description,
                             result,
                             cached_at=oldest_cached_at,
                             cache_interval=largest_interval)

    return True
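# Illustration only, not from the original source: a self-contained sketch of the
# cache-info aggregation performed above for clustered hosts. Given one
# (cached_at, cache_interval) pair per node, the submitted result uses the oldest
# timestamp and the largest interval.
def _example_aggregate_cache_info(entries):
    # type: (list) -> tuple
    if not entries:
        return None, None
    oldest_cached_at = min(cached_at for cached_at, _interval in entries)
    largest_interval = max(interval for _cached_at, interval in entries)
    return oldest_cached_at, largest_interval

# _example_aggregate_cache_info([(1000, 60), (900, 120)]) == (900, 120)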
def execute_check(multi_host_sections, hostname, ipaddress, service):
    # type: (data_sources.MultiHostSections, HostName, Optional[HostAddress], Service) -> bool
    check_function = config.check_info[service.check_plugin_name].get("check_function")
    if check_function is None:
        _submit_check_result(hostname, service.description, CHECK_NOT_IMPLEMENTED, (None, None))
        return True

    # Make a bit of context information globally available, so that functions
    # called by checks know this context
    check_api_utils.set_service(service.check_plugin_name, service.description)
    item_state.set_item_state_prefix(service.check_plugin_name, service.item)

    section_name = cmk.base.check_utils.section_name_of(service.check_plugin_name)

    section_content = None
    try:
        # TODO: There is duplicate code with discovery._execute_discovery(). Find a common place!
        try:
            section_content = multi_host_sections.get_section_content(
                hostname,
                ipaddress,
                section_name,
                for_discovery=False,
                service_description=service.description)
        except MKParseFunctionError as e:
            x = e.exc_info()
            # re-raise the original exception to not destroy the trace. This may raise a MKCounterWrapped
            # exception which needs to lead to a skipped check instead of a crash
            # TODO CMK-3729, PEP-3109
            new_exception = x[0](x[1])
            new_exception.__traceback__ = x[2]  # type: ignore[attr-defined]
            raise new_exception

        # TODO: Move this to a helper function
        if section_content is None:  # No data for this check type
            return False

        # Call the actual check function
        item_state.reset_wrapped_counters()
        raw_result = check_function(service.item, determine_check_params(service.parameters),
                                    section_content)
        result = sanitize_check_result(raw_result)
        item_state.raise_counter_wrap()

    except item_state.MKCounterWrapped as e:
        # handle check implementations that do not yet support the
        # handling of wrapped counters via exception on their own.
        # Do not submit any check result in that case:
        console.verbose("%-20s PEND - Cannot compute check result: %s\n",
                        six.ensure_str(service.description), e)
        # Don't submit to core - we're done.
        return True

    except MKTimeout:
        raise

    except Exception:
        if cmk.utils.debug.enabled():
            raise
        result = 3, cmk.base.crash_reporting.create_check_crash_dump(
            hostname, service.check_plugin_name, service.item,
            is_manual_check(hostname, service.check_plugin_name, service.item), service.parameters,
            service.description, section_content), []

    _submit_check_result(
        hostname,
        service.description,
        result,
        determine_cache_info(multi_host_sections, section_name),
    )
    return True
def _get_aggregated_result(
    *,
    parsed_sections_broker: ParsedSectionsBroker,
    hostname: HostName,
    ipaddress: Optional[HostAddress],
    service: Service,
    used_params: LegacyCheckParameters,
) -> AggregatedResult:
    legacy_check_plugin_name = config.legacy_check_plugin_names.get(service.check_plugin_name)
    if legacy_check_plugin_name is None:
        return AggregatedResult(
            submit=True,
            data_received=True,
            result=CHECK_NOT_IMPLEMENTED,
            cache_info=None,
        )

    check_function = config.check_info[legacy_check_plugin_name].get("check_function")
    if check_function is None:
        return AggregatedResult(
            submit=True,
            data_received=True,
            result=CHECK_NOT_IMPLEMENTED,
            cache_info=None,
        )

    section_name = legacy_check_plugin_name.split('.')[0]
    main_check_info = config.check_info.get(section_name, {})

    section_content = None
    multi_host_sections = _MultiHostSections(parsed_sections_broker)
    mgmt_board_info = main_check_info.get("management_board") or LEGACY_HOST_PRECEDENCE
    source_type = SourceType.MANAGEMENT if mgmt_board_info == LEGACY_MGMT_ONLY else SourceType.HOST
    try:
        section_content = multi_host_sections.get_section_content(
            HostKey(hostname, ipaddress, source_type),
            mgmt_board_info,
            section_name,
            for_discovery=False,
            cluster_node_keys=config.get_config_cache().get_clustered_service_node_keys(
                hostname,
                source_type,
                service.description,
            ),
            check_legacy_info=config.check_info,
        )

        if section_content is None:  # No data for this check type
            return AggregatedResult(
                submit=False,
                data_received=False,
                result=RECEIVED_NO_DATA,
                cache_info=None,
            )

        # Call the actual check function
        item_state.reset_wrapped_counters()
        raw_result = check_function(service.item, used_params, section_content)
        result = _sanitize_check_result(raw_result)
        item_state.raise_counter_wrap()

    except item_state.MKCounterWrapped as exc:
        # handle check implementations that do not yet support the
        # handling of wrapped counters via exception on their own.
        # Do not submit any check result in that case:
        return AggregatedResult(
            submit=False,
            data_received=True,
            result=(0, f"Cannot compute check result: {exc}\n", []),
            cache_info=None,
        )

    except MKTimeout:
        raise

    except Exception:
        if cmk.utils.debug.enabled():
            raise
        result = 3, cmk.base.crash_reporting.create_check_crash_dump(
            host_name=hostname,
            service_name=service.description,
            plugin_name=service.check_plugin_name,
            plugin_kwargs={
                "item": service.item,
                "params": used_params,
                "section_content": section_content
            },
            is_manual=service.id() in check_table.get_check_table(hostname, skip_autochecks=True),
        ), []

    return AggregatedResult(
        submit=True,
        data_received=True,
        result=result,
        cache_info=multi_host_sections.legacy_determine_cache_info(SectionName(section_name)),
    )
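# Illustration only: a hypothetical consumer sketch for _get_aggregated_result()
# above (the surrounding scheduling code is not part of this excerpt). The
# AggregatedResult fields steer what happens next: 'submit' decides whether a
# check result is sent to the core at all, 'data_received' distinguishes "no
# data" from "data seen but result pending", and 'result'/'cache_info' carry
# the payload for submission.
def _example_consume(aggregated):
    # 'aggregated' is assumed to be an AggregatedResult as returned above,
    # with 'result' being a (state, infotext, perfdata) tuple.
    if not aggregated.data_received:
        return "no agent data for this service"
    if not aggregated.submit:
        return "nothing to submit (e.g. counters are still initialising)"
    state, infotext, perfdata = aggregated.result
    return "submit state %d: %s (%d metrics)" % (state, infotext, len(perfdata))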