def test_get_effective_service_level(monkeypatch):
    """Service level rules: service rules win, first matching host rule applies, default is 0."""
    scenario = Scenario().add_host("testhost1")
    scenario.add_host("testhost2")
    scenario.add_host("testhost3")
    scenario.set_ruleset(
        "host_service_levels",
        [
            (10, [], ["testhost2"], {}),
            (2, [], ["testhost2"], {}),
        ],
    )
    scenario.set_ruleset(
        "service_service_levels",
        [
            (33, [], ["testhost1"], ["CPU load$"], {}),
        ],
    )
    scenario.apply(monkeypatch)

    check_api_utils.set_service("cpu.loads", "CPU load")

    # testhost1: service rule matches -> 33
    # testhost2: no service rule, first matching host rule -> 10
    # testhost3: no rule at all -> 0
    for hostname, expected_level in [
        ("testhost1", 33),
        ("testhost2", 10),
        ("testhost3", 0),
    ]:
        check_api_utils.set_hostname(hostname)
        assert check_api.get_effective_service_level() == expected_level
def get_precompiled_check_table(hostname, remove_duplicates=True, filter_mode=None,
                                skip_ignored=True):
    # type: (str, bool, Optional[str], bool) -> List[Service]
    """Return the precompiled check table of a host.

    The precompiled check table is somehow special compared to the regular
    check table:

    a) It is sorted by the service dependencies (which are only relevant for
       Nagios). The sorting is important here to send the state updates to
       Nagios in the correct order. Sending the updates in this order gives
       Nagios a consistent state in a shorter time.

    b) More important: Some special checks pre-compute a new set of parameters
       using a plugin specific precompile_params function. Its purpose is to
       perform time consuming ruleset evaluations once without the need to
       perform it during each check execution.

    The effective check parameters are calculated in these steps:

    1. Read from config
      a) autochecks + cmk_base.config.compute_check_parameters()
      b) static checks

    2. Execute the precompile params function
      The precompile_params function can base on the "params" from a static
      check or autocheck and computes a new "params". This is the last step
      that may be cached across the single executions.

    3. Execute the check
      During check execution the check parameters are updated once more with
      checking.determine_check_params() right before executing the check.
    """
    sorted_checks = _get_sorted_check_table(hostname, remove_duplicates,
                                            filter_mode=filter_mode,
                                            skip_ignored=skip_ignored)

    precompiled_services = []  # type: List[Service]
    for svc in sorted_checks:
        # Make the service context globally available to the precompile function
        check_api_utils.set_service(svc.check_plugin_name, svc.description)
        item_state.set_item_state_prefix(svc.check_plugin_name, svc.item)

        precompiled_params = get_precompiled_check_parameters(
            hostname, svc.item, svc.parameters, svc.check_plugin_name)

        precompiled_services.append(
            Service(svc.check_plugin_name, svc.item, svc.description, precompiled_params,
                    svc.service_labels))

    return precompiled_services
def get_precompiled_check_table(hostname, remove_duplicates=True, filter_mode=None,
                                skip_ignored=True):
    """Build the precompiled check table for a host.

    Each entry's parameters are run through the plugin specific
    precompile_params mechanism once, so that expensive ruleset evaluations do
    not have to be repeated during every single check execution.
    """
    sorted_checks = get_sorted_check_table(
        hostname, remove_duplicates, filter_mode=filter_mode, skip_ignored=skip_ignored)

    precompiled = []
    for check_plugin_name, item, params, description, _unused_deps in sorted_checks:
        # Make the service context globally available to the precompile function
        check_api_utils.set_service(check_plugin_name, description)
        item_state.set_item_state_prefix(check_plugin_name, item)

        precompiled_params = get_precompiled_check_parameters(hostname, item, params,
                                                              check_plugin_name)

        # The dependencies are not needed while checking, so they are dropped here
        precompiled.append((check_plugin_name, item, precompiled_params, description))

    return precompiled
def get_precompiled_check_table(hostname, remove_duplicates=True, filter_mode=None,
                                skip_ignored=True):
    """Return the precompiled check table of a host.

    The precompiled check table is somehow special compared to the regular
    check table:

    a) It is sorted by the service dependencies (which are only relevant for
       Nagios).

    b) More important: Some special checks pre-compute a new set of parameters
       using a plugin specific precompile_params function. Its purpose is to
       perform time consuming ruleset evaluations once without the need to
       perform it during each check execution.

    The effective check parameters are calculated in these steps:

    1. Read from config
      a) autochecks + cmk_base.config.compute_check_parameters()
      b) static checks

    2. Execute the precompile params function
      The precompile_params function can base on the "params" from a static
      check or autocheck and computes a new "params". This is the last step
      that may be cached across the single executions.

    3. Execute the check
      During check execution the check parameters are updated once more with
      checking.determine_check_params() right before executing the check.
    """
    sorted_checks = get_sorted_check_table(hostname, remove_duplicates,
                                           filter_mode=filter_mode,
                                           skip_ignored=skip_ignored)

    precompiled = []
    for check_plugin_name, item, params, description in sorted_checks:
        # Make the service context globally available to the precompile function
        check_api_utils.set_service(check_plugin_name, description)
        item_state.set_item_state_prefix(check_plugin_name, item)

        precompiled_params = get_precompiled_check_parameters(hostname, item, params,
                                                              check_plugin_name)

        # The dependencies are not needed while checking
        precompiled.append((check_plugin_name, item, precompiled_params, description))

    return precompiled
def execute_check(multi_host_sections, hostname, ipaddress, check_plugin_name, item, params,
                  description):
    """Execute a single check on a host and submit the result to the core.

    Returns None when the service is currently outside its configured check
    period, False when there is no section data for this check type, and True
    otherwise (a result or a crash-dump result was produced; it is submitted
    unless a wrapped-counter exception suppressed submission).
    """
    # Make a bit of context information globally available, so that functions
    # called by checks now this context
    check_api_utils.set_service(check_plugin_name, description)
    item_state.set_item_state_prefix(check_plugin_name, item)

    # Skip checks that are not in their check period
    period = config.check_period_of(hostname, description)
    if period and not cmk_base.core.check_timeperiod(period):
        console.verbose(
            "Skipping service %s: currently not in timeperiod %s.\n" % (description, period))
        return None
    elif period:
        console.vverbose("Service %s: timeperiod %s is currently active.\n" %
                         (description, period))

    section_name = cmk_base.check_utils.section_name_of(check_plugin_name)

    dont_submit = False
    section_content = None
    try:
        # TODO: There is duplicate code with discovery._execute_discovery(). Find a common place!
        try:
            section_content = multi_host_sections.get_section_content(
                hostname,
                ipaddress,
                section_name,
                for_discovery=False,
                service_description=description)
        except MKParseFunctionError as e:
            x = e.exc_info()
            # re-raise the original exception to not destory the trace. This may raise a
            # MKCounterWrapped exception which need to lead to a skipped check instead of a
            # crash
            # NOTE: Python 2 three-expression raise form — re-raises with the
            # original traceback. Not valid syntax on Python 3.
            raise x[0], x[1], x[2]

        # TODO: Move this to a helper function
        if section_content is None:  # No data for this check type
            return False

        # In case of SNMP checks but missing agent response, skip this check.
        # Special checks which still need to be called even with empty data
        # may declare this.
        if not section_content and cmk_base.check_utils.is_snmp_check(check_plugin_name) \
           and not config.check_info[check_plugin_name]["handle_empty_info"]:
            return False

        check_function = config.check_info[check_plugin_name].get("check_function")
        if check_function is None:
            # Fallback: report UNKNOWN when the plugin provides no check function
            check_function = lambda item, params, section_content: (
                3, 'UNKNOWN - Check not implemented')

        # Call the actual check function
        item_state.reset_wrapped_counters()

        raw_result = check_function(item, determine_check_params(params), section_content)
        result = sanitize_check_result(raw_result,
                                       cmk_base.check_utils.is_snmp_check(check_plugin_name))
        item_state.raise_counter_wrap()

    except item_state.MKCounterWrapped as e:
        # handle check implementations that do not yet support the
        # handling of wrapped counters via exception on their own.
        # Do not submit any check result in that case:
        console.verbose("%-20s PEND - Cannot compute check result: %s\n" % (description, e))
        dont_submit = True

    except MKTimeout:
        # Timeouts must propagate to the caller untouched
        raise

    except Exception as e:
        if cmk.utils.debug.enabled():
            raise
        # Any other failure becomes an UNKNOWN result carrying a crash dump
        result = 3, cmk_base.crash_reporting.create_crash_dump(
            hostname, check_plugin_name, item,
            is_manual_check(hostname, check_plugin_name, item), params, description,
            section_content), []

    if not dont_submit:
        # Now add information about the age of the data in the agent
        # sections. This is in data_sources.g_agent_cache_info. For clusters we
        # use the oldest of the timestamps, of course.
        oldest_cached_at = None
        largest_interval = None

        # None-aware minimum: treat a missing value as "no constraint yet"
        def minn(a, b):
            if a is None:
                return b
            elif b is None:
                return a
            return min(a, b)

        for host_sections in multi_host_sections.get_host_sections().values():
            section_entries = host_sections.cache_info
            if section_name in section_entries:
                cached_at, cache_interval = section_entries[section_name]
                oldest_cached_at = minn(oldest_cached_at, cached_at)
                # NOTE(review): max(None, x) returns x only under Python 2's
                # ordering of None; this line would raise TypeError on Python 3
                largest_interval = max(largest_interval, cache_interval)

        _submit_check_result(
            hostname,
            description,
            result,
            cached_at=oldest_cached_at,
            cache_interval=largest_interval)

    return True