def test_cluster_check(monkeypatch):
    """The cluster variant must aggregate a single node into the known results."""
    counter_names = (
        "host_checks",
        "service_checks",
        "forks",
        "connections",
        "requests",
        "log_messages",
    )
    # Each call returns a fresh store so the check sees pristine counters.
    monkeypatch.setattr(
        livestatus_status,
        "get_value_store",
        lambda: {name: [1, 2] for name in counter_names},
    )
    # Freeze the clock so the computed rates are deterministic.
    monkeypatch.setattr(
        livestatus_status.time,
        "time",
        lambda: 581785200,
    )

    results = list(
        livestatus_status.cluster_check_livestatus_status(
            "heute",
            Parameters(livestatus_status.livestatus_status_default_levels),
            {"node1": PARSED_STATUS},
            {"node1": PARSED_SSL},
        ))

    assert results == _RESULTS
def _check_preview_table_row(
    *,
    host_config: config.HostConfig,
    ip_address: Optional[HostAddress],
    service: Service,
    check_source: str,
    parsed_sections_broker: ParsedSectionsBroker,
    found_on_nodes: List[HostName],
    value_store_manager: ValueStoreManager,
) -> CheckPreviewEntry:
    """Build one row of the service discovery preview table.

    Looks up the check plugin and its effective parameters for *service* and —
    unless the service comes from a source that cannot be evaluated offline —
    executes the check once to obtain a preview state and output.
    """
    plugin = agent_based_register.get_check_plugin(service.check_plugin_name)
    params = _preview_params(host_config.hostname, service, plugin, check_source)

    if check_source in ['legacy', 'active', 'custom']:
        # These sources need a running core; show a WAITING placeholder instead.
        exitcode = None
        output = u"WAITING - %s check, cannot be done offline" % check_source.title()
        ruleset_name: Optional[RulesetName] = None
    else:
        ruleset_name = (str(plugin.check_ruleset_name)
                        if plugin and plugin.check_ruleset_name else None)
        wrapped_params = (Parameters(wrap_parameters(params))
                          if plugin and plugin.check_default_parameters is not None else None)
        # Actually run the check once to preview its result. The lambda binds
        # wrapped_params as a default so the value is captured eagerly.
        exitcode, output, _perfdata = checking.get_aggregated_result(
            parsed_sections_broker,
            host_config,
            ip_address,
            service,
            plugin,
            lambda p=wrapped_params: p,  # type: ignore[misc]  # "type of lambda"
            value_store_manager=value_store_manager,
            persist_value_store_changes=False,  # never during discovery
        ).result

    # Service discovery never uses the perfdata in the check table. That entry
    # is constantly discarded, yet passed around(back and forth) as part of the
    # discovery result in the request elements. Some perfdata VALUES are not parsable
    # by ast.literal_eval such as "inf" it lead to ValueErrors. Thus keep perfdata empty
    perfdata: List[MetricTuple] = []

    return (
        _preview_check_source(host_config.hostname, service, check_source),
        str(service.check_plugin_name),
        ruleset_name,
        service.item,
        service.parameters,
        params,
        service.description,
        exitcode,
        output,
        perfdata,
        service.service_labels.to_dict(),
        found_on_nodes,
    )
def _final_read_only_check_parameters(entries: LegacyCheckParameters) -> Parameters:
    """Resolve timespecific parameters and expose them as a read-only mapping."""
    if isinstance(entries, cmk.base.config.TimespecificParamList):
        raw_parameters = time_resolved_check_parameters(entries)
    else:
        raw_parameters = entries
    # TODO (mo): this needs cleaning up, once we've gotten rid of tuple parameters.
    # wrap_parameters is a no-op for dictionaries. For auto-migrated plugins
    # expecting tuples, they will be unwrapped again by a decorator of the
    # original check_function.
    return Parameters(wrap_parameters(raw_parameters))
def determine_check_params(entries: LegacyCheckParameters) -> Parameters:
    """Compute the effective check parameters and expose them read-only."""
    # TODO (mo): obviously, we do not want to keep legacy_determine_check_params
    # around in the long run. This needs cleaning up, once we've gotten
    # rid of tuple parameters.
    legacy_params = legacy_determine_check_params(entries)
    # wrap_parameters is a no-op for dictionaries. Tuple parameters of
    # auto-migrated plugins are unwrapped again by a decorator of the
    # original check_function.
    return Parameters(wrap_parameters(legacy_params))
def _final_read_only_check_parameters(
        entries: Union[TimespecificParameters, LegacyCheckParameters]) -> Parameters:
    """Evaluate timespecific parameters and expose them as a read-only mapping."""
    if isinstance(entries, TimespecificParameters):
        raw_parameters = entries.evaluate(cmk.base.core.timeperiod_active)
    else:
        raw_parameters = entries
    # TODO (mo): this needs cleaning up, once we've gotten rid of tuple parameters.
    # wrap_parameters is a no-op for dictionaries. For auto-migrated plugins
    # expecting tuples, they will be unwrapped again by a decorator of the
    # original check_function.
    return Parameters(wrap_parameters(raw_parameters))
def test_check_new_counters_in_oldstabe(fetcher_checker_counters):
    """The fetcher/checker counters must also show up on the "oldstable" version."""
    # NOTE(review): "oldstabe" in the test name looks like a typo for "oldstable".
    value_store = {
        name: [1, 2]
        for name in (
            "host_checks",
            "service_checks",
            "forks",
            "connections",
            "requests",
            "log_messages",
        )
    }
    results = list(
        livestatus_status._generate_livestatus_results(
            "oldstable",
            Parameters(livestatus_status.livestatus_status_default_levels),
            PARSED_STATUS,
            PARSED_SSL,
            value_store,
            581785200,
        ))

    for expected in fetcher_checker_counters:
        assert expected in results
def test_check():
    """The check on the "heute" site must yield exactly the precomputed results."""
    value_store = {
        name: [1, 2]
        for name in (
            "host_checks",
            "service_checks",
            "forks",
            "connections",
            "requests",
            "log_messages",
        )
    }
    results = list(
        livestatus_status._generate_livestatus_results(
            "heute",
            Parameters(livestatus_status.livestatus_status_default_levels),
            PARSED_STATUS,
            PARSED_SSL,
            value_store,
            581785200,
        ))

    assert results == _RESULTS
def test_check():
    """Run the check against the fixed parsed status and compare all results.

    The value store is pre-seeded with counter pairs and the timestamp is
    fixed, so every computed rate is deterministic.
    """
    yielded_results = list(
        livestatus_status._generate_livestatus_results(
            "heute",
            Parameters(livestatus_status.livestatus_status_default_levels),
            PARSED_STATUS,
            PARSED_SSL,
            {
                "host_checks": [1, 2],
                "service_checks": [1, 2],
                "forks": [1, 2],
                "connections": [1, 2],
                "requests": [1, 2],
                "log_messages": [1, 2],
            },
            581785200,
        ))
    # Expected output: one Result/Metric pair per monitored quantity, in the
    # exact order the check yields them.
    assert yielded_results == [
        Result(state=state.OK, summary='Livestatus version: 2019.05.31'),
        Result(state=state.OK, summary='Host checks: 0.0/s'),
        Metric('host_checks', 7.615869237677187e-05),
        Result(state=state.OK, summary='Service checks: 0.0/s'),
        Metric('service_checks', 0.0002685888198403617),
        Result(state=state.OK, notice='Process creations: -0.0/s'),
        Metric('forks', -3.4376948802370615e-09),
        Result(state=state.OK, notice='Livestatus connects: 0.0/s'),
        Metric('connections', 6.261761224351807e-06),
        Result(state=state.OK, notice='Livestatus requests: 0.0/s'),
        Metric('requests', 8.090614900637924e-06),
        Result(state=state.OK, notice='Log messages: 0.0/s'),
        Metric('log_messages', 1.5985281193102335e-06),
        Result(state=state.OK, notice='Average check latency: 0.000s'),
        Metric('average_latency_generic', 2.23711e-06, levels=(30.0, 60.0)),
        Result(state=state.OK, notice='Average Checkmk latency: 0.000s'),
        Metric('average_latency_cmk', 2.01088e-05, levels=(30.0, 60.0)),
        Result(state=state.OK, notice='Average fetcher latency: 0.000s'),
        Metric('average_latency_fetcher', 2.01088e-05, levels=(30.0, 60.0)),
        Result(state=state.OK, notice='Check helper usage: 1.43%'),
        Metric('helper_usage_generic', 1.42967, levels=(60.0, 90.0)),
        Result(state=state.OK, notice='Checkmk helper usage: 0.04%'),
        Metric('helper_usage_cmk', 0.043827200000000004, levels=(60.0, 90.0)),
        Result(state=state.OK, notice='Fetcher helper usage: 0.04%'),
        Metric('helper_usage_fetcher', 0.043827200000000004, levels=(40.0, 80.0)),
        Result(state=state.OK, notice='Checker helper usage: 0.04%'),
        Metric('helper_usage_checker', 0.043827200000000004, levels=(40.0, 80.0)),
        Result(state=state.OK, notice='Livestatus usage: 0.00%'),
        Metric('livestatus_usage', 3.46e-321, levels=(80.0, 90.0)),
        Result(state=state.OK, notice='Livestatus overflow rate: 0.0/s'),
        Metric('livestatus_overflows_rate', 0.0, levels=(0.01, 0.02)),
        Result(state=state.OK, notice='Hosts: 2.00'),
        Metric('monitored_hosts', 2.0),
        Result(state=state.OK, notice='Services: 513.00'),
        Metric('monitored_services', 513.0),
        Result(state=state.OK, notice='Core version: Checkmk 2019.05.31'),
        Result(
            state=state.OK,
            notice='Site certificate valid until Oct 01 3017',
        ),
        Result(
            state=state.OK,
            notice='Expiring in: 1029 years 363 days',
        ),
        Metric('site_cert_days', 375948.7452314815),
    ]
def test_parameters_features():
    """Parameters must behave like a read-only Mapping."""
    empty = Parameters({})
    filled = Parameters({'olaf': 'schneemann'})

    # Sizing and truthiness follow the wrapped dict.
    assert len(empty) == 0
    assert len(filled) == 1
    assert not empty
    assert filled

    # Membership and lookups.
    assert 'olaf' not in empty
    assert 'olaf' in filled
    assert empty.get('olaf') is None
    assert filled.get('olaf') == 'schneemann'
    with pytest.raises(KeyError):
        _ = empty['olaf']
    assert filled['olaf'] == 'schneemann'

    # Iteration views behave like the dict's views.
    assert list(empty) == list(empty.keys()) == list(empty.values()) == list(
        empty.items()) == []
    assert list(filled) == list(filled.keys()) == ['olaf']
    assert list(filled.values()) == ['schneemann']
    assert list(filled.items()) == [('olaf', 'schneemann')]
def test_paramters_invalid(data):
    """Anything other than a dict must be rejected with a TypeError."""
    # NOTE(review): "paramters" in the test name looks like a typo for "parameters".
    with pytest.raises(TypeError, match="expected dict"):
        Parameters(data)
def get_check_preview(
    *,
    host_name: HostName,
    max_cachefile_age: int,
    use_cached_snmp_data: bool,
    on_error: str,
) -> Tuple[CheckPreviewTable, QualifiedDiscovery[HostLabel]]:
    """Get the list of service of a host or cluster and guess the current state of
    all services if possible"""
    config_cache = config.get_config_cache()
    host_config = config_cache.get_host_config(host_name)

    # Clusters have no own address; their nodes are contacted instead.
    ip_address = None if host_config.is_cluster else config.lookup_ip_address(
        host_config)

    discovery_parameters = DiscoveryParameters(
        on_error=on_error,
        load_labels=True,
        save_labels=False,
        only_host_labels=False,
    )

    _set_cache_opts_of_checkers(use_cached_snmp_data=use_cached_snmp_data)

    # Fetch and parse all raw data needed to evaluate the services.
    parsed_sections_broker, _source_results = make_broker(
        config_cache=config_cache,
        host_config=host_config,
        ip_address=ip_address,
        mode=Mode.DISCOVERY,
        file_cache_max_age=max_cachefile_age,
        selected_sections=NO_SELECTION,
        fetcher_messages=(),
        force_snmp_cache_refresh=not use_cached_snmp_data,
        on_scan_error=on_error,
    )

    grouped_services, host_label_result = _get_host_services(
        host_config,
        ip_address,
        parsed_sections_broker,
        discovery_parameters,
    )

    table: CheckPreviewTable = []
    for check_source, services_with_nodes in grouped_services.items():
        for service, found_on_nodes in services_with_nodes:
            plugin = agent_based_register.get_check_plugin(
                service.check_plugin_name)
            params = _preview_params(host_name, service, plugin, check_source)

            if check_source in ['legacy', 'active', 'custom']:
                # These sources need a running core; show a WAITING placeholder.
                exitcode = None
                output = u"WAITING - %s check, cannot be done offline" % check_source.title()
                ruleset_name: Optional[RulesetName] = None
            else:
                ruleset_name = (str(plugin.check_ruleset_name)
                                if plugin and plugin.check_ruleset_name else None)
                wrapped_params = (
                    Parameters(wrap_parameters(params))
                    if plugin and plugin.check_default_parameters is not None else None)
                # Run the check once to preview its result. The lambda binds
                # wrapped_params as a default so the value is captured eagerly.
                exitcode, output, _perfdata = checking.get_aggregated_result(
                    parsed_sections_broker,
                    host_config,
                    ip_address,
                    service,
                    plugin,
                    lambda p=wrapped_params: p,  # type: ignore[misc]  # "type of lambda"
                ).result

            # Service discovery never uses the perfdata in the check table. That entry
            # is constantly discarded, yet passed around(back and forth) as part of the
            # discovery result in the request elements. Some perfdata VALUES are not parsable
            # by ast.literal_eval such as "inf" it lead to ValueErrors. Thus keep perfdata empty
            perfdata: List[MetricTuple] = []

            table.append((
                _preview_check_source(host_name, service, check_source),
                str(service.check_plugin_name),
                ruleset_name,
                service.item,
                service.parameters,
                params,
                service.description,
                exitcode,
                output,
                perfdata,
                service.service_labels.to_dict(),
                found_on_nodes,
            ))

    return table, host_label_result
def test_sap_hana_license_check(cur_item, result):
    """Each item of the parsed section must yield exactly the expected results."""
    yielded = list(
        sap_hana_license.check_sap_hana_license(cur_item, Parameters({}), SECTION))
    assert yielded == result
def test_parameters_features():
    """Parameters must behave like a read-only Mapping with a helpful repr."""
    empty = Parameters({})
    filled = Parameters({"olaf": "schneemann"})

    # The repr exposes the wrapped dict for debugging.
    assert repr(filled) == "Parameters({'olaf': 'schneemann'})"

    # Sizing and truthiness follow the wrapped dict.
    assert len(empty) == 0
    assert len(filled) == 1
    assert not empty
    assert filled

    # Membership and lookups.
    assert "olaf" not in empty
    assert "olaf" in filled
    assert empty.get("olaf") is None
    assert filled.get("olaf") == "schneemann"
    with pytest.raises(KeyError):
        _ = empty["olaf"]
    assert filled["olaf"] == "schneemann"

    # Iteration views behave like the dict's views.
    assert list(empty) == list(empty.keys()) == list(empty.values()) == list(
        empty.items()) == []
    assert list(filled) == list(filled.keys()) == ["olaf"]
    assert list(filled.values()) == ["schneemann"]
    assert list(filled.items()) == [("olaf", "schneemann")]