def _agent_cache_file_age(
    hostname: HostName,
    check_plugin_name: CheckPluginNameStr,
) -> Optional[float]:
    """Return the age (in seconds) of the agent cache file for *hostname*.

    Returns None if no cache file exists.

    Raises:
        MKGeneralException: if *hostname* refers to a cluster, which has no
            own agent data.
    """
    host_config = _config.get_config_cache().get_host_config(hostname)
    if host_config.is_cluster:
        raise MKGeneralException("get_agent_data_time() not valid for cluster")

    # NOTE: This is a workaround for the 'old' API and will not be correct
    # for the new one. This is a check plugin name, and the property of being
    # 'TCP' or 'SNMP' is a property of the section.
    # This function is deprecated for new plugins.
    # For old-style plugins, plugin and section name are same, so check the
    # corresponding section:
    section_name_str = _cmk_utils.check_utils.section_name_of(check_plugin_name)
    section = _agent_based_register.get_section_plugin(_SectionName(section_name_str))

    # SNMP sections (recognizable by their OID "trees") are cached per
    # section; plain agent data is cached in a single file per host.
    # Use os.path.join instead of manual "%s/%s" concatenation.
    if hasattr(section, "trees"):
        cachefile = os.path.join(_paths.tcp_cache_dir, "%s.%s" % (hostname, section_name_str))
    else:
        cachefile = os.path.join(_paths.tcp_cache_dir, str(hostname))

    if os.path.exists(cachefile):
        return _cmk_utils.cachefile_age(cachefile)
    return None
def test_inventory_sap_hana_instance_status(info, expected_result):
    """Discovery of sap_hana_instance_status must yield the expected services."""
    section_name = SectionName("sap_hana_instance_status")
    section = register.get_section_plugin(section_name).parse_function(info)
    plugin_name = CheckPluginName("sap_hana_instance_status")
    plugin = register.get_check_plugin(plugin_name)
    # A missing plugin must fail the test instead of silently passing
    # (previously `if plugin:` skipped all assertions; the `assert plugin`
    # pattern is used by the other tests in this file).
    assert plugin
    assert list(plugin.discovery_function(section)) == expected_result
def _extract_snmp_sections(
    inf_info: Dict[InventoryPluginNameStr, InventoryInfo],
    plugin_file_lookup: Dict[str, str],
) -> None:
    """Auto-migrate legacy SNMP inventory plugins to new-API section plugins.

    For every legacy inventory plugin declaring 'snmp_info', create and
    register a new-API SNMP section plugin, unless a proper SNMP section is
    already registered under that name.  Migration failures raise in debug
    mode and are logged as console warnings otherwise.
    """
    # BUG FIX: the loop previously iterated the undefined name 'inv_info'
    # instead of the 'inf_info' parameter (NameError at runtime).
    for plugin_name, plugin_info in sorted(inf_info.items()):
        if 'snmp_info' not in plugin_info:
            continue
        section_name = section_name_of(plugin_name)
        # Skip plugins whose section has already been migrated properly.
        if isinstance(agent_based_register.get_section_plugin(SectionName(section_name)),
                      SNMPSectionPlugin):
            continue

        # The legacy scan function may rely on helpers from include files.
        fallback_files = ([_include_file_path(i) for i in plugin_info.get('includes', [])] +
                          [plugin_file_lookup[plugin_name]])
        try:
            agent_based_register.add_section_plugin(
                create_snmp_section_plugin_from_legacy(
                    section_name,
                    {},
                    plugin_info['snmp_scan_function'],
                    plugin_info['snmp_info'],
                    scan_function_fallback_files=fallback_files,
                ))
        except (NotImplementedError, KeyError, AssertionError, ValueError):
            msg = config.AUTO_MIGRATION_ERR_MSG % ('section', plugin_name)
            if cmk.utils.debug.enabled():
                raise MKGeneralException(msg)
            console.warning(msg)
def make_broker(
    *,
    fetched: Sequence[Tuple[Source, FetcherMessage]],
    selected_sections: SectionNameCollection,
    file_cache_max_age: cache.MaxAge,
) -> Tuple[ParsedSectionsBroker, SourceResults]:
    """Collect the fetched host sections and wrap them in a ParsedSectionsBroker.

    Returns the broker together with the per-source collection results.
    """
    collected_host_sections, results = _collect_host_sections(
        fetched=fetched,
        selected_sections=selected_sections,
        file_cache_max_age=file_cache_max_age,
    )

    # One (resolver, parser) pair per host key.
    broker_entries = {}
    for host_key, host_sections in collected_host_sections.items():
        resolver = ParsedSectionsResolver(
            section_plugins=[
                agent_based_register.get_section_plugin(section_name)
                for section_name in host_sections.sections
            ],
        )
        parser = SectionsParser(host_sections=host_sections, host_name=host_key.hostname)
        broker_entries[host_key] = (resolver, parser)

    return ParsedSectionsBroker(broker_entries), results
def _extract_snmp_sections(
    inf_info: Dict[InventoryPluginNameStr, InventoryInfo],
    plugin_file_lookup: Dict[str, str],
) -> List[str]:
    """Auto-migrate legacy SNMP inventory plugins to new-API section plugins.

    For every legacy inventory plugin declaring 'snmp_info', create and
    register a new-API SNMP section plugin, unless a proper SNMP section is
    already registered under that name.

    Returns:
        A list of error messages for plugins that could not be migrated
        (raises MKGeneralException instead when debug mode is enabled).
    """
    errors = []
    # BUG FIX: the loop previously iterated the undefined name 'inv_info'
    # instead of the 'inf_info' parameter (NameError at runtime).
    for plugin_name, plugin_info in sorted(inf_info.items()):
        if 'snmp_info' not in plugin_info:
            continue
        section_name = section_name_of(plugin_name)
        # Skip plugins whose section has already been migrated properly.
        if isinstance(agent_based_register.get_section_plugin(SectionName(section_name)),
                      SNMPSectionPlugin):
            continue

        # The legacy scan function may rely on helpers from include files.
        fallback_files = ([_include_file_path(i) for i in plugin_info.get('includes', [])] +
                          [plugin_file_lookup[plugin_name]])
        try:
            agent_based_register.add_section_plugin(
                create_snmp_section_plugin_from_legacy(
                    section_name,
                    {},
                    plugin_info['snmp_scan_function'],
                    plugin_info['snmp_info'],
                    scan_function_fallback_files=fallback_files,
                    # We have to validate, because we read inventory plugin files
                    # directly, and do not know whether they changed.
                    validate_creation_kwargs=True,
                ))
        except (NotImplementedError, KeyError, AssertionError, ValueError):
            msg = config.AUTO_MIGRATION_ERR_MSG % ('section', plugin_name)
            if cmk.utils.debug.enabled():
                raise MKGeneralException(msg)
            errors.append(msg)
    return errors
def test_check_sap_hana_db_status(item, info, expected_result):
    """Check results of sap_hana_db_status must match the expectation."""
    section_name = SectionName("sap_hana_db_status")
    section = register.get_section_plugin(section_name).parse_function(info)
    plugin_name = CheckPluginName("sap_hana_db_status")
    plugin = register.get_check_plugin(plugin_name)
    # A missing plugin must fail the test instead of silently passing
    # (previously `if plugin:` skipped all assertions; the `assert plugin`
    # pattern is used by the other tests in this file).
    assert plugin
    assert list(plugin.check_function(item, section)) == expected_result
def test_check_sap_hana_diskusage(value_store_patch, item, info, expected_result):
    """Check results of sap_hana_diskusage (default params) must match the expectation."""
    section_name = SectionName("sap_hana_diskusage")
    section = register.get_section_plugin(section_name).parse_function(info)
    plugin_name = CheckPluginName("sap_hana_diskusage")
    plugin = register.get_check_plugin(plugin_name)
    # A missing plugin must fail the test instead of silently passing
    # (previously `if plugin:` skipped all assertions; the `assert plugin`
    # pattern is used by the other tests in this file).
    assert plugin
    assert list(plugin.check_function(item, {}, section)) == expected_result
def test_cmciii_leakage_sensors(status, position, expected):
    """cmciii_leakage check results for a leakage sensor must match the expectation."""
    item, raw_info = _leakage_info(status, position)

    parser = agent_based_register.get_section_plugin(SectionName('cmciii'))
    assert parser
    check = agent_based_register.get_check_plugin(CheckPluginName('cmciii_leakage'))
    assert check

    parsed = parser.parse_function(raw_info)
    results = check.check_function(item=item, params={}, section=parsed)
    assert list(results) == expected
def make_broker(
    *,
    config_cache: ConfigCache,
    host_config: HostConfig,
    ip_address: Optional[HostAddress],
    mode: Mode,
    selected_sections: SectionNameCollection,
    file_cache_max_age: cache.MaxAge,
    fetcher_messages: Sequence[FetcherMessage],
    force_snmp_cache_refresh: bool,
    on_scan_error: OnError,
) -> Tuple[ParsedSectionsBroker, SourceResults]:
    # Build the data sources for the host (cluster hosts get one source set
    # per node), optionally fetch the raw data, and wrap the collected host
    # sections in a ParsedSectionsBroker.  Returns the broker together with
    # the per-source collection results.

    # A host with nodes is a cluster; its data comes from the node sources.
    sources = (make_sources(
        host_config,
        ip_address,
        selected_sections=selected_sections,
        force_snmp_cache_refresh=force_snmp_cache_refresh,
        on_scan_error=on_scan_error,
    ) if host_config.nodes is None else make_cluster_sources(
        config_cache,
        host_config,
    ))

    if not fetcher_messages:
        # Note: *Not* calling `fetch_all(sources)` here is probably buggy.
        # Note: `fetch_all(sources)` is almost always called in similar
        #       code in discovery and inventory. The only two exceptions
        #       are `cmk.base.agent_based.checking.active_check_checking(...)` and
        #       `cmk.base.agent_based.discovery.active_check_discovery(...)`.
        #       This does not seem right.
        fetcher_messages = list(
            fetch_all(
                sources=sources,
                file_cache_max_age=file_cache_max_age,
                mode=mode,
            ))

    collected_host_sections, results = _collect_host_sections(
        sources=sources,
        file_cache_max_age=file_cache_max_age,
        fetcher_messages=fetcher_messages,
        selected_sections=selected_sections,
    )
    # One (resolver, parser) pair per host key: the resolver knows the section
    # plugins for the raw sections present, the parser holds the raw data.
    return (
        ParsedSectionsBroker({
            host_key: (
                ParsedSectionsResolver(section_plugins=[
                    agent_based_register.get_section_plugin(section_name)
                    for section_name in host_sections.sections
                ],),
                SectionsParser(host_sections=host_sections),
            ) for host_key, host_sections in collected_host_sections.items()
        }),
        results,
    )
def test_cisco_related_snmp_detection(oid_data, detected, not_detected):
    """SNMP detection must trigger exactly for the sections in `detected`."""
    all_names = detected | not_detected
    for section_name_str in all_names:
        plugin = agent_based_register.get_section_plugin(SectionName(section_name_str))
        assert isinstance(plugin, SNMPSectionPlugin)
        should_detect = section_name_str in detected
        does_detect = evaluate_snmp_detection(
            detect_spec=plugin.detect_spec,
            oid_value_getter=oid_data.get,
        )
        assert does_detect == should_detect
def test_inv_oracle_instance(line, expected_data):
    """Inventory of a single oracle_instance line must match the expectation."""
    section_plugin = agent_based_register.get_section_plugin(SectionName('oracle_instance'))
    assert section_plugin
    inventory_plugin = agent_based_register.get_inventory_plugin(
        InventoryPluginName('oracle_instance'))
    assert inventory_plugin

    parsed_section = section_plugin.parse_function([line])
    rows = list(inventory_plugin.inventory_function(parsed_section))
    assert rows == expected_data
def _update_with_parse_function( section_content: ABCRawDataSection, section_name: SectionName, check_legacy_info: Dict[str, Dict[str, Any]], ) -> ParsedSectionContent: """Transform the section_content using the defined parse functions. Some checks define a parse function that is used to transform the section_content somehow. It is applied by this function. Please note that this is not a check/subcheck individual setting. This option is related to the agent section. All exceptions raised by the parse function will be catched and re-raised as MKParseFunctionError() exceptions.""" # We can use the migrated section: we refuse to migrate sections with # "'node_info'=True", so the auto-migrated ones will keep working. # This function will never be called on checks programmed against the new # API (or migrated manually) if not agent_based_register.is_registered_section_plugin(section_name): # use legacy parse function for unmigrated sections parse_function = check_legacy_info.get(str(section_name), {}).get("parse_function") else: section_plugin = agent_based_register.get_section_plugin( section_name) parse_function = cast( Callable[[ABCRawDataSection], ParsedSectionContent], section_plugin.parse_function) if parse_function is None: return section_content # (mo): ValueStores (formally Item state) need to be *only* available # from within the check function, nowhere else. orig_item_state_prefix = item_state.get_item_state_prefix() try: item_state.set_item_state_prefix(section_name, None) return parse_function(section_content) except item_state.MKCounterWrapped: raise except Exception: if cmk.utils.debug.enabled(): raise raise MKParseFunctionError(*sys.exc_info()) finally: item_state.set_item_state_prefix(*orig_item_state_prefix)
def _section_permutations(
    parsed_section_names: Sequence[ParsedSectionName],
) -> Generator[Tuple[SectionPlugin, ...], None, None]:
    """Yield every combination of section plugins producing the given parsed sections.

    For the first parsed section name, every producing section plugin is
    combined with every permutation of the remaining names (recursively, a
    cartesian product over the producers).
    """
    if not parsed_section_names:
        # Base case: the single empty permutation.
        yield ()
        return

    # NOTE: a redundant `if len(parsed_section_names) >= 1:` guard was removed
    # here -- it is always true after the early return above.
    for section_name in agent_based_register.get_section_producers(parsed_section_names[0]):
        # Hoisted out of the inner loop: the plugin is the same for every tail.
        plugin = agent_based_register.get_section_plugin(section_name)
        for tail in _section_permutations(parsed_section_names[1:]):
            yield (plugin,) + tail
def _make_oid_infos( self, *, persisted_sections: SNMPPersistedSections, selected_raw_sections: Optional[SelectedRawSections], prefetched_sections: Sequence[SectionName], ) -> Dict[SectionName, List[SNMPTree]]: oid_infos = {} # Dict[SectionName, List[SNMPTree]] if selected_raw_sections is None: section_names = self.detector( self.snmp_config, self._make_snmp_scan_sections(), on_error=self.on_snmp_scan_error, ) else: section_names = {s.name for s in selected_raw_sections.values()} section_names -= self.host_config.disabled_snmp_sections() for section_name in SNMPConfigurator._sort_section_names( section_names): plugin = agent_based_register.get_section_plugin(section_name) if not isinstance(plugin, SNMPSectionPlugin): self._logger.debug("%s: No such section definition", section_name) continue if section_name in prefetched_sections: continue # This checks data is configured to be persisted (snmp_fetch_interval) and recent enough. # Skip gathering new data here. The persisted data will be added later if section_name in persisted_sections: self._logger.debug( "%s: Skip fetching data (persisted info exists)", section_name) continue oid_infos[section_name] = plugin.trees return oid_infos
def _needs_redetection(section_name: SectionName) -> bool:
    """Return True if more than one section produces the same parsed section.

    In that case the detection has to be re-evaluated to pick the right one.
    """
    plugin = agent_based_register.get_section_plugin(section_name)
    producers = agent_based_register.get_section_producers(plugin.parsed_section_name)
    return len(producers) > 1
def test_inv_oracle_instance_multiline():
    """Inventory of two oracle_instance agent lines (one CDB/PDB-style row).

    Expects one inventory row and one status row (uptime) per instance;
    fields prefixed with '_' in the fixture are placeholders.
    """
    lines = [
        [
            'SID', 'VERSION', 'OPENMODE', 'LOGINS', '_ARCHIVER', '123', '_DBID', 'LOGMODE',
            '_DATABASE_ROLE', '_FORCE_LOGGING', '_NAME', '080220151025', '_PLUGGABLE', '_CON_ID',
            '', '_PDBID', '_POPENMODE', '_PRESTRICTED', '_PTOTAL_SIZE', '_PRECOVERY_STATUS',
            '_PUP_SECONDS', '_PBLOCK_SIZE',
        ],
        [
            'SID', 'VERSION', '_OPENMODE', 'LOGINS', '_ARCHIVER', '_RAW_UP_SECONDS', '_DBID',
            'LOGMODE', '_DATABASE_ROLE', '_FORCE_LOGGING', '_NAME', '080220151026', 'TRUE',
            '_CON_ID', 'PNAME', '_PDBID', 'POPENMODE', '_PRESTRICTED', '_PTOTAL_SIZE',
            '_PRECOVERY_STATUS', '456', '_PBLOCK_SIZE',
        ],
    ]
    section = agent_based_register.get_section_plugin(SectionName('oracle_instance'))
    parsed = section.parse_function(lines)  # type: ignore[arg-type]
    inv_plugin = agent_based_register.get_inventory_plugin(
        InventoryPluginName('oracle_instance'))
    expected_data = [
        TableRow(
            path=['software', 'applications', 'oracle', 'instance'],
            key_columns={
                "sid": "SID",
                "pname": "",
            },
            inventory_columns={
                "version": "VERSION",
                "openmode": "OPENMODE",
                "logmode": 'LOGMODE',
                "logins": "LOGINS",
                "db_creation_time": "2015-02-08 10:25",
            },
        ),
        TableRow(
            path=['software', 'applications', 'oracle', 'instance'],
            key_columns={
                "sid": "SID",
                "pname": "PNAME",
            },
            inventory_columns={
                "version": "VERSION",
                "openmode": "POPENMODE",
                "logmode": 'LOGMODE',
                "logins": "ALLOWED",
                "db_creation_time": "2015-02-08 10:26",
            },
        ),
        TableRow(
            path=['software', 'applications', 'oracle', 'instance'],
            key_columns={
                "sid": "SID",
                "pname": "",
            },
            status_columns={
                "db_uptime": 123,
            },
        ),
        TableRow(
            path=['software', 'applications', 'oracle', 'instance'],
            key_columns={
                "sid": "SID",
                "pname": "PNAME",
            },
            status_columns={
                "db_uptime": 456,
            },
        ),
    ]
    assert list(inv_plugin.inventory_function(
        parsed)) == expected_data  # type: ignore[union-attr]
def test_parse_sap_hana_instance_status(info, expected_result):
    """Parsing of sap_hana_instance_status agent output must match the expectation."""
    plugin = register.get_section_plugin(SectionName("sap_hana_instance_status"))
    parsed = plugin.parse_function(info)
    assert parsed == expected_result
def test_parse_sap_hana_diskusage(info, expected_result):
    """Parsing of sap_hana_diskusage agent output must match the expectation."""
    plugin = register.get_section_plugin(SectionName("sap_hana_diskusage"))
    parsed = plugin.parse_function(info)
    assert parsed == expected_result
def _discover_host_labels_for_source_type(
    *,
    host_key: HostKey,
    parsed_sections_broker: ParsedSectionsBroker,
    discovery_parameters: DiscoveryParameters,
) -> Mapping[str, HostLabel]:
    # Run the host label functions of all applicable section plugins for one
    # host/source-type and return the discovered labels keyed by label name.
    # Individual plugin failures are handled according to
    # discovery_parameters.on_error ("raise" / "warn" / ignore).
    try:
        host_data = parsed_sections_broker[host_key]
    except KeyError:
        # No data for this host key: nothing to discover.
        return {}

    host_labels = {}
    try:
        # We do *not* process all available raw sections. Instead we see which *parsed*
        # sections would result from them, and then process those.
        parse_sections = {
            agent_based_register.get_section_plugin(rs).parsed_section_name
            for rs in host_data.sections
        }
        applicable_sections = parsed_sections_broker.determine_applicable_sections(
            parse_sections,
            host_key.source_type,
        )
        console.vverbose("Trying host label discovery with: %s\n" %
                         ", ".join(str(s.name) for s in applicable_sections))
        for section_plugin in _sort_sections_by_label_priority(applicable_sections):
            kwargs = {
                'section': parsed_sections_broker.get_parsed_section(
                    host_key, section_plugin.parsed_section_name),
            }

            # Only pass 'params' if host label parameters are configured, so
            # parameterless host label functions keep working.
            host_label_params = config.get_host_label_parameters(host_key.hostname,
                                                                 section_plugin)
            if host_label_params is not None:
                kwargs["params"] = host_label_params

            try:
                for label in section_plugin.host_label_function(**kwargs):
                    console.vverbose(f"  {label.name}: {label.value} ({section_plugin.name})\n")
                    # Later sections overwrite earlier labels of the same name.
                    host_labels[label.name] = HostLabel(
                        label.name,
                        label.value,
                        section_plugin.name,
                    )
            except (KeyboardInterrupt, MKTimeout):
                # Never swallow user interrupts or timeouts.
                raise
            except Exception as exc:
                if cmk.utils.debug.enabled() or discovery_parameters.on_error == "raise":
                    raise
                if discovery_parameters.on_error == "warn":
                    console.error("Host label discovery of '%s' failed: %s\n" %
                                  (section_plugin.name, exc))

    except KeyboardInterrupt:
        raise MKGeneralException("Interrupted by Ctrl-C.")

    return host_labels