def _get_needed_check_plugin_names(host_config):
    """Return the set of check plugin names needed to monitor *host_config*.

    Combines three sources:
    1. special agent plugins configured for the host's data sources,
    2. the host's own check table (including clustered services), plus any
       "extra_sections" those checks consume,
    3. for clusters, the check tables of all nodes (needed to load the
       nodes' autochecks).
    """
    import cmk_base.check_table as check_table

    needed_check_plugin_names = set()

    # In case the host is monitored as special agent, the check plugin for
    # the special agent needs to be loaded
    sources = data_sources.DataSources(host_config.hostname, ipaddress=None)
    for source in sources.get_data_sources():
        if isinstance(source, data_sources.programs.SpecialAgentDataSource):
            needed_check_plugin_names.add(source.special_agent_plugin_file_name)

    # Collect the needed check plugin names using the host check table
    for check_plugin_name in check_table.get_needed_check_names(
            host_config.hostname, filter_mode="include_clustered", skip_ignored=False):
        # Hoist the repeated check_info lookup out of the inner logic
        check_info_entry = config.check_info[check_plugin_name]
        # Extra sections are other check plugins whose parsed data this check
        # consumes; load them too, but only those that actually exist.
        needed_check_plugin_names.update(
            section_name
            for section_name in (check_info_entry.get("extra_sections") or [])
            if section_name in config.check_info)
        needed_check_plugin_names.add(check_plugin_name)

    # Also include the check plugins of the cluster nodes to be able to load
    # the autochecks of the nodes
    if host_config.is_cluster:
        for node in host_config.nodes:
            needed_check_plugin_names.update(
                check_table.get_needed_check_names(node, skip_ignored=False))

    return needed_check_plugin_names
def get_host_sections(self, max_cachefile_age=None):
    """Gather ALL host info data for any host (hosts, nodes, clusters) in Check_MK.

    The result is a MultiHostSections object holding one already parsed
    HostSections() entry per related host: a single entry for plain hosts,
    one entry per node for cluster hosts.

    Communication errors are not raised through by this function. All agent
    related errors are caught by the source.run() method and stored in its
    _exception attribute. Callers should use source.get_summary_result() to
    get state, output and perfdata of the agent execution, or
    source.exception() to get the exception object.
    """
    console.step("Fetching data")

    # Abstract over clusters/nodes/hosts: build one work item per host as a
    # (hostname, ipaddress, data_sources, max_cachefile_age) tuple.
    cluster_nodes = self._host_config.nodes
    if cluster_nodes is None:
        work_items = [(self._hostname, self._ipaddress, self,
                       config.check_max_cachefile_age)]
    else:
        work_items = []
        for node_name in cluster_nodes:
            node_ip = ip_lookup.lookup_ip_address(node_name)
            needed_names = check_table.get_needed_check_names(
                node_name, remove_duplicates=True, filter_mode="only_clustered")
            node_sources = DataSources(node_name, node_ip)
            node_sources.enforce_check_plugin_names(set(needed_names))
            work_items.append((node_name, node_ip, node_sources,
                               config.cluster_max_cachefile_age))

    if cluster_nodes:
        import cmk_base.data_sources.abstract as abstract
        abstract.DataSource.set_may_use_cache_file()

    # Special agents can produce data for the same check_plugin_name on the
    # same host; in this case the section lines need to be extended.
    multi_host_sections = MultiHostSections()
    for hostname, ipaddress, sources, host_max_cachefile_age in work_items:
        # A max_cachefile_age given with the function call (only used in
        # discovery mode) always wins over the host individual one.
        if max_cachefile_age is None:
            sources.set_max_cachefile_age(host_max_cachefile_age)
        else:
            sources.set_max_cachefile_age(max_cachefile_age)

        host_sections = multi_host_sections.add_or_get_host_sections(
            hostname, ipaddress)
        for source in sources.get_data_sources():
            host_sections.update(source.run())

        # Store piggyback information received from all sources of this host.
        # This also implies a removal of piggyback files received during
        # previous calls.
        cmk_base.piggyback.store_piggyback_raw_data(
            hostname, host_sections.piggybacked_raw_data)

    return multi_host_sections