def parse(
    self,
    raw_data: Result[TRawData, Exception],
) -> Result[THostSections, Exception]:
    # Pass fetch errors through unchanged; wrap parsing failures in a Result.Error.
    if raw_data.is_error():
        return Result.Error(raw_data.error)
    try:
        return Result.OK(self._parse(raw_data.ok))
    except Exception as exc:
        return Result.Error(exc)
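# Orientation note: the code in this section relies on a small Result container
# with Result.OK / Result.Error constructors, is_ok()/is_error() predicates and
# .ok/.error accessors.  The sketch below is an illustrative assumption of that
# interface (the name _ResultSketch is hypothetical, and the real implementation
# also supports equality and ordering, which are omitted here).
class _ResultSketch:
    """Minimal sketch: a value tagged as either OK or Error."""

    def __init__(self, value, is_ok):
        self._value = value
        self._is_ok = is_ok

    @classmethod
    def OK(cls, value):
        return cls(value, True)

    @classmethod
    def Error(cls, value):
        return cls(value, False)

    def is_ok(self):
        return self._is_ok

    def is_error(self):
        return not self._is_ok

    @property
    def ok(self):
        # Only meaningful for OK results.
        return self._value if self._is_ok else None

    @property
    def error(self):
        # Only meaningful for Error results.
        return None if self._is_ok else self._value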
def test_cmp_ok(self, result):
    other = Result.OK(1)
    assert result.ok < other.ok

    assert result != other
    assert result < other
    assert result <= other
    assert result <= Result.OK(result.ok)
    assert other > result
    assert other >= result
    assert other >= Result.OK(other.ok)
def test_cmp_err(self, result):
    other = Result.Error(1)
    assert result.error < other.error

    assert result != other
    assert result < other
    assert result <= other
    assert result <= Result.Error(result.error)
    assert other > result
    assert other >= result
    assert other >= Result.Error(other.error)
def test_persist_option_populates_cache_info_and_persisted_sections(
    self,
    hostname,
    logger,
    monkeypatch,
):
    time_time = 1000
    time_delta = 50
    monkeypatch.setattr(time, "time", lambda: time_time)

    raw_data = b"\n".join((
        b"<<<section:persist(%i)>>>" % (time_time + time_delta),
        b"first line",
        b"second line",
    ))

    ahs = AgentParser(hostname, Path(""), logger).parse(Result.OK(raw_data)).ok

    assert ahs.sections == {
        SectionName("section"): [["first", "line"], ["second", "line"]],
    }
    assert ahs.cache_info == {SectionName("section"): (time_time, time_delta)}
    assert ahs.piggybacked_raw_data == {}
    assert ahs.persisted_sections == {
        SectionName("section"): (1000, 1050, [["first", "line"], ["second", "line"]]),
    }
def patch_io(self, monkeypatch):
    class DummyHostSection(ABCHostSections):
        def _extend_section(self, section_name, section_content):
            pass

    for fetcher in (IPMIFetcher, PiggybackFetcher, ProgramFetcher, SNMPFetcher, TCPFetcher):
        monkeypatch.setattr(fetcher, "__enter__", lambda self: self)
        monkeypatch.setattr(
            fetcher,
            "fetch",
            lambda self, mode, fetcher=fetcher: {} if fetcher is SNMPFetcher else b"",
        )

    monkeypatch.setattr(
        ABCSource,
        "parse",
        lambda self, raw_data: Result.OK(
            DummyHostSection(
                sections={
                    SectionName("section_name_%s" % self.hostname): [["section_content"]]
                },
                cache_info={},
                piggybacked_raw_data={},
                persisted_sections="",
            ),
        ),
    )
def test_eq(self, result, value):
    assert (result == value) is False
    assert (value == result) is False
    assert (result != value) is True
    assert (value != result) is True

    ok = Result.OK(value)
    assert (result == ok) is True
    assert (ok == result) is True
    assert (result != ok) is False
    assert (ok != result) is False

    err = Result.Error(value)
    assert (result == err) is False
    assert (err == result) is False
    assert (result != err) is True
    assert (err != result) is True
def test_cmp_ok(self, result, value):
    other = Result.OK(value)
    assert result.error == other.ok

    assert result != other
    assert result > other
    assert result >= other
    assert other < result
    assert other <= result
def test_attribute_defaults(monkeypatch, ipaddress, mode):
    hostname = "testhost"
    Scenario().add_host(hostname).apply(monkeypatch)

    source = PiggybackSource(hostname, ipaddress, mode=mode)
    assert source.hostname == hostname
    assert source.ipaddress == ipaddress
    assert source.mode is mode
    assert source.description.startswith("Process piggyback data from")
    assert source.summarize(Result.OK(AgentHostSections())) == (0, "", [])
    assert source.id == "piggyback"
def test_cmp_err(self, result, value):
    other = Result.Error(value)
    assert result.ok == other.error

    assert result != other
    assert result < other
    assert result <= other
    assert other > result
    assert other >= result
def test_piggyback_populates_piggyback_raw_data(self, hostname, logger, monkeypatch):
    time_time = 1000
    monkeypatch.setattr(time, "time", lambda: time_time)
    monkeypatch.setattr(config.HostConfig, "check_mk_check_interval", 10)

    raw_data = b"\n".join((
        b"<<<<piggyback header>>>>",  # <- space is OK
        b"<<<section>>>",
        b"first line",
        b"second line",
        b"<<<<>>>>",  # <- omitting this line makes no difference
        b"<<<<piggyback_other>>>>",
        b"<<<other_section>>>",
        b"first line",
        b"second line",
        b"<<<<>>>>",
        b"<<<<../b:l*a../>>>>",
        b"<<<section>>>",
        b"first line",
        b"<<<</b_l-u/>>>>",
        b"<<<section>>>",
        b"first line",
    ))

    ahs = AgentParser(hostname, Path(""), logger).parse(Result.OK(raw_data)).ok

    assert ahs.sections == {}
    assert ahs.cache_info == {}
    assert ahs.piggybacked_raw_data == {
        "piggyback_header": [
            b"<<<section:cached(1000,900)>>>",
            b"first line",
            b"second line",
        ],
        "piggyback_other": [
            b"<<<other_section:cached(1000,900)>>>",
            b"first line",
            b"second line",
        ],
        ".._b_l_a.._": [
            b"<<<section:cached(1000,900)>>>",
            b"first line",
        ],
        "_b_l-u_": [
            b"<<<section:cached(1000,900)>>>",
            b"first line",
        ],
    }
    assert ahs.persisted_sections == {}
def test_defaults(self, ipaddress, mode, monkeypatch):
    hostname = "testhost"
    Scenario().add_host(hostname).apply(monkeypatch)

    source = TCPSource(
        hostname,
        ipaddress,
        mode=mode,
    )
    assert source.summarize(Result.OK(AgentHostSections())) == (
        0,
        "Version: unknown, OS: unknown",
        [],
    )
def test_attribute_defaults(mode, monkeypatch):
    hostname = "testhost"
    Scenario().add_host(hostname).apply(monkeypatch)

    host_config = config.get_config_cache().get_host_config(hostname)
    ipaddress = ip_lookup.lookup_mgmt_board_ip_address(host_config)

    source = IPMISource(hostname, ipaddress, mode=mode)
    assert source.hostname == hostname
    assert source.ipaddress == ipaddress
    assert source.mode is mode
    assert source.description == "Management board - IPMI"
    assert source.source_type is SourceType.MANAGEMENT
    assert source.summarize(Result.OK(AgentHostSections())) == (0, "Version: unknown", [])
    assert source.id == "mgmt_ipmi"
    assert source.cpu_tracking_id == "mgmt_ipmi"
def parse(self, raw_data: Result[TRawData, Exception]) -> Result[THostSections, Exception]:
    try:
        host_sections = self._make_parser().parse(raw_data)
        if host_sections.is_error():
            return host_sections

        host_sections.ok.add_persisted_sections(
            self.persisted_sections_file_path,
            self.use_outdated_persisted_sections,
            logger=self._logger,
        )
        return host_sections
    except Exception as exc:
        # Log and wrap unexpected errors; re-raise only when debugging is enabled.
        self._logger.log(VERBOSE, "ERROR: %s", exc)
        if cmk.utils.debug.enabled():
            raise
        return Result.Error(exc)
def test_raw_section_populates_sections(self, hostname, logger):
    raw_data = b"\n".join((
        b"<<<a_section>>>",
        b"first line",
        b"second line",
        b"<<<another_section>>>",
        b"first line",
        b"second line",
    ))

    ahs = AgentParser(hostname, Path(""), logger).parse(Result.OK(raw_data)).ok

    assert ahs.sections == {
        SectionName("a_section"): [["first", "line"], ["second", "line"]],
        SectionName("another_section"): [["first", "line"], ["second", "line"]],
    }
    assert ahs.cache_info == {}
    assert ahs.piggybacked_raw_data == {}
    assert ahs.persisted_sections == {}
def fetch(self) -> Result[TRawData, Exception]:
    # Any exception raised while fetching is captured in the returned Result.
    try:
        with self._make_fetcher() as fetcher:
            return Result.OK(fetcher.fetch(self.mode))
    except Exception as exc:
        return Result.Error(exc)
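# For orientation only: fetch() and parse() above are typically chained, and a
# fetch error is carried through parse() unchanged.  The sketch below is a
# hypothetical usage example; _run_source_sketch and the `source` argument are
# assumptions for illustration, not part of the snippets in this section.
def _run_source_sketch(source):
    raw_data = source.fetch()  # Result.OK(raw bytes) or Result.Error(exc)
    host_sections = source.parse(raw_data)  # fetch errors pass straight through
    if host_sections.is_error():
        # The captured exception is available for logging or summarizing.
        return None
    return host_sections.ok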
def result(self, value):
    return Result.Error(value)
def test_with_exception(self, source):
    assert source.summarize(Result.Error(Exception())) == (3, "(?)", [])
def test_defaults(self, source):
    assert source.summarize(Result.OK(AgentHostSections())) == (0, "Success", [])
def test_defaults(self, source):
    assert source.summarize(Result.OK(source.default_host_sections)) == (0, "", [])
def result(self, value):
    return Result.OK(value)
def test_with_MKTimeout_exception(self, source):
    assert source.summarize(Result.Error(MKTimeout())) == (2, "(!!)", [])
def test_with_MKEmptyAgentData_exception(self, source):
    assert source.summarize(Result.Error(MKEmptyAgentData())) == (2, "(!!)", [])
def check(_, *args, **kwargs):
    return Result.OK(AgentHostSections(sections={section_name: [[str(section_name)]]}))
def update_host_sections(
    multi_host_sections: MultiHostSections,
    nodes: Iterable[Tuple[HostName, Optional[HostAddress], Sequence[ABCSource]]],
    *,
    max_cachefile_age: int,
    selected_raw_sections: Optional[SelectedRawSections],
    host_config: HostConfig,
    fetcher_messages: Optional[List[FetcherMessage]] = None,
) -> Sequence[Tuple[ABCSource, Result[ABCHostSections, Exception]]]:
    """Gather ALL host info data for any host (hosts, nodes, clusters) in Check_MK.

    Communication errors are not raised by this function.  All agent-related errors
    are caught by the source.run() method and saved in its _exception attribute.  The
    caller should use source.get_summary_result() to get the state, output and
    perfdata of the agent execution, or source.exception to get the exception object.
    """
    if fetcher_messages is None:
        console.verbose("%s+%s %s\n", tty.yellow, tty.normal, "Fetching data".upper())
    else:
        console.verbose("%s+%s %s\n", tty.yellow, tty.normal, "Parse fetcher results".upper())

    # Special agents can produce data for the same check_plugin_name on the same host;
    # in this case the section lines need to be extended.
    result: List[Tuple[ABCSource, Result[ABCHostSections, Exception]]] = []
    for hostname, ipaddress, sources in nodes:
        for source_index, source in enumerate(sources):
            if host_config.nodes is None:
                source.selected_raw_sections = selected_raw_sections
            else:
                source.selected_raw_sections = _make_piggybacked_sections(host_config)

            source.file_cache.max_age = max_cachefile_age

            host_sections = multi_host_sections.setdefault(
                HostKey(hostname, ipaddress, source.source_type),
                source.default_host_sections,
            )

            if fetcher_messages is None:
                # We don't have raw_data yet (from the previously executed fetcher),
                # so execute the fetcher here.
                raw_data = source.fetch()
            else:
                # The Microcore has handed over results from the previously executed
                # fetcher.  Extract the raw_data for the source we currently process.
                fetcher_message = fetcher_messages[source_index]
                # TODO (ml): Can we somehow verify that this is correct?
                # if fetcher_message["fetcher_type"] != source.id:
                #     raise LookupError("Checker and fetcher mismatch")
                # TODO: Handle status != 0
                assert fetcher_message.header.status == 0
                raw_data = Result.OK(fetcher_message.raw_data())

            host_section = source.parse(raw_data)
            result.append((source, host_section))
            if host_section.is_ok():
                host_sections.update(host_section.ok)

    # Store piggyback information received from all sources of this host.  This also
    # implies a removal of piggyback files received during previous calls.
    host_sections = multi_host_sections.setdefault(
        HostKey(hostname, ipaddress, SourceType.HOST),
        AgentHostSections(),
    )
    cmk.utils.piggyback.store_piggyback_raw_data(
        hostname,
        host_sections.piggybacked_raw_data,
    )

    return result