Example #1
    def _get_agent_data(self) -> Tuple[AgentRawData, TransportProtocol]:
        try:
            raw_protocol = self._socket.recv(2, socket.MSG_WAITALL)
        except socket.error as e:
            raise MKFetcherError(f"Communication failed: {e}") from e

        protocol = self._detect_transport_protocol(raw_protocol)

        self._validate_protocol(protocol)

        if protocol is TransportProtocol.TLS:
            with self._wrap_tls() as ssock:
                raw_agent_data = self._recvall(ssock)
            try:
                agent_data = AgentCtlMessage.from_bytes(raw_agent_data).payload
            except ValueError as e:
                raise MKFetcherError(
                    f"Failed to deserialize versioned agent data: {e!r}"
                ) from e
            return AgentRawData(agent_data[2:]), self._detect_transport_protocol(agent_data[:2])

        return AgentRawData(self._recvall(self._socket, socket.MSG_WAITALL)), protocol
Example #2
    def _get_agent_data(self) -> Tuple[AgentRawData, TransportProtocol]:
        try:
            raw_protocol = self._socket.recv(2, socket.MSG_WAITALL)
        except socket.error as e:
            raise MKFetcherError(f"Communication failed: {e}") from e

        protocol = self._detect_transport_protocol(
            raw_protocol, empty_msg="Empty output from host %s:%d" % self.address
        )

        controller_uuid = get_uuid_link_manager().get_uuid(self.host_name)
        self._validate_protocol(protocol, is_registered=controller_uuid is not None)

        if protocol is TransportProtocol.TLS:
            with self._wrap_tls(controller_uuid) as ssock:
                raw_agent_data = self._recvall(ssock)
            try:
                agent_data = AgentCtlMessage.from_bytes(raw_agent_data).payload
            except ValueError as e:
                raise MKFetcherError(f"Failed to deserialize versioned agent data: {e!r}") from e
            return AgentRawData(agent_data[2:]), self._detect_transport_protocol(
                agent_data[:2], empty_msg="Empty payload from controller at %s:%d" % self.address
            )

        return AgentRawData(self._recvall(self._socket, socket.MSG_WAITALL)), protocol
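Both variants rely on the same framing convention in the TLS branch: the first two bytes of the controller payload name the inner transport protocol, and everything after them is the actual agent output. A minimal sketch of that split (the marker values below are made up; the real ones are defined by TransportProtocol and resolved via _detect_transport_protocol):

from typing import Tuple

# Illustrative stand-ins, not the real protocol constants.
HYPOTHETICAL_MARKERS = {b"\x00\x01": "PLAIN", b"\x16\x03": "TLS"}

def split_controller_payload(payload: bytes) -> Tuple[str, bytes]:
    """Split a controller payload into (inner protocol name, agent data)."""
    if len(payload) < 2:
        raise ValueError("payload too short to carry a protocol marker")
    return HYPOTHETICAL_MARKERS.get(payload[:2], "UNKNOWN"), payload[2:]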
Example #3
    def _sensors_section(self) -> AgentRawData:
        if self._command is None:
            raise MKFetcherError("Not connected")

        self._logger.debug("Fetching sensor data via UDP from %s:623", self._command.bmc)

        try:
            sdr = ipmi_sdr.SDR(self._command)
        except NotImplementedError as e:
            self._logger.log(VERBOSE, "Failed to fetch sensor data: %r", e)
            self._logger.debug("Exception", exc_info=True)
            return AgentRawData(b"")

        sensors = []
        has_no_gpu = not self._has_gpu()
        for ident in sdr.get_sensor_numbers():
            sensor = sdr.sensors[ident]
            rsp = self._command.raw_command(command=0x2d,
                                            netfn=4,
                                            rslun=sensor.sensor_lun,
                                            data=(sensor.sensor_number,))
            if 'error' in rsp:
                continue

            reading = sensor.decode_sensor_reading(rsp['data'])
            if reading is not None:
                # sometimes (wrong) data for GPU sensors is reported, even if
                # not installed
                if "GPU" in reading.name and has_no_gpu:
                    continue
                sensors.append(IPMIFetcher._parse_sensor_reading(sensor.sensor_number, reading))

        return AgentRawData(b"<<<mgmt_ipmi_sensors:sep(124)>>>\n" +
                            b"".join(b"|".join(sensor) + b"\n" for sensor in sensors))
Example #4
    def _handle_false_positive_warnings(
            reading: ipmi_sdr.SensorReading) -> AgentRawData:
        """This is a workaround for a pyghmi bug
        (bug report: https://bugs.launchpad.net/pyghmi/+bug/1790120)

        For some sensors undefined states are looked up, which results in readings of the form
        {'states': ['Present',
                    'Unknown state 8 for reading type 111/sensor type 8',
                    'Unknown state 9 for reading type 111/sensor type 8',
                    'Unknown state 10 for reading type 111/sensor type 8',
                    'Unknown state 11 for reading type 111/sensor type 8',
                    'Unknown state 12 for reading type 111/sensor type 8', ...],
        'health': 1, 'name': 'PS Status', 'imprecision': None, 'units': '',
        'state_ids': [552704, 552712, 552713, 552714, 552715, 552716, 552717, 552718],
        'type': 'Power Supply', 'value': None, 'unavailable': 0}

        The health warning is set, but only due to the lookup errors. We remove the lookup
        errors, and see whether the remaining states are meaningful.
        """
        states = [
            s.encode("utf-8") for s in reading.states
            if not s.startswith("Unknown state ")
        ]

        if not states:
            return AgentRawData(b"no state reported")

        if any(b"non-critical" in s for s in states):
            return AgentRawData(b"WARNING")

        # just keep all the available info. It should be dealt with in
        # ipmi_sensors.include (freeipmi_status_txt_mapping),
        # where it will default to 2(CRIT)
        return AgentRawData(b', '.join(states))
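A rough usage sketch of the filter above, assuming IPMIFetcher is importable; types.SimpleNamespace stands in for a real ipmi_sdr.SensorReading:

from types import SimpleNamespace

reading = SimpleNamespace(states=[
    "Present",
    "Unknown state 8 for reading type 111/sensor type 8",
    "Unknown state 9 for reading type 111/sensor type 8",
])
# Only "Present" survives the filter, so the reading is not escalated.
assert IPMIFetcher._handle_false_positive_warnings(reading) == b"Present"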
Example #5
def process(output: AgentRawData) -> AgentRawData:
    try:
        while True:
            i = output.find(b'%{')
            if i == -1:
                break
            e = output.find(b'}', i)
            if e == -1:
                break
            # 2.1: Disabled this feature because of the eval() call below. To make this secure it
            # would need some rework. It is a not documented feature and was rarely used. There is a
            # good chance that we can remove this feature. To have the chance to re-enable it
            # quickly, we leave everything in place for now. In case no one complains about the
            # missing feature, remove it with 2.3.
            raise MKGeneralException(
                "Sorry, the agent simulator functions %{...} can not be used anymore. "
                "They have been removed for security reasons. In case you need them, please let "
                "us know.")
            #simfunc = output[i + 2:e]
            #replacement = str(eval(b"agentsim_" + simfunc)).encode("utf-8")  # nosec
            #output = AgentRawData(output[:i] + replacement + output[e + 1:])
    except MKGeneralException:
        raise
    except Exception:
        if cmk.utils.debug.enabled():
            raise

    return output
Example #6
    def _get_source_labels_section(self) -> AgentRawData:
        """Return a <<<labels>>> agent section which adds the piggyback sources
        to the labels of the current host"""
        if not self._sources:
            return AgentRawData(b"")

        labels = {"cmk/piggyback_source_%s" % src.source_hostname: "yes" for src in self._sources}
        return AgentRawData(b'<<<labels:sep(0)>>>\n%s\n' % json.dumps(labels).encode("utf-8"))
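Since sep(0) makes the NUL byte the column separator, each line survives parsing as a single column and the JSON blob stays intact. For two piggyback sources named src1 and src2, the section would look like this (modulo JSON key order):

expected = (
    b'<<<labels:sep(0)>>>\n'
    b'{"cmk/piggyback_source_src1": "yes", "cmk/piggyback_source_src2": "yes"}\n'
)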
Example #7
    def _get_main_section(self) -> AgentRawData:
        raw_data = AgentRawData(b"")
        for src in self._sources:
            if src.successfully_processed:
                # !! Important for Check_MK and Check_MK Discovery service !!
                #   - sources contains ALL file infos and is not filtered
                #     in cmk/base/piggyback.py as in previous versions
                #   - Check_MK gets the processed file info reasons and displays them in
                #     its service details
                #   - Check_MK Discovery: Only shows vanished/new/... if raw data is not
                #     added; i.e. if file_info is not successfully processed
                raw_data = AgentRawData(raw_data + src.raw_data)
        return raw_data
Example #8
    def test_keep_outdated_false(self, logger, monkeypatch):
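        # Make time.time() return 1000, 1050, 1100, ... on successive calls.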
        monkeypatch.setattr(time, "time", lambda c=itertools.count(1000, 50): next(c))

        raw_data = AgentRawData(b"<<<another_section>>>")
        section_store = MockStore(
            "/dev/null",
            PersistedSections[AgentRawDataSection](
                {
                    SectionName("section"): (500, 600, []),
                }
            ),
            logger=logger,
        )
        parser = AgentParser(
            HostName("testhost"),
            section_store,
            check_interval=42,
            keep_outdated=False,
            translation={},
            encoding_fallback="ascii",
            simulation=False,
            logger=logger,
        )

        ahs = parser.parse(raw_data, selection=NO_SELECTION)
        assert ahs.sections == {SectionName("another_section"): []}
        assert ahs.cache_info == {}
        assert ahs.piggybacked_raw_data == {}
        assert section_store.load() == {}
Example #9
    def test_update_store_with_newest(self, logger):
        section_store = MockStore(
            "/dev/null",
            PersistedSections[AgentRawDataSection](
                {
                    SectionName("section"): (0, 0, [["oldest"]]),
                }
            ),
            logger=logger,
        )
        raw_data = AgentRawData(b"<<<section>>>\nnewest")
        parser = AgentParser(
            HostName("testhost"),
            section_store,
            check_interval=0,
            keep_outdated=True,
            translation={},
            encoding_fallback="ascii",
            simulation=False,
            logger=logger,
        )

        ahs = parser.parse(raw_data, selection=NO_SELECTION)
        assert ahs.sections == {SectionName("section"): [["newest"]]}
        assert ahs.cache_info == {}
        assert ahs.piggybacked_raw_data == {}
        assert section_store.load() == PersistedSections[AgentRawDataSection](
            {
                SectionName("section"): (0, 0, [["oldest"]]),
            }
        )
Example #10
    def test_section_lines_are_correctly_ordered_with_different_separators_and_piggyback(
        self, parser, store, monkeypatch
    ):
        monkeypatch.setattr(time, "time", lambda c=itertools.count(1000, 50): next(c))
        monkeypatch.setattr(parser, "cache_piggybacked_data_for", 900)

        raw_data = AgentRawData(
            b"\n".join(
                (
                    b"<<<<piggy>>>>",
                    b"<<<section:sep(124)>>>",
                    b"a|1",
                    b"<<<section:sep(44)>>>",
                    b"b,2",
                    b"<<<section:sep(124)>>>",
                    b"c|3",
                )
            )
        )

        ahs = parser.parse(raw_data, selection=NO_SELECTION)
        assert ahs.sections == {}
        assert ahs.cache_info == {}
        assert ahs.piggybacked_raw_data == {
            "piggy": [
                b"<<<section:cached(1000,900):sep(124)>>>",
                b"a|1",
                b"<<<section:cached(1000,900):sep(44)>>>",
                b"b,2",
                b"<<<section:cached(1000,900):sep(124)>>>",
                b"c|3",
            ],
        }
        assert store.load() == {}
Example #11
    def test_section_lines_are_correctly_ordered_with_different_separators(self, parser, store):
        raw_data = AgentRawData(
            b"\n".join(
                (
                    b"<<<section:sep(124)>>>",
                    b"a|1",
                    b"<<<section:sep(44)>>>",
                    b"b,2",
                    b"<<<section:sep(124)>>>",
                    b"c|3",
                )
            )
        )

        ahs = parser.parse(raw_data, selection=NO_SELECTION)
        assert ahs.sections == {
            SectionName("section"): [
                ["a", "1"],
                ["b", "2"],
                ["c", "3"],
            ],
        }
        assert ahs.cache_info == {}
        assert ahs.piggybacked_raw_data == {}
        assert store.load() == {}
Example #12
    def test_no_sources(self, cluster, nodes, config_cache, host_config):
        sources = make_cluster_sources(config_cache, host_config)

        host_sections = _collect_host_sections(
            fetched=[
                (
                    source,
                    FetcherMessage.from_raw_data(
                        result.OK(AgentRawData(b"")),
                        Snapshot.null(),
                        FetcherType.PIGGYBACK,
                    ),
                )
                for source in sources
            ],
            file_cache_max_age=file_cache.MaxAge.none(),
            selected_sections=NO_SELECTION,
        )[0]
        assert len(host_sections) == len(nodes)

        key_clu = HostKey(cluster, None, SourceType.HOST)
        assert key_clu not in host_sections

        for hostname, addr in nodes.items():
            key = HostKey(hostname, addr, SourceType.HOST)
            assert key in host_sections

            section = host_sections[key]
            assert section.sections[SectionName("section_name_%s" % hostname)] == [
                ["section_content_%s" % hostname]
            ]
            assert not section.cache_info
            assert not section.piggybacked_raw_data
Example #13
    def _decrypt(self, output: AgentRawData) -> AgentRawData:
        if output.startswith(b"<<<"):
            self._logger.debug("Output is not encrypted")
            if self.encryption_settings["use_regular"] == "enforce":
                raise MKFetcherError(
                    "Agent output is plaintext but encryption is enforced by configuration"
                )
            return output

        if self.encryption_settings["use_regular"] not in ["enforce", "allow"]:
            self._logger.debug("Output is not encrypted")
            return output

        try:
            self._logger.debug("Decrypt encrypted output")
            output = self._real_decrypt(output)
        except MKFetcherError:
            raise
        except Exception as e:
            if self.encryption_settings["use_regular"] == "enforce":
                raise MKFetcherError("Failed to decrypt agent output: %s" % e)

            # Of course the package might indeed have been encrypted, but in an
            # incorrect format. How would we find that out? In that case,
            # processing the output will simply fail later on.

        if not output:  # may be caused by xinetd not allowing our address
            raise MKFetcherError("Empty output from agent at %s:%d" %
                                 self.address)
        if len(output) < 16:
            raise MKFetcherError("Too short output from agent: %r" % output)
        return output
Example #14
    def _parse_sensor_reading(
            number: int,
            reading: ipmi_sdr.SensorReading) -> List[AgentRawData]:
        # {'states': [], 'health': 0, 'name': 'CPU1 Temp', 'imprecision': 0.5,
        #  'units': '\xc2\xb0C', 'state_ids': [], 'type': 'Temperature',
        #  'value': 25.0, 'unavailable': 0}]]
        health_txt = b"N/A"
        if reading.health >= ipmi_const.Health.Failed:
            health_txt = b"FAILED"
        elif reading.health >= ipmi_const.Health.Critical:
            health_txt = b"CRITICAL"
        elif reading.health >= ipmi_const.Health.Warning:
            # workaround for pyghmi bug: https://bugs.launchpad.net/pyghmi/+bug/1790120
            health_txt = IPMIFetcher._handle_false_positive_warnings(reading)
        elif reading.health == ipmi_const.Health.Ok:
            health_txt = b"OK"

        return [
            AgentRawData(_) for _ in (
                b"%d" % number,
                ensure_binary(reading.name),
                ensure_binary(reading.type),
                (b"%0.2f" % reading.value) if reading.value else b"N/A",
                ensure_binary(reading.units) if reading.units != b"\xc2\xb0C" else b"C",
                health_txt,
            )
        ]
Example #15
def _collect_until(oid: OID, oid_prefix: OID, lines: List[str], index: int,
                   direction: int) -> SNMPRowInfo:
    rows = []
    # Handle the case where we run past the end of the lines list
    if index >= len(lines):
        if direction > 0:
            return []
        index -= 1
    while True:
        line = lines[index]
        parts = line.split(None, 1)
        o = parts[0]
        if o.startswith('.'):
            o = o[1:]
        if o == oid or o.startswith(oid_prefix + "."):
            if len(parts) > 1:
                # FIXME: This encoding ping-pong is horrible...
                value = ensure_str(agent_simulator.process(AgentRawData(ensure_binary(parts[1]))))
            else:
                value = ""
            # Fix for missing starting oids
            rows.append(('.' + o, strip_snmp_value(value)))
            index += direction
            if index < 0 or index >= len(lines):
                break
        else:
            break
    return rows
Example #16
    def raw_data(self, file_cache):
        if isinstance(file_cache, DefaultAgentFileCache):
            return AgentRawData(b"<<<check_mk>>>\nagent raw data")
        assert isinstance(file_cache, SNMPFileCache)
        table: SNMPTable = []
        raw_data = SNMPRawData({SectionName("X"): table})
        return raw_data
Example #17
    def _decrypt(self, output: AgentRawData) -> AgentRawData:
        if not output:
            return output  # nothing to do, validation will fail

        if output.startswith(b"<<<"):
            self._logger.debug("Output is not encrypted")
            if self.encryption_settings["use_regular"] == "enforce":
                raise MKFetcherError(
                    "Agent output is plaintext but encryption is enforced by configuration"
                )
            return output

        self._logger.debug("Output is encrypted or invalid")
        if self.encryption_settings["use_regular"] == "disable":
            raise MKFetcherError(
                "Agent output is either invalid or encrypted but encryption is disabled by configuration"
            )

        try:
            self._logger.debug("Try to decrypt output")
            output = self._decrypt_agent_data(output=output)
        except MKFetcherError:
            raise
        except Exception as e:
            if self.encryption_settings["use_regular"] == "enforce":
                raise MKFetcherError("Failed to decrypt agent output: %s" % e)

        # Of course the package might indeed have been encrypted, but in an
        # incorrect format. How would we find that out? In that case,
        # processing the output will simply fail later on.
        return output
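Condensed into a decision table, this variant treats the three known values of encryption_settings["use_regular"] as follows; this is a sketch of the control flow above, not the actual implementation:

def decrypt_policy(use_regular: str, looks_like_plaintext: bool) -> str:
    if looks_like_plaintext:  # output starts with b"<<<"
        return "reject" if use_regular == "enforce" else "pass through"
    if use_regular == "disable":
        return "reject"
    # "enforce" and "allow" both attempt decryption; a decryption
    # failure is fatal only under "enforce".
    return "try to decrypt"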
Example #18
    def test_section_filtering(self, parser, monkeypatch):
        monkeypatch.setattr(time, "time", lambda: 1000)
        raw_data = AgentRawData(b"\n".join((
            b"<<<<piggyback_header>>>>",
            b"<<<deselected>>>",
            b"1st line",
            b"2nd line",
            b"<<<selected>>>",
            b"3rd line",
            b"4th line",
            b"<<<<>>>>",
            b"<<<deselected>>>",
            b"5th line",
            b"6th line",
            b"<<<selected>>>",
            b"7th line",
            b"8th line",
        )))

        ahs = parser.parse(raw_data, selection={SectionName("selected")})

        assert ahs.sections == {
            SectionName("selected"): [["7th", "line"], ["8th", "line"]],
        }
        assert ahs.cache_info == {}
        assert ahs.piggybacked_raw_data == {
            "piggyback_header": [
                b"<<<selected:cached(1000,0)>>>",
                b"3rd line",
                b"4th line",
            ]
        }
Example #19
    def test_persist_option_and_persisted_sections(self, parser, mocker,
                                                   monkeypatch):
        time_time = 1000
        time_delta = 50
        monkeypatch.setattr(time, "time", lambda: time_time)
        monkeypatch.setattr(
            SectionStore,
            "load",
            lambda self: PersistedSections({
                SectionName("persisted"): (42, 69, [["content"]]),
            }),
        )
        # Patch IO:
        monkeypatch.setattr(SectionStore, "store", lambda self, sections: None)

        raw_data = AgentRawData(b"\n".join((
            b"<<<section:persist(%i)>>>" % (time_time + time_delta),
            b"first line",
            b"second line",
        )))

        ahs = parser.parse(raw_data, selection=NO_SELECTION)

        assert ahs.sections == {
            SectionName("section"): [["first", "line"], ["second", "line"]],
            SectionName("persisted"): [["content"]],
        }
        assert ahs.cache_info == {
            SectionName("section"): (time_time, time_delta),
            SectionName("persisted"): (42, 27),
        }
        assert ahs.piggybacked_raw_data == {}
Example #20
    def test_raw_section_populates_sections(self, parser, store):
        raw_data = AgentRawData(
            b"\n".join(
                (
                    b"<<<a_section>>>",
                    b"first line",
                    b"second line",
                    b"<<<>>>",
                    b"<<<another_section>>>",
                    b"first line",
                    b"second line",
                    b"<<<>>>",
                )
            )
        )

        ahs = parser.parse(raw_data, selection=NO_SELECTION)

        assert ahs.sections == {
            SectionName("a_section"): [["first", "line"], ["second", "line"]],
            SectionName("another_section"): [["first", "line"], ["second", "line"]],
        }
        assert ahs.cache_info == {}
        assert ahs.piggybacked_raw_data == {}
        assert store.load() == {}
Example #21
    def test_persist_option_and_persisted_sections(self, parser, store, mocker,
                                                   monkeypatch):
        monkeypatch.setattr(time,
                            "time",
                            lambda c=itertools.count(1000, 50): next(c))
        monkeypatch.setattr(
            SectionStore,
            "load",
            lambda self: PersistedSections[AgentRawDataSection]({
                SectionName("persisted"): (42, 69, [["content"]]),
            }),
        )
        # Patch IO:
        monkeypatch.setattr(SectionStore, "store", lambda self, sections: None)

        raw_data = AgentRawData(b"\n".join((
            b"<<<section:persist(%i)>>>" % (1000 + 50),
            b"first line",
            b"second line",
        )))

        ahs = parser.parse(raw_data, selection=NO_SELECTION)

        assert ahs.sections == {
            SectionName("section"): [["first", "line"], ["second", "line"]],
            SectionName("persisted"): [["content"]],
        }
        assert ahs.cache_info == {
            SectionName("section"): (1000, 50),
            SectionName("persisted"): (42, 27),
        }
        assert ahs.piggybacked_raw_data == {}
        assert store.load() == PersistedSections[AgentRawDataSection]({
            SectionName("persisted"): (42, 69, [["content"]]),
        })
Example #22
    def _fetch_from_io(self, mode: Mode) -> AgentRawData:
        """
        The active agent cannot really 'fetch' live data.
        We consider data 'live', if they have been written to the cache
        by the receiver quite recently.
        """

        cache_file_path = self.file_cache.make_path(mode)

        try:
            if time.time() - cache_file_path.stat().st_mtime > self.allowed_age:
                raise MKFetcherError(
                    f"No data received within the last {self.allowed_age}s")

            raw_data = cache_file_path.read_bytes()
        except FileNotFoundError as exc:
            if cmk.utils.debug.enabled():
                raise
            raise MKFetcherError("No data has been sent") from exc

        if len(raw_data) < 16:  # be consistent with TCPFetcher
            raise MKFetcherError("Received data set is too small")

        return AgentRawData(raw_data)
Example #23
    def test_no_sources(self, cluster, nodes, config_cache, host_config):
        made_nodes = make_nodes(config_cache, host_config, None, sources=())

        host_sections = _collect_host_sections(
            nodes=made_nodes,
            file_cache_max_age=file_cache.MaxAge.none(),
            fetcher_messages=[
                # We do not pass sources explicitly but still append Piggyback.
                FetcherMessage.from_raw_data(
                    result.OK(AgentRawData(b"")),
                    Snapshot.null(),
                    FetcherType.PIGGYBACK,
                ) for _n in made_nodes
            ],
            selected_sections=NO_SELECTION,
        )[0]
        assert len(host_sections) == len(nodes)

        key_clu = HostKey(cluster, None, SourceType.HOST)
        assert key_clu not in host_sections

        for hostname, addr in nodes.items():
            key = HostKey(hostname, addr, SourceType.HOST)
            assert key in host_sections

            section = host_sections[key]
            # yapf: disable
            assert (section.sections[SectionName("section_name_%s" % hostname)]
                    == [["section_content_%s" % hostname]])
            assert not section.cache_info
            assert not section.piggybacked_raw_data
Example #24
    def test_update_with_store_and_non_persisting_raw_data(self):
        section_store = MockStore(PersistedSections[AgentRawDataSection]({
            SectionName("stored"): (0, 0, []),
        }))
        raw_data = AgentRawData(b"<<<fresh>>>")
        parser = AgentParser(
            "testhost",
            section_store,
            check_interval=0,
            keep_outdated=True,
            translation={},
            encoding_fallback="ascii",
            simulation=False,
            logger=logging.getLogger("test"),
        )

        ahs = parser.parse(raw_data, selection=NO_SELECTION)
        assert ahs.sections == {
            SectionName("fresh"): [],
            SectionName("stored"): [],
        }
        assert ahs.cache_info == {SectionName("stored"): (0, 0)}
        assert ahs.piggybacked_raw_data == {}
        assert section_store.load() == PersistedSections[AgentRawDataSection]({
            SectionName("stored"): (0, 0, []),
        })
Example #25
    def test_persist_option_populates_cache_info_and_persisted_sections(
        self,
        hostname,
        store,
        logger,
        monkeypatch,
    ):
        time_time = 1000
        time_delta = 50
        monkeypatch.setattr(time, "time", lambda: time_time)

        raw_data = AgentRawData(b"\n".join((
            b"<<<section:persist(%i)>>>" % (time_time + time_delta),
            b"first line",
            b"second line",
        )))

        ahs = AgentParser(hostname, store, False, logger).parse(raw_data)

        assert ahs.sections == {
            SectionName("section"): [["first", "line"], ["second", "line"]]
        }
        assert ahs.cache_info == {
            SectionName("section"): (time_time, time_delta)
        }
        assert ahs.piggybacked_raw_data == {}
        assert ahs.persisted_sections == PersistedSections[
            AgentSectionContent]({
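                # (persisted at, valid until, content); 1000 + 50 == 1050.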
                SectionName("section"): (1000, 1050, [["first", "line"],
                                                      ["second", "line"]]),
            })
Example #26
    def test_closing_piggyback_out_of_piggyback_section_closes_section(self, parser, store):
        raw_data = AgentRawData(
            b"\n".join(
                (
                    b"<<<a_section>>>",
                    b"first line",
                    b"second line",
                    b"<<<<>>>>",  # noop
                    b"<<<<>>>>",  # noop
                    b"<<<another_section>>>",
                    b"a line",
                    b"b line",
                )
            )
        )

        ahs = parser.parse(raw_data, selection=NO_SELECTION)
        assert ahs.sections == {
            SectionName("a_section"): [
                ["first", "line"],
                ["second", "line"],
            ],
            SectionName("another_section"): [
                ["a", "line"],
                ["b", "line"],
            ],
        }
        assert ahs.cache_info == {}
        assert ahs.piggybacked_raw_data == {}
        assert store.load() == {}
Example #27
    def __init__(
        self,
        hostname: HostName,
        ipaddress: Optional[HostAddress],
        *,
        mode: Mode,
        source_type: SourceType,
        fetcher_type: FetcherType,
        description: str,
        id_: str,
        main_data_source: bool,
        cpu_tracking_id: str,
    ):
        super().__init__(
            hostname,
            ipaddress,
            mode=mode,
            source_type=source_type,
            fetcher_type=fetcher_type,
            description=description,
            default_raw_data=AgentRawData(),
            default_host_sections=AgentHostSections(),
            id_=id_,
            cpu_tracking_id=cpu_tracking_id,
            cache_dir=Path(cmk.utils.paths.tcp_cache_dir) if main_data_source else None,
            persisted_section_dir=(Path(cmk.utils.paths.var_dir) / "persisted")
            if main_data_source else None,
        )
        # TODO: We should clean up these old directories one day.
        #       Then we can remove this special case
        self.main_data_source: Final[bool] = main_data_source
Example #28
    def test_update_with_store_and_persisting_raw_data(self, monkeypatch):
        monkeypatch.setattr(time,
                            "time",
                            lambda c=itertools.count(1000, 50): next(c))
        section_store = MockStore(PersistedSections[AgentRawDataSection]({
            SectionName("stored"): (0, 0, [["canned", "section"]]),
        }))
        raw_data = AgentRawData(b"<<<fresh:persist(10)>>>\nhello section")
        parser = AgentParser(
            "testhost",
            section_store,
            check_interval=0,
            keep_outdated=True,
            translation={},
            encoding_fallback="ascii",
            simulation=False,
            logger=logging.getLogger("test"),
        )

        ahs = parser.parse(raw_data, selection=NO_SELECTION)
        assert ahs.sections == {
            SectionName("fresh"): [["hello", "section"]],
            SectionName("stored"): [["canned", "section"]],
        }
        assert ahs.cache_info == {
            SectionName("stored"): (0, 0),
            SectionName("fresh"): (1000, -990),
        }
        assert ahs.piggybacked_raw_data == {}
        assert section_store.load() == PersistedSections[AgentRawDataSection]({
            SectionName("stored"): (0, 0, [["canned", "section"]]),
            SectionName("fresh"): (1000, 10, [["hello", "section"]]),
        })
Example #29
    def test_result_answer(self, count):
        fetcher_payload = AgentResultMessage(AgentRawData(69 * b"\xff"))
        fetcher_stats = ResultStats(Snapshot.null())
        fetcher_message = FetcherMessage(
            FetcherHeader(
                FetcherType.TCP,
                PayloadType.AGENT,
                status=42,
                payload_length=len(fetcher_payload),
                stats_length=len(fetcher_stats),
            ),
            fetcher_payload,
            fetcher_stats,
        )
        fetcher_messages = list(repeat(fetcher_message, count))
        timeout = 7

        message = CMCMessage.result_answer(fetcher_messages, timeout,
                                           Snapshot.null())
        assert isinstance(repr(message), str)
        assert CMCMessage.from_bytes(bytes(message)) == message
        assert message.header.name == "fetch"
        assert message.header.state == CMCHeader.State.RESULT
        assert message.header.log_level.strip() == ""
        assert message.header.payload_length == len(message) - len(
            message.header)
        assert message.header.payload_length == len(message.payload)
Example #30
    def test_nameless_sections_are_skipped(self, parser, store):
        raw_data = AgentRawData(
            b"\n".join(
                (
                    b"<<<a_section>>>",
                    b"a first line",
                    b"a second line",
                    b"<<<:cached(10, 5)>>>",
                    b"ignored first line",
                    b"ignored second line",
                    b"<<<b_section>>>",
                    b"b first line",
                    b"b second line",
                    b"<<<>>>",
                    b"ignored third line",
                    b"ignored forth line",
                    b"<<<c_section>>>",
                    b"c first line",
                    b"c second line",
                )
            )
        )

        ahs = parser.parse(raw_data, selection=NO_SELECTION)
        assert ahs.sections == {
            SectionName("a_section"): [["a", "first", "line"], ["a", "second", "line"]],
            SectionName("b_section"): [["b", "first", "line"], ["b", "second", "line"]],
            SectionName("c_section"): [["c", "first", "line"], ["c", "second", "line"]],
        }
        assert ahs.cache_info == {}
        assert ahs.piggybacked_raw_data == {}
        assert store.load() == {}