Example #1
def _run_fetchers_from_file(host_name: HostName, file_name: Path, mode: Mode,
                            timeout: int) -> None:
    """ Writes to the stdio next data:
    Count Answer        Content               Action
    ----- ------        -------               ------
    1     Result        Fetcher Blob          Send to the checker
    0..n  Log           Message to be logged  Log
    1     End of reply  empty                 End IO
    *) The Fetcher blob contains all answers from all fetcher objects, including failed ones
    **) file_name is serial/host_name.json
    ***) timeout is not used at the moment"""
    with file_name.open() as f:
        data = json.load(f)

    fetchers = data["fetchers"]

    # CONTEXT: At the moment we call the fetcher-executors sequentially (for various reasons).
    # Possibilities:
    # Sequential: a slow fetcher may block the other fetchers.
    # Asyncio: every fetcher must be asyncio-aware. This is OK, but even estimating the effort takes time.
    # Threading: some fetchers may not be thread-safe (SNMP, for example). May be dangerous.
    # Multiprocessing: CPU and memory hungry (at least in terms of the kernel). Also duplicates
    # functionality of the Microcore.

    messages: List[protocol.FetcherMessage] = []
    with timeout_control(host_name, timeout):
        try:
            # fill as many messages as possible before the timeout exception is raised
            for entry in fetchers:
                messages.append(run_fetcher(entry, mode))
        except MKTimeout as exc:
            # fill missing entries with timeout errors
            messages.extend([
                protocol.FetcherMessage.timeout(
                    FetcherType[entry["fetcher_type"]],
                    exc,
                    Snapshot.null(),
                ) for entry in fetchers[len(messages):]
            ])

    logger.debug("Produced %d messages", len(messages))
    write_bytes(bytes(protocol.CMCMessage.result_answer(*messages)))
    for msg in filter(
            lambda msg: msg.header.payload_type is protocol.PayloadType.ERROR,
            messages,
    ):
        logger.log(msg.header.status, "Error in %s fetcher: %r",
                   msg.header.fetcher_type.name, msg.raw_data.error)
        logger.debug("".join(
            traceback.format_exception(
                msg.raw_data.error.__class__,
                msg.raw_data.error,
                msg.raw_data.error.__traceback__,
            )))
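
The try/except above implements a simple contract: collect as many fetcher results as possible until MKTimeout fires, then pad the reply so that it still contains one message per configured fetcher. A minimal, self-contained sketch of that padding pattern (the job/result names are illustrative, not the cmk protocol classes):

from typing import Callable, List


def run_with_padding(
    jobs: List[Callable[[], str]],
    make_timeout_result: Callable[[], str],
) -> List[str]:
    """Run jobs in order; on timeout, pad the tail with timeout markers."""
    results: List[str] = []
    try:
        for job in jobs:
            results.append(job())
    except TimeoutError:
        # Guarantee one entry per job, even for jobs that never ran.
        results.extend(make_timeout_result() for _ in jobs[len(results):])
    return results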
Example #2
    def test_serialization(self, count):
        fetcher_payload = AgentResultMessage(AgentRawData(69 * b"\xff"))
        fetcher_stats = ResultStats(Snapshot.null())
        fetcher_message = FetcherMessage(
            FetcherHeader(
                FetcherType.TCP,
                PayloadType.AGENT,
                status=42,
                payload_length=len(fetcher_payload),
                stats_length=len(fetcher_stats),
            ),
            fetcher_payload,
            fetcher_stats,
        )
        fetcher_messages = list(repeat(fetcher_message, count))
        serial = 1337
        host_name = "my_host_name"
        timeout = 7

        message = CMCMessage.result_answer(
            fetcher_messages,
            serial=serial,
            host_name=host_name,
            timeout=timeout,
            duration=Snapshot.null(),
        )

        other = CMCMessage.from_bytes(bytes(message))
        assert other == message
        assert isinstance(repr(other), str)
        assert other.header.name == "fetch"
        assert other.header.state == CMCHeader.State.RESULT
        assert other.header.log_level.strip() == ""
        assert other.header.payload_length == len(message) - len(
            message.header)
        assert other.header.payload_length == len(message.payload)
        assert isinstance(other.payload, CMCResults)
        assert other.payload.id_.serial == serial
        assert other.payload.id_.host_name == host_name
        assert other.payload.stats.timeout == timeout
        assert other.payload.stats.duration == Snapshot.null()
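
The round trip asserted here relies on two invariants of the message containers: from_bytes(bytes(message)) reproduces an equal object, and payload_length equals len(message) - len(message.header). A toy container satisfying the same invariants (a sketch, not the real CMCMessage):

from dataclasses import dataclass


@dataclass(frozen=True)
class Frame:
    """Toy header + payload container mirroring the tested invariants."""
    header: bytes
    payload: bytes

    def __bytes__(self) -> bytes:
        return self.header + self.payload

    def __len__(self) -> int:
        return len(self.header) + len(self.payload)

    @classmethod
    def from_bytes(cls, data: bytes, header_length: int) -> "Frame":
        return cls(data[:header_length], data[header_length:])


frame = Frame(header=b"fetch:RESULT:", payload=69 * b"\xff")
assert Frame.from_bytes(bytes(frame), len(frame.header)) == frame
assert len(frame) - len(frame.header) == len(frame.payload)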
Example #3
 def messages(self):
     msg = []
     for payload, stats in (
         (AgentResultMessage(AgentRawData(42 * b"\0")),
          ResultStats(Snapshot.null())),
         (AgentResultMessage(AgentRawData(12 * b"\0")),
          ResultStats(Snapshot.null())),
     ):
         msg.append(
             FetcherMessage(
                 FetcherHeader(
                     FetcherType.TCP,
                     PayloadType.AGENT,
                     status=69,
                     payload_length=len(payload),
                     stats_length=len(stats),
                 ),
                 payload,
                 stats,
             ))
     return msg
Example #4
def _run_fetchers_from_file(
    config_path: ConfigPath,
    host_name: HostName,
    mode: Mode,
    timeout: int,
) -> None:
    """ Writes to the stdio next data:
    Count Answer        Content               Action
    ----- ------        -------               ------
    1     Result        Fetcher Blob          Send to the checker
    0..n  Log           Message to be logged  Log
    1     End of reply  empty                 End IO

    """
    messages: List[protocol.FetcherMessage] = []
    with Timeout(
            timeout,
            message=
            f"Fetcher for host \"{host_name}\" timed out after {timeout} seconds",
    ) as timeout_manager:
        fetchers = tuple(_parse_config(config_path, host_name))
        try:
            # fill as many messages as possible before the timeout exception is raised
            for fetcher in fetchers:
                messages.append(_run_fetcher(fetcher, mode))
        except MKTimeout as exc:
            # fill missing entries with timeout errors
            messages.extend(
                protocol.FetcherMessage.timeout(
                    FetcherType.from_fetcher(fetcher),
                    exc,
                    Snapshot.null(),
                ) for fetcher in fetchers[len(messages):])

    if timeout_manager.signaled:
        messages = _replace_netsnmp_obfuscated_timeout(messages,
                                                       timeout_manager.message)

    logger.debug("Produced %d messages", len(messages))
    write_bytes(bytes(protocol.CMCMessage.result_answer(*messages)))
    for msg in filter(
            lambda msg: msg.header.payload_type is protocol.PayloadType.ERROR,
            messages,
    ):
        logger.log(msg.header.status, "Error in %s fetcher: %r",
                   msg.header.fetcher_type.name, msg.raw_data.error)
        logger.debug("".join(
            traceback.format_exception(
                msg.raw_data.error.__class__,
                msg.raw_data.error,
                msg.raw_data.error.__traceback__,
            )))
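
The Timeout context manager used above exposes a signaled flag that is checked once the with block has ended, so the caller can tell whether the MKTimeout really came from the alarm. As an illustration only (the real implementation is not shown in this snippet, and SIGALRM is Unix-only), such a manager can be built roughly like this:

import signal
from types import FrameType
from typing import Optional


class AlarmTimeout:
    """Raise TimeoutError in the block after `seconds`; remember whether it fired."""

    def __init__(self, seconds: int, message: str) -> None:
        self.seconds = seconds
        self.message = message
        self.signaled = False

    def _handler(self, signum: int, frame: Optional[FrameType]) -> None:
        self.signaled = True
        raise TimeoutError(self.message)

    def __enter__(self) -> "AlarmTimeout":
        self._old_handler = signal.signal(signal.SIGALRM, self._handler)
        signal.alarm(self.seconds)
        return self

    def __exit__(self, *exc_info: object) -> None:
        signal.alarm(0)  # cancel any pending alarm
        signal.signal(signal.SIGALRM, self._old_handler)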
Example #5
    def test_multiple_sources_from_the_same_host(
        self,
        hostname,
        ipaddress,
        mode,
        config_cache,
        host_config,
    ):
        sources = [
            ProgramSource.ds(
                hostname,
                ipaddress,
                mode=mode,
                template="",
            ),
            TCPSource(
                hostname,
                ipaddress,
                mode=mode,
            ),
        ]

        broker = ParsedSectionsBroker(
            _collect_host_sections(
                nodes=make_nodes(
                    config_cache,
                    host_config,
                    ipaddress,
                    mode=mode,
                    sources=sources,
                ),
                file_cache_max_age=0,
                fetcher_messages=[
                    FetcherMessage.from_raw_data(
                        result.OK(source.default_raw_data),
                        Snapshot.null(),
                        source.fetcher_type,
                    ) for source in sources
                ],
                selected_sections=NO_SELECTION,
            )[0])
        assert len(broker) == 1

        key = HostKey(hostname, ipaddress, SourceType.HOST)
        assert key in broker

        section = broker[key]

        assert len(section.sections) == 1
        # yapf: disable
        assert (section.sections[SectionName("section_name_%s" % hostname)]
                == len(sources) * [["section_content"]])
Example #6
def _replace_netsnmp_obfuscated_timeout(
        messages: Iterable[protocol.FetcherMessage],
        timeout_msg: str) -> List[protocol.FetcherMessage]:
    return [
        protocol.FetcherMessage.timeout(
            FetcherType.SNMP,
            MKTimeout(timeout_msg),
            Snapshot.null(),
        ) if (msg.header.fetcher_type is FetcherType.SNMP
              and msg.header.payload_type is protocol.PayloadType.ERROR
              and isinstance(msg.raw_data.error, SystemError)) else msg
        for msg in messages
    ]
Example #7
 def error(cls, fetcher_type: FetcherType, exc: Exception) -> "FetcherMessage":
     stats = ResultStats(Snapshot.null())
     payload = ErrorResultMessage(exc)
     return cls(
         FetcherHeader(
             fetcher_type,
             PayloadType.ERROR,
             status=logging.CRITICAL,
             payload_length=len(payload),
             stats_length=len(stats),
         ),
         payload,
         stats,
     )
Example #8
def make_error_message(fetcher_type: FetcherType, exc: Exception) -> FetcherMessage:
    stats = L3Stats(Snapshot.null())
    payload = ErrorPayload(exc)
    return FetcherMessage(
        FetcherHeader(
            fetcher_type,
            PayloadType.ERROR,
            status=logging.CRITICAL,
            payload_length=len(payload),
            stats_length=len(stats),
        ),
        payload,
        stats,
    )
Example #9
    def test_multiple_sources_from_different_hosts(self, hostname, ipaddress, mode, config_cache, host_config):
        sources = [
            ProgramSource.ds(hostname + "0", ipaddress, mode=mode, template=""),
            TCPSource(hostname + "1", ipaddress, mode=mode),
            TCPSource(hostname + "2", ipaddress, mode=mode),
        ]

        mhs = MultiHostSections()
        update_host_sections(
            mhs,
            make_nodes(
                config_cache,
                host_config,
                ipaddress,
                mode=mode,
                sources=sources,
            ),
            max_cachefile_age=0,
            selected_raw_sections=None,
            host_config=host_config,
            fetcher_messages=[
                FetcherMessage.from_raw_data(
                    result.OK(source.default_raw_data),
                    L3Stats(Snapshot.null()),
                    source.fetcher_type,
                )
                for source in sources
            ],
        )
        assert len(mhs) == 1

        key = HostKey(hostname, ipaddress, SourceType.HOST)
        assert key in mhs

        section = mhs[key]
        assert isinstance(section, AgentHostSections)

        assert len(section.sections) == len(sources)
        for source in sources:
            # yapf: disable
            assert (
                section.sections[SectionName("section_name_%s" % source.hostname)]
                == [["section_content"]])
Example #10
    def test_multiple_sources_from_different_hosts(self, hostname, ipaddress, mode, config_cache, host_config):
        sources = [
            ProgramSource.ds(hostname + "0", ipaddress, mode=mode, template=""),
            TCPSource(hostname + "1", ipaddress, mode=mode),
            TCPSource(hostname + "2", ipaddress, mode=mode),
        ]

        nodes = make_nodes(
            config_cache,
            host_config,
            ipaddress,
            mode=mode,
            sources=sources,
        )

        broker = ParsedSectionsBroker()
        update_host_sections(
            broker,
            nodes,
            max_cachefile_age=0,
            host_config=host_config,
            fetcher_messages=[
                FetcherMessage.from_raw_data(
                    result.OK(source.default_raw_data),
                    Snapshot.null(),
                    source.fetcher_type,
                )
                for _h, _i, sources in nodes for source in sources
            ],
            selected_sections=NO_SELECTION,
        )
        assert len(broker) == 1

        key = HostKey(hostname, ipaddress, SourceType.HOST)
        assert key in broker

        section = broker[key]

        assert len(section.sections) == len(sources)
        for source in sources:
            # yapf: disable
            assert (
                section.sections[SectionName("section_name_%s" % source.hostname)]
                == [["section_content"]])
Example #11
    def test_one_snmp_source(self, hostname, ipaddress, mode, config_cache,
                             host_config):
        broker = ParsedSectionsBroker()
        update_host_sections(
            broker,
            make_nodes(
                config_cache,
                host_config,
                ipaddress,
                mode=mode,
                sources=[
                    SNMPSource.snmp(
                        hostname,
                        ipaddress,
                        mode=mode,
                        selected_sections=NO_SELECTION,
                        on_scan_error="raise",
                    ),
                ],
            ),
            max_cachefile_age=0,
            host_config=host_config,
            fetcher_messages=[
                FetcherMessage.from_raw_data(
                    result.OK({}),
                    Snapshot.null(),
                    FetcherType.SNMP,
                ),
            ],
            selected_sections=NO_SELECTION,
        )
        assert len(broker) == 1

        key = HostKey(hostname, ipaddress, SourceType.HOST)
        assert key in broker

        section = broker[key]

        assert len(section.sections) == 1
        assert section.sections[SectionName("section_name_%s" %
                                            hostname)] == [["section_content"]]
Example #12
    def test_one_snmp_source(self, hostname, ipaddress, mode, config_cache,
                             host_config):
        mhs = MultiHostSections()
        update_host_sections(
            mhs,
            make_nodes(
                config_cache,
                host_config,
                ipaddress,
                mode=mode,
                sources=[
                    SNMPSource.snmp(
                        hostname,
                        ipaddress,
                        mode=mode,
                        selected_sections=NO_SELECTION,
                    ),
                ],
            ),
            max_cachefile_age=0,
            host_config=host_config,
            fetcher_messages=[
                FetcherMessage.from_raw_data(
                    result.OK(SNMPRawData({})),
                    Snapshot.null(),
                    FetcherType.SNMP,
                ),
            ],
            selected_sections=NO_SELECTION,
        )
        assert len(mhs) == 1

        key = HostKey(hostname, ipaddress, SourceType.HOST)
        assert key in mhs

        section = mhs[key]

        assert len(section.sections) == 1
        assert section.sections[SectionName("section_name_%s" %
                                            hostname)] == [["section_content"]]
Example #13
    def test_no_sources(self, cluster, nodes, config_cache, host_config, mode):
        made_nodes = make_nodes(
            config_cache,
            host_config,
            None,
            mode=mode,
            sources=(),
        )

        broker = ParsedSectionsBroker()
        update_host_sections(
            broker,
            made_nodes,
            max_cachefile_age=0,
            host_config=host_config,
            fetcher_messages=[
                # We do not pass sources explicitly but still append Piggyback.
                FetcherMessage.from_raw_data(
                    result.OK(AgentRawData(b"")),
                    Snapshot.null(),
                    FetcherType.PIGGYBACK,
                ) for _n in made_nodes
            ],
            selected_sections=NO_SELECTION,
        )
        assert len(broker) == len(nodes)

        key_clu = HostKey(cluster, None, SourceType.HOST)
        assert key_clu not in broker

        for hostname, addr in nodes.items():
            key = HostKey(hostname, addr, SourceType.HOST)
            assert key in broker

            section = broker[key]
            # yapf: disable
            assert (section.sections[SectionName("section_name_%s" % hostname)]
                    == [["section_content_%s" % hostname]])
            assert not section.cache_info
            assert not section.piggybacked_raw_data
Example #14
    def test_no_sources(self, cluster, nodes, config_cache, host_config, mode):
        mhs = MultiHostSections()
        update_host_sections(
            mhs,
            make_nodes(
                config_cache,
                host_config,
                None,
                mode=mode,
                sources=(),
            ),
            max_cachefile_age=0,
            selected_raw_sections=None,
            host_config=host_config,
            fetcher_messages=[
                # We do not pass sources explicitly but still append Piggyback.
                FetcherMessage.from_raw_data(
                    result.OK(b""),
                    L3Stats(Snapshot.null()),
                    FetcherType.PIGGYBACK,
                ),
            ],
        )
        assert len(mhs) == len(nodes)

        key_clu = HostKey(cluster, None, SourceType.HOST)
        assert key_clu not in mhs

        for hostname, addr in nodes.items():
            key = HostKey(hostname, addr, SourceType.HOST)
            assert key in mhs

            section = mhs[key]
            # yapf: disable
            assert (section.sections[SectionName("section_name_%s" % hostname)]
                    == [["section_content"]])
            assert not section.cache_info
            assert not section.piggybacked_raw_data
            assert not section.persisted_sections
Example #15
    def test_one_snmp_source(self, hostname, ipaddress, mode, config_cache,
                             host_config):
        host_sections = _collect_host_sections(
            nodes=make_nodes(
                config_cache,
                host_config,
                ipaddress,
                mode=mode,
                sources=[
                    SNMPSource.snmp(
                        hostname,
                        ipaddress,
                        mode=mode,
                        selected_sections=NO_SELECTION,
                        force_cache_refresh=False,
                        on_scan_error="raise",
                    ),
                ],
            ),
            file_cache_max_age=0,
            fetcher_messages=[
                FetcherMessage.from_raw_data(
                    result.OK({}),
                    Snapshot.null(),
                    FetcherType.SNMP,
                ),
            ],
            selected_sections=NO_SELECTION,
        )[0]
        assert len(host_sections) == 1

        key = HostKey(hostname, ipaddress, SourceType.HOST)
        assert key in host_sections

        section = host_sections[key]

        assert len(section.sections) == 1
        assert section.sections[SectionName("section_name_%s" %
                                            hostname)] == [["section_content"]]
Example #16
def run_fetcher(entry: Dict[str, Any], mode: Mode) -> FetcherMessage:
    """ Entrypoint to obtain data from fetcher objects.    """

    try:
        fetcher_type = FetcherType[entry["fetcher_type"]]
    except KeyError as exc:
        raise RuntimeError from exc

    log.logger.debug("Executing fetcher: %s", entry["fetcher_type"])

    try:
        fetcher_params = entry["fetcher_params"]
    except KeyError as exc:
        stats = L3Stats(Snapshot.null())
        payload = ErrorPayload(exc)
        return FetcherMessage(
            FetcherHeader(
                fetcher_type,
                PayloadType.ERROR,
                status=logging.CRITICAL,
                payload_length=len(payload),
                stats_length=len(stats),
            ),
            payload,
            stats,
        )

    try:
        with CPUTracker() as tracker, fetcher_type.from_json(
                fetcher_params) as fetcher:
            raw_data = fetcher.fetch(mode)
    except Exception as exc:
        raw_data = result.Error(exc)

    return FetcherMessage.from_raw_data(
        raw_data,
        L3Stats(tracker.duration),
        fetcher_type,
    )
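
Note that tracker.duration is read after the with block even when fetch() raises: the tracker's __exit__ finalizes the measurement before the except clause runs, so the duration is still available. A minimal stand-in with the same shape (illustration only, not the actual CPUTracker):

import time
from types import TracebackType
from typing import Optional, Type


class SimpleCPUTracker:
    """Measure CPU time spent inside a `with` block."""

    duration: float = 0.0

    def __enter__(self) -> "SimpleCPUTracker":
        self._start = time.process_time()
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Runs even if the body raised, so `duration` is usable afterwards.
        self.duration = time.process_time() - self._start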
Example #17
    def test_multiple_sources_from_the_same_host(
        self,
        hostname,
        ipaddress,
        config_cache,
        host_config,
    ):
        sources = [
            ProgramSource.ds(hostname, ipaddress, template=""),
            TCPSource(hostname, ipaddress),
        ]

        host_sections = _collect_host_sections(
            fetched=[
                (
                    source,
                    FetcherMessage.from_raw_data(
                        result.OK(source.default_raw_data),
                        Snapshot.null(),
                        source.fetcher_type,
                    ),
                )
                for source in sources
            ],
            file_cache_max_age=file_cache.MaxAge.none(),
            selected_sections=NO_SELECTION,
        )[0]
        assert len(host_sections) == 1

        key = HostKey(hostname, ipaddress, SourceType.HOST)
        assert key in host_sections

        section = host_sections[key]

        assert len(section.sections) == 1
        assert section.sections[SectionName("section_name_%s" % hostname)] == len(sources) * [
            ["section_content"]
        ]
Example #18
    def test_one_nonsnmp_source(self, hostname, ipaddress, mode, config_cache,
                                host_config, source):
        source = source(hostname, ipaddress, mode=mode)
        assert source.source_type is SourceType.HOST

        mhs = MultiHostSections()
        update_host_sections(
            mhs,
            make_nodes(
                config_cache,
                host_config,
                ipaddress,
                mode=mode,
                sources=[source],
            ),
            max_cachefile_age=0,
            selected_raw_sections=None,
            host_config=host_config,
            fetcher_messages=[
                FetcherMessage.from_raw_data(
                    result.OK(source.default_raw_data),
                    L3Stats(Snapshot.null()),
                    source.fetcher_type,
                ),
            ],
        )
        assert len(mhs) == 1

        key = HostKey(hostname, ipaddress, source.source_type)
        assert key in mhs

        section = mhs[key]
        assert isinstance(section, AgentHostSections)

        assert len(section.sections) == 1
        assert section.sections[SectionName("section_name_%s" %
                                            hostname)] == [["section_content"]]
Example #19
    def test_multiple_sources_from_different_hosts(self, hostname, ipaddress,
                                                   config_cache, host_config):
        sources = [
            ProgramSource.ds(hostname + "0", ipaddress, template=""),
            TCPSource(hostname + "1", ipaddress),
            TCPSource(hostname + "2", ipaddress),
        ]

        nodes = make_nodes(config_cache,
                           host_config,
                           ipaddress,
                           sources=sources)

        host_sections = _collect_host_sections(
            nodes=nodes,
            file_cache_max_age=file_cache.MaxAge.none(),
            fetcher_messages=[
                FetcherMessage.from_raw_data(
                    result.OK(source.default_raw_data),
                    Snapshot.null(),
                    source.fetcher_type,
                ) for _h, _i, sources in nodes for source in sources
            ],
            selected_sections=NO_SELECTION,
        )[0]
        assert len(host_sections) == 1

        key = HostKey(hostname, ipaddress, SourceType.HOST)
        assert key in host_sections

        section = host_sections[key]

        assert len(section.sections) == len(sources)
        for source in sources:
            assert section.sections[SectionName(
                "section_name_%s" % source.hostname)] == [["section_content"]]
Example #20
    def test_one_nonsnmp_source(self, hostname, ipaddress, mode, config_cache,
                                host_config, source):
        source = source(hostname, ipaddress, mode=mode)
        assert source.source_type is SourceType.HOST

        broker = ParsedSectionsBroker()
        update_host_sections(
            broker,
            make_nodes(
                config_cache,
                host_config,
                ipaddress,
                mode=mode,
                sources=[source],
            ),
            max_cachefile_age=0,
            host_config=host_config,
            fetcher_messages=[
                FetcherMessage.from_raw_data(
                    result.OK(source.default_raw_data),
                    Snapshot.null(),
                    source.fetcher_type,
                ),
            ],
            selected_sections=NO_SELECTION,
        )
        assert len(broker) == 1

        key = HostKey(hostname, ipaddress, source.source_type)
        assert key in broker

        section = broker[key]

        assert len(section.sections) == 1
        assert section.sections[SectionName("section_name_%s" %
                                            hostname)] == [["section_content"]]
Example #21
 def duration(self):
     return Snapshot.null()
Example #22
 def l3stats(self):
     return ResultStats(Snapshot.null())
Example #23
 def stats_length(self):
     return len(ResultStats(Snapshot.null()))
Example #24
 def payload(self, messages):
     return CMCResults(messages, FetcherResultsStats(7, Snapshot.null()))
Example #25
 def stats(self):
     return FetcherResultsStats(7, Snapshot.null())
Example #26
def test_get_host_sections_cluster(monkeypatch, mocker):
    hostname = "testhost"
    hosts = {
        "host0": "10.0.0.0",
        "host1": "10.0.0.1",
        "host2": "10.0.0.2",
    }
    address = "1.2.3.4"
    tags = {"agent": "no-agent"}
    section_name = SectionName("test_section")
    config_cache = make_scenario(hostname, tags).apply(monkeypatch)
    host_config = config.HostConfig.make_host_config(hostname)

    def fake_lookup_ip_address(host_config, family=None):
        return hosts[host_config.hostname]

    def check(_, *args, **kwargs):
        return result.OK(AgentHostSections(sections={section_name: [[str(section_name)]]}))

    monkeypatch.setattr(
        config,
        "lookup_ip_address",
        fake_lookup_ip_address,
    )
    monkeypatch.setattr(
        Source,
        "parse",
        check,
    )
    mocker.patch.object(
        cmk.utils.piggyback,
        "remove_source_status_file",
        autospec=True,
    )
    mocker.patch.object(
        cmk.utils.piggyback,
        "_store_status_file_of",
        autospec=True,
    )

    # Create a cluster
    host_config.nodes = list(hosts.keys())

    nodes = make_nodes(
        config_cache,
        host_config,
        address,
        sources=make_sources(host_config, address),
    )

    host_sections = _collect_host_sections(
        nodes=nodes,
        file_cache_max_age=host_config.max_cachefile_age,
        fetcher_messages=[
            FetcherMessage.from_raw_data(
                result.OK(source.default_raw_data),
                Snapshot.null(),
                source.fetcher_type,
            ) for _h, _i, sources in nodes for source in sources
        ],
        selected_sections=NO_SELECTION,
    )[0]
    assert len(host_sections) == len(hosts) == 3
    cmk.utils.piggyback._store_status_file_of.assert_not_called()  # type: ignore[attr-defined]
    assert cmk.utils.piggyback.remove_source_status_file.call_count == 3  # type: ignore[attr-defined]

    for host, addr in hosts.items():
        remove_source_status_file = cmk.utils.piggyback.remove_source_status_file
        remove_source_status_file.assert_any_call(host)  # type: ignore[attr-defined]
        key = HostKey(host, addr, SourceType.HOST)
        assert key in host_sections
        section = host_sections[key]
        assert len(section.sections) == 1
        assert next(iter(section.sections)) == section_name
        assert not section.cache_info
        assert not section.piggybacked_raw_data
Example #27
 def test_json_serialization_null(self, null: Snapshot) -> None:
     assert Snapshot.deserialize(json_identity(null.serialize())) == null
Example #28
 def from_bytes(cls, data: bytes) -> FetcherResultsStats:
     conf_len = struct.unpack(cls.fmt, data[:cls.length])[0]
     conf = json.loads(data[cls.length:cls.length +
                            conf_len].decode("ascii"))
     return cls(conf["timeout"], Snapshot.deserialize(conf["duration"]))
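
from_bytes here reads a fixed-size length field (cls.fmt / cls.length) followed by ASCII-encoded JSON. A hedged sketch of a matching encoder/decoder pair, with the format string chosen purely for illustration:

import json
import struct

FMT = "!L"                      # assumption: a single unsigned 32-bit length field
LENGTH = struct.calcsize(FMT)


def stats_to_bytes(timeout: int, duration: list) -> bytes:
    """Length-prefix an ASCII JSON body, mirroring the decoder above."""
    body = json.dumps({"timeout": timeout, "duration": duration}).encode("ascii")
    return struct.pack(FMT, len(body)) + body


def stats_from_bytes(data: bytes) -> dict:
    (body_length,) = struct.unpack(FMT, data[:LENGTH])
    return json.loads(data[LENGTH:LENGTH + body_length].decode("ascii"))


assert stats_from_bytes(stats_to_bytes(7, [0, 0, 0]))["timeout"] == 7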
Example #29
 def from_bytes(cls, data: bytes) -> ResultStats:
     return ResultStats(
         Snapshot.deserialize(json.loads(data.decode("ascii"))["duration"]))
Example #30
 def test_json_serialization_now(self, now) -> None:
     assert Snapshot.deserialize(json_identity(now.serialize())) == now
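
json_identity itself does not appear in these snippets; the tests only need it to push the serialized snapshot through a JSON encode/decode cycle. A plausible definition, assumed here for completeness:

import json
from typing import Any


def json_identity(serialized: Any) -> Any:
    """Round-trip a value through JSON text without changing it."""
    return json.loads(json.dumps(serialized))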