def test_check_crash_report_read_snmp_info(monkeypatch):
    Scenario().apply(monkeypatch)
    config.load_checks(
        check_api.get_check_api_context,
        ["%s/uptime" % cmk.utils.paths.checks_dir, "%s/snmp_uptime" % cmk.utils.paths.checks_dir],
    )

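    # Pre-create an SNMP data source cache file for the host; the crash report should pick it up as snmp_info.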
    cache_path = Path(cmk.utils.paths.data_source_cache_dir, "snmp", "testhost")
    cache_path.parent.mkdir(parents=True, exist_ok=True)
    with cache_path.open("w", encoding="utf-8") as f:
        f.write("[]\n")

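    # Raise an exception on purpose so there is an active exception context for the crash report.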
    try:
        raise Exception("DING")
    except Exception:
        crash = crash_reporting.CheckCrashReport.from_exception_and_context(
            hostname=HostName("testhost"),
            check_plugin_name="snmp_uptime",
            check_plugin_kwargs={},
            is_manual_check=False,
            description="Uptime",
            text="Output",
        )

    assert isinstance(crash, crash_reporting.CheckCrashReport)
    assert crash.agent_output is None
    assert crash.snmp_info == b"[]\n"
Example 2
def test_attribute_defaults(monkeypatch):
    ipaddress = "1.2.3.4"
    hostname = HostName("testhost")

    ts = Scenario()
    ts.add_host(hostname)
    ts.apply(monkeypatch)

    source = TCPSource(hostname, ipaddress)
    monkeypatch.setattr(source, "file_cache_base_path", Path("/my/path/"))
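    # The expected fetcher config reflects the patched cache path and the default agent port 6556.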
    assert source.fetcher_configuration == {
        "file_cache": {
            "hostname": "testhost",
            "disabled": False,
            "max_age": MaxAge.none(),
            "base_path": "/my/path",
            "simulation": False,
            "use_outdated": False,
            "use_only_cache": False,
        },
        "family": socket.AF_INET,
        "address": (ipaddress, 6556),
        "host_name": str(hostname),
        "timeout": 5.0,
        "encryption_settings": {
            "use_realtime": "enforce",
            "use_regular": "disable",
        },
    }
    assert source.description == "TCP: %s:%s" % (ipaddress, 6556)
    assert source.id == "agent"
Example 3
    def fetcher(self, file_cache: NoCache) -> PiggybackFetcher:
        return PiggybackFetcher(
            file_cache,
            hostname=HostName("host"),
            address=HostAddress("1.2.3.4"),
            time_settings=[],
        )
Example 4
    def fetcher_pysnmp(self, file_cache: SNMPFileCache) -> SNMPFetcher:
        return SNMPFetcher(
            file_cache,
            sections={},
            on_error=OnError.RAISE,
            missing_sys_description=False,
            do_status_data_inventory=False,
            section_store_path="/tmp/db",
            snmp_config=SNMPHostConfig(
                is_ipv6_primary=False,
                hostname=HostName("bob"),
                ipaddress="1.2.3.4",
                credentials="public",
                port=42,
                is_bulkwalk_host=False,
                is_snmpv2or3_without_bulkwalk_host=False,
                bulk_walk_size_of=0,
                timing={},
                oid_range_limits={},
                snmpv3_contexts=[],
                character_encoding=None,
                is_usewalk_host=False,
                snmp_backend=(
                    SNMPBackendEnum.PYSNMP
                    if not cmk_version.is_raw_edition()
                    else SNMPBackendEnum.CLASSIC
                ),
            ),
        )
Example 5
    def test_keep_outdated_true(self, logger, monkeypatch):
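        # Fake the clock: each call to time.time() returns 1000, 1050, 1100, ... (advancing 50s per call).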
        monkeypatch.setattr(time,
                            "time",
                            lambda c=itertools.count(1000, 50): next(c))

        section_store = MockStore(
            "/dev/null",
            PersistedSections[SNMPRawDataSection]({
                SectionName("section"): (500, 600, [["old"]]),
            }),
            logger=logger,
        )
        parser = SNMPParser(
            HostName("testhost"),
            section_store,
            check_intervals={SectionName("section"): 42},
            keep_outdated=True,
            logger=logger,
        )
        shs = parser.parse({}, selection=NO_SELECTION)
        assert shs.sections == {SectionName("section"): [["old"]]}
        assert shs.cache_info == {
            SectionName("section"): (500, 100),
        }
        assert shs.piggybacked_raw_data == {}
        assert not section_store.load() == PersistedSections[SNMPRawDataSection]({
            SectionName("section"): (1000, 1042, [["old"]]),
        })
Example 6
    def test_keep_outdated_true(self, logger, monkeypatch):
        monkeypatch.setattr(time,
                            "time",
                            lambda c=itertools.count(1000, 50): next(c))

        raw_data = AgentRawData(b"<<<another_section>>>")
        section_store = MockStore(
            "/dev/null",
            PersistedSections[AgentRawDataSection]({
                SectionName("section"): (500, 600, []),
            }),
            logger=logger,
        )
        parser = AgentParser(
            HostName("testhost"),
            section_store,
            check_interval=42,
            keep_outdated=True,
            translation={},
            encoding_fallback="ascii",
            simulation=False,
            logger=logger,
        )

        ahs = parser.parse(raw_data, selection=NO_SELECTION)
        assert ahs.sections == {
            SectionName("another_section"): [],
            SectionName("section"): [],
        }
        assert ahs.cache_info == {SectionName("section"): (500, 100)}
        assert ahs.piggybacked_raw_data == {}
        assert section_store.load() == PersistedSections[AgentRawDataSection]({
            SectionName("section"): (500, 600, []),
        })
Example 7
    def test_attribute_defaults(
        self,
        special_agent_id,
        ipaddress,
        agent_dir,
        expected_args,
        expected_stdin,
        monkeypatch,
    ):
        hostname = HostName("testhost")
        params: Dict[Any, Any] = {}
        Scenario().add_host(hostname).apply(monkeypatch)

        # end of setup

        source = SpecialAgentSource(
            hostname,
            ipaddress,
            special_agent_id=special_agent_id,
            params=params,
        )
        assert source.hostname == hostname
        assert source.ipaddress == ipaddress
        assert source.cmdline == (  #
            str(agent_dir / "special" / ("agent_%s" % special_agent_id)) + " " + expected_args
        )
        assert source.stdin == expected_stdin
        assert source.id == "special_%s" % special_agent_id
Example 8
    def test_update_existing_entry(self, tmp_path: Path) -> None:
        cache_id1 = HostName("host1"), socket.AF_INET
        cache_id2 = HostName("host2"), socket.AF_INET

        ip_lookup_cache = ip_lookup.IPLookupCache({
            cache_id1: "1",
            cache_id2: "2",
        })
        ip_lookup_cache.save_persisted()

        ip_lookup_cache[cache_id1] = "127.0.0.1"

        new_cache_instance = ip_lookup.IPLookupCache({})
        new_cache_instance.load_persisted()
        assert new_cache_instance[cache_id1] == "127.0.0.1"
        assert new_cache_instance[cache_id2] == "2"
Example 9
    def from_json(cls, serialized: Dict[str, Any]) -> "TopologySettingsJSON":
        for key in [
            "growth_root_nodes",
            "growth_forbidden_nodes",
            "growth_continue_nodes",
        ]:
            serialized[key] = {HostName(hn) for hn in serialized[key]}
        return cls(**serialized)
Example 10
def get_source_hostnames(
        piggybacked_hostname: Optional[HostName] = None) -> Sequence[HostName]:
    if piggybacked_hostname is None:
        return [
            HostName(source_host.name)
            for piggybacked_host_folder in _get_piggybacked_host_folders()
            for source_host in _get_piggybacked_host_sources(
                piggybacked_host_folder)
        ]

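    # Otherwise, only consider the piggyback folder of the given piggybacked host.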
    piggybacked_host_folder = cmk.utils.paths.piggyback_dir / Path(
        piggybacked_hostname)
    return [
        HostName(source_host.name) for source_host in
        _get_piggybacked_host_sources(piggybacked_host_folder)
    ]
Example 11
    def page(self) -> PageResult:
        """Determines the hosts to be shown"""
        user.need_permission("general.parent_child_topology")

        topology_settings = TopologySettings()
        if request.var("filled_in"):
            # Parameters from the check_mk filters
            topology_settings.growth_root_nodes = self._get_hostnames_from_filters(
            )
        elif request.var("host_name"):
            # Explicit host_name. Used by icon linking to Topology
            topology_settings.growth_root_nodes = {
                HostName(html.request.get_str_input_mandatory("host_name"))
            }
        else:
            # Default page without further context
            topology_settings.growth_root_nodes = self._get_default_view_hostnames(
                topology_settings.growth_auto_max_nodes)

        if request.has_var("topology_settings"):
            # These parameters are usually generated within javascript through user interactions
            try:
                settings_from_var = json.loads(
                    request.get_str_input_mandatory("topology_settings"))
                for key, value in settings_from_var.items():
                    setattr(topology_settings, key, value)
            except (TypeError, ValueError):
                raise MKGeneralException(
                    _("Invalid topology_settings %r") % topology_settings)

        self.show_topology(topology_settings)
Example 12
def test_get_section_kwargs(
        required_sections: Sequence[str],
        expected_result: Dict[str, Dict[str, str]]) -> None:

    node_sections = HostSections[AgentRawDataSection](
        sections={
            SectionName("one"): NODE_1,
            SectionName("two"): NODE_1,
            SectionName("three"): NODE_1,
        })

    host_key = HostKey(HostName("node1"), HostAddress("127.0.0.1"),
                       SourceType.HOST)

    parsed_sections_broker = ParsedSectionsBroker({
        host_key: (
            ParsedSectionsResolver(section_plugins=[
                SECTION_ONE, SECTION_TWO, SECTION_THREE, SECTION_FOUR
            ]),
            SectionsParser(host_sections=node_sections),
        ),
    })

    kwargs = get_section_kwargs(
        parsed_sections_broker,
        host_key,
        [ParsedSectionName(n) for n in required_sections],
    )

    assert expected_result == kwargs
Example 13
def test_get_check_table__static_checks_win(monkeypatch: MonkeyPatch) -> None:
    hostname_str = "df_host"
    hostname = HostName(hostname_str)
    plugin_name = CheckPluginName("df")
    item = "/snap/core/9066"

    ts = Scenario()
    ts.add_host(hostname)
    ts.set_option(
        "static_checks",
        {
            "filesystem": [
                ((str(plugin_name), item, {
                    "source": "static"
                }), [], [hostname_str]),
            ],
        },
    )
    ts.set_autochecks(
        hostname_str,
        [AutocheckEntry(plugin_name, item, {"source": "auto"}, {})])
    ts.apply(monkeypatch)

    chk_table = check_table.get_check_table(hostname)

    # assert check table is populated as expected
    assert len(chk_table) == 1
    # assert static checks won
    effective_params = chk_table[(plugin_name,
                                  item)].parameters.evaluate(lambda _: True)
    assert effective_params[
        "source"] == "static"  # type: ignore[index,call-overload]
Example 14
    def _get_default_view_hostnames(self, max_nodes: int) -> Set[HostName]:
        """Returns all hosts without any parents"""
        query = "GET hosts\nColumns: name\nFilter: parents ="
        site = request.var("site")
        with sites.prepend_site(), sites.only_sites(
                None if site is None else SiteId(site)):
            hosts = [(x[0], x[1]) for x in sites.live().query(query)]

        # If no explicit site is set and the number of initially displayed hosts
        # exceeds the auto growth range, only the hosts of the master site are shown
        if len(hosts) > max_nodes:
            hostnames = {HostName(x[1]) for x in hosts if x[0] == omd_site()}
        else:
            hostnames = {HostName(x[1]) for x in hosts}

        return hostnames
Example 15
    def test_write(self, config_path: VersionedConfigPath) -> None:
        hostname = HostName("aaa")
        store = core_nagios.HostCheckStore()

        assert config.delay_precompile is False

        assert not store.host_check_source_file_path(config_path,
                                                     hostname).exists()
        assert not store.host_check_file_path(config_path, hostname).exists()

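        # Writing should create both the host check source file and the compiled host check.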
        store.write(config_path, hostname, "xyz")

        assert store.host_check_source_file_path(config_path,
                                                 hostname).exists()
        assert store.host_check_file_path(config_path, hostname).exists()

        with store.host_check_source_file_path(config_path,
                                               hostname).open() as s:
            assert s.read() == "xyz"

        with store.host_check_file_path(config_path, hostname).open("rb") as p:
            assert p.read().startswith(importlib.util.MAGIC_NUMBER)

        assert os.access(store.host_check_file_path(config_path, hostname),
                         os.X_OK)
Example 16
def test_discovered_host_labels_path(discovered_host_labels_dir):
    hostname = "test.host.de"
    config.get_config_cache().initialize()
    assert not (discovered_host_labels_dir / hostname).exists()
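    # Saving should create "<hostname>.mk" in the discovered host labels directory.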
    DiscoveredHostLabelsStore(HostName(hostname)).save(
        DiscoveredHostLabels(HostLabel("foo", "1.5")).to_dict())
    assert (discovered_host_labels_dir / (hostname + ".mk")).exists()
Example 17
    def _create_random_hosts(self, folder, count, folders, levels):
        if levels == 0:
            hosts_to_create: List[Tuple[HostName, Dict, None]] = []
            while len(hosts_to_create) < count:
                host_name = "random_%010d" % int(random.random() * 10000000000)
                hosts_to_create.append((HostName(host_name), {
                    "ipaddress": "127.0.0.1"
                }, None))
            folder.create_hosts(hosts_to_create)
            return count

        total_created = 0
        created = 0
        while created < folders:
            created += 1
            i = 1
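            # Find the first subfolder name that is not taken yet.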
            while True:
                folder_name = "folder_%02d" % i
                if not folder.has_subfolder(folder_name):
                    break
                i += 1

            subfolder = folder.create_subfolder(folder_name,
                                                "Subfolder %02d" % i, {})
            total_created += self._create_random_hosts(subfolder, count,
                                                       folders, levels - 1)
        return total_created
Example 18
    def test_check_intervals_updates_persisted(self, logger, monkeypatch):
        monkeypatch.setattr(time,
                            "time",
                            lambda c=itertools.count(1000, 50): next(c))

        section_store = MockStore(
            "/dev/null",
            PersistedSections[SNMPRawDataSection]({
                SectionName("section"): (0, 0, [["old"]]),
            }),
            logger=logger,
        )
        _new: Sequence[SNMPRawDataSection] = [["new"]]  # For the type checker only
        raw_data: SNMPRawData = {SectionName("section"): _new}
        parser = SNMPParser(
            HostName("testhost"),
            section_store,
            check_intervals={SectionName("section"): 42},
            keep_outdated=True,
            logger=logger,
        )
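        # Fresh raw data should refresh the persisted entry to (now, now + check interval, new data).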
        shs = parser.parse(raw_data, selection=NO_SELECTION)
        assert shs.sections == {SectionName("section"): [["new"]]}
        assert shs.cache_info == {}
        assert shs.piggybacked_raw_data == {}
        assert section_store.load() == PersistedSections[SNMPRawDataSection]({
            SectionName("section"): (1000, 1042, [["new"]]),
        })
Example 19
def test_calculate_data_for_prediction(cfg_setup, utcdate, timezone, params):

    period_info = prediction._PREDICTION_PERIODS[params["period"]]
    with on_time(utcdate, timezone):
        now = int(time.time())
        assert callable(period_info.groupby)
        timegroup = period_info.groupby(now)[0]

        time_windows = prediction._time_slices(
            now, int(params["horizon"] * 86400), period_info, timegroup
        )

    hostname, service_description, dsname = HostName("test-prediction"), "CPU load", "load15"
    rrd_datacolumn = cmk.utils.prediction.rrd_datacolum(
        hostname, service_description, dsname, "MAX"
    )
    data_for_pred = prediction._calculate_data_for_prediction(time_windows, rrd_datacolumn)

    expected_reference = _load_expected_result(
        "%s/tests/integration/cmk/base/test-files/%s/%s" % (repo_path(), timezone, timegroup)
    )

    assert isinstance(expected_reference, dict)
    assert sorted(asdict(data_for_pred)) == sorted(expected_reference)
    for key in expected_reference:
        if key == "points":
            for cal, ref in zip(data_for_pred.points, expected_reference["points"]):
                assert cal == pytest.approx(ref, rel=1e-12, abs=1e-12)
        else:
            assert getattr(data_for_pred, key) == expected_reference[key]
Example 20
    def test_update_store_with_newest(self, logger):
        section_store = MockStore(
            "/dev/null",
            PersistedSections[AgentRawDataSection]({
                SectionName("section"): (0, 0, [["oldest"]]),
            }),
            logger=logger,
        )
        raw_data = AgentRawData(b"<<<section>>>\nnewest")
        parser = AgentParser(
            HostName("testhost"),
            section_store,
            check_interval=0,
            keep_outdated=True,
            translation={},
            encoding_fallback="ascii",
            simulation=False,
            logger=logger,
        )

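        # The parsed result contains the newest data, but the persisted store keeps the old entry.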
        ahs = parser.parse(raw_data, selection=NO_SELECTION)
        assert ahs.sections == {SectionName("section"): [["newest"]]}
        assert ahs.cache_info == {}
        assert ahs.piggybacked_raw_data == {}
        assert section_store.load() == PersistedSections[AgentRawDataSection]({
            SectionName("section"): (0, 0, [["oldest"]]),
        })
Example 21
    def _collect_infos(self) -> Optional[HostName]:
        query = "GET hosts\nColumns: host_name\nFilter: host_labels = 'cmk/check_mk_server' 'yes'\n"
        result = livestatus.LocalConnection().query(query)
        try:
            return HostName(result[0][0])
        except IndexError:
            return None
Example 22
    def test_update_with_persisted_and_store(self, logger):
        section_store = MockStore(
            "/dev/null",
            PersistedSections[SNMPRawDataSection]({
                SectionName("stored"): (0, 0, [["old"]]),
            }),
            logger=logger,
        )
        _new: Sequence[SNMPRawDataSection] = [["new"]]  # For the type checker only
        raw_data: SNMPRawData = {SectionName("fresh"): _new}
        parser = SNMPParser(
            HostName("testhost"),
            section_store,
            check_intervals={},
            keep_outdated=True,
            logger=logger,
        )

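        # Stored and freshly parsed sections are merged; only the stored section carries cache info.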
        shs = parser.parse(raw_data, selection=NO_SELECTION)
        assert shs.sections == {
            SectionName("stored"): [["old"]],
            SectionName("fresh"): [["new"]],
        }
        assert shs.cache_info == {SectionName("stored"): (0, 0)}
        assert shs.piggybacked_raw_data == {}
        assert section_store.load() == {
            SectionName("stored"): (0, 0, [["old"]]),
        }
Example 23
    def test_attribute_defaults(self, monkeypatch):
        hostname = "testhost"
        ipaddress = "1.2.3.4"

        ts = Scenario()
        ts.add_host(hostname)
        ts.set_option("management_protocol", {hostname: "snmp"})
        ts.set_option(
            "host_attributes",
            {
                hostname: {
                    "management_address": ipaddress
                },
            },
        )
        ts.apply(monkeypatch)

        source = SNMPSource.management_board(
            HostName(hostname),
            ipaddress,
            force_cache_refresh=False,
            selected_sections=NO_SELECTION,
            on_scan_error=OnError.RAISE,
        )
        assert source.description == (
            "Management board - SNMP "
            "(Community: 'public', Bulk walk: no, Port: 161, Backend: Classic)"
        )
Example 24
    def test_update_with_empty_persisted(self, logger):
        section_store = MockStore(
            "/dev/null",
            PersistedSections[SNMPRawDataSection](
                {
                    SectionName("stored"): (0, 0, [["old"]]),
                }
            ),
            logger=logger,
        )
        raw_data: SNMPRawData = {}
        parser = SNMPParser(
            HostName("testhost"),
            section_store,
            check_intervals={},
            keep_outdated=True,
            logger=logger,
        )

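        # Even with empty raw data, the previously persisted sections are still returned.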
        shs = parser.parse(raw_data, selection=NO_SELECTION)
        assert shs.sections == {SectionName("stored"): [["old"]]}
        assert shs.cache_info == {SectionName("stored"): (0, 0)}
        assert shs.piggybacked_raw_data == {}
        assert section_store.load() == {
            SectionName("stored"): (0, 0, [["old"]]),
        }
Example 25
    def __init__(self, name):
        self.name = name
        self.info = {}
        # we can't use the current_host context, b/c some tests rely on a persistent
        # item state across several calls to run_check
        import cmk.base.plugin_contexts  # pylint: disable=import-outside-toplevel

        cmk.base.plugin_contexts._hostname = HostName("non-existent-testhost")
Example 26
def _get_up_hosts() -> Optional[Set[HostName]]:
    query = "GET hosts\nColumns: name state"
    try:
        response = livestatus.LocalConnection().query(query)
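        # Host state 0 means UP.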
        return {HostName(name) for name, state in response if state == 0}
    except (livestatus.MKLivestatusNotFoundError, livestatus.MKLivestatusSocketError):
        pass
    return None
Example 27
    def host_sections(self):
        return HostSections[Sequence[AgentRawDataSection]](
            {
                SectionName("section0"): [["first", "line"], ["second", "line"]],
                SectionName("section1"): [["third", "line"], ["forth", "line"]],
            },
            cache_info={
                SectionName("section0"): (1, 2),
                SectionName("section1"): (3, 4),
            },
            piggybacked_raw_data={
                HostName("host0"): [b"first line", b"second line"],
                HostName("host1"): [b"third line", b"forth line"],
            },
        )
Example 28
def test_get_section_cluster_kwargs(
    required_sections: List[str], expected_result: Dict[str, Any]
) -> None:

    node1_sections = AgentHostSections(
        sections={
            SectionName("one"): NODE_1,
            SectionName("two"): NODE_1,
            SectionName("three"): NODE_1,
        }
    )

    node2_sections = AgentHostSections(
        sections={
            SectionName("two"): NODE_2,
            SectionName("three"): NODE_2,
        }
    )

    parsed_sections_broker = ParsedSectionsBroker(
        {
            HostKey(HostName("node1"), HostAddress("127.0.0.1"), SourceType.HOST): (
                ParsedSectionsResolver(
                    section_plugins=[SECTION_ONE, SECTION_TWO, SECTION_THREE, SECTION_FOUR],
                ),
                SectionsParser(host_sections=node1_sections),
            ),
            HostKey(HostName("node2"), HostAddress("127.0.0.1"), SourceType.HOST): (
                ParsedSectionsResolver(
                    section_plugins=[SECTION_ONE, SECTION_TWO, SECTION_THREE, SECTION_FOUR],
                ),
                SectionsParser(host_sections=node2_sections),
            ),
        }
    )

    kwargs = get_section_cluster_kwargs(
        parsed_sections_broker,
        [
            HostKey(HostName("node1"), HostAddress("127.0.0.1"), SourceType.HOST),
            HostKey(HostName("node2"), HostAddress("127.0.0.1"), SourceType.HOST),
        ],
        [ParsedSectionName(n) for n in required_sections],
    )

    assert expected_result == kwargs
Example 29
    def test_host_check_file_path(self, config_path: VersionedConfigPath) -> None:
        assert core_nagios.HostCheckStore.host_check_file_path(
            config_path, HostName("abc")
        ) == Path(
            Path(config_path),
            "host_checks",
            "abc",
        )
Example 30
def test_get_rrd_data_point_max(cfg_setup, max_entries, result):
    from_time, until_time = 1543430040, 1543502040
    timeseries = cmk.utils.prediction.get_rrd_data(
        HostName("test-prediction"), "CPU load", "load15", "MAX", from_time, until_time, max_entries
    )
    assert timeseries.start <= from_time
    assert timeseries.end >= until_time
    assert (timeseries.step, len(timeseries.values)) == result