Example no. 1
    def test_host_check_file_path(self, config_path: VersionedConfigPath) -> None:
        assert core_nagios.HostCheckStore.host_check_file_path(
            config_path, HostName("abc")) == Path(
                Path(config_path),
                "host_checks",
                "abc",
            )
Example no. 2
    def page(self) -> cmk.gui.pages.PageResult:
        """Renders an iframe to view the content of the RobotMK log file"""
        site_id, host_name, service_description = _get_mandatory_request_vars()

        breadcrumb: Breadcrumb = make_service_breadcrumb(
            HostName(host_name), service_description)
        title = self._title() + _(" of service %s on host %s") % (
            service_description, host_name)
        try:
            content = _get_html_from_livestatus(site_id, host_name,
                                                service_description)
        except MKLivestatusNotFoundError:
            html.header(
                title=title,
                breadcrumb=breadcrumb,
            )
            html.user_error(
                MKUserError(None,
                            _("You are not permitted to view this page")))
            return

        if not content[0]:
            html.header(
                title=title,
                breadcrumb=breadcrumb,
            )
            html.user_error(MKUserError(None, _("No logs could be found.")))
            return

        # Only render page menu with download option if content is not empty
        # and user is permitted
        html.header(
            title=title,
            breadcrumb=breadcrumb,
            page_menu=self._page_menu(breadcrumb, site_id, host_name,
                                      service_description),
        )

        iframe: str = "robotmk"
        html.iframe(
            content="",
            src=makeuri_contextless(
                request,
                [
                    ("site", site_id),
                    ("host", host_name),
                    ("service", service_description),
                ],
                filename="robotmk_report.py",
            ),
            name="robotmk_report",
            id_=iframe,
        )

        html.javascript('cmk.utils.content_scrollbar("main_page_content");')
        html.javascript(
            "cmk.utils.add_height_to_simple_bar_content_of_iframe(%s);" %
            json.dumps(iframe))
Example no. 3
def test_ruleset_matcher_get_host_ruleset_values_tags_duplicate_ids(
    monkeypatch: MonkeyPatch,
    rule_spec: RuleConditionsSpec,
    expected_result: Sequence[RuleValue],
) -> None:
    ts = Scenario()
    add_tag_config = TagConfig.from_config(
        {
            "aux_tags": [],
            "tag_groups": [
                {
                    "id": "grp1",
                    "tags": [
                        {
                            "aux_tags": [],
                            "id": "v1",
                            "title": "Value1",
                        },
                    ],
                    "title": "Group 1",
                },
                {
                    "id": "grp2",
                    "tags": [
                        {
                            "aux_tags": [],
                            "id": "v1",
                            "title": "Value1",
                        },
                    ],
                    "title": "Group 2",
                },
            ],
        }
    )
    ts.tags += add_tag_config
    ts.add_host(
        "host",
        tags={
            "grp1": "v1",
        },
    )
    config_cache = ts.apply(monkeypatch)
    matcher = config_cache.ruleset_matcher

    assert (
        list(
            matcher.get_host_ruleset_values(
                RulesetMatchObject(
                    host_name=HostName("host"),
                    service_description=None,
                ),
                ruleset=[rule_spec],
                is_binary=False,
            )
        )
        == expected_result
    )
Example no. 4
def test_cached_dns_lookup_uncached(monkeypatch: MonkeyPatch) -> None:

    config_ipcache = _empty()
    persisted_cache = _empty()

    patch_config_cache(monkeypatch, config_ipcache)
    patch_persisted_cache(monkeypatch, persisted_cache)
    patch_actual_lookup(monkeypatch,
                        {(HostName("test_host"), socket.AF_INET): "3.1.4.1"})

    assert (ip_lookup.cached_dns_lookup(
        HostName("test_host"),
        family=socket.AF_INET,
        force_file_cache_renewal=False,
    ) == "3.1.4.1")
    assert config_ipcache.pop(
        (HostName("test_host"), socket.AF_INET)) == "3.1.4.1"
    assert persisted_cache.pop(
        (HostName("test_host"), socket.AF_INET)) == "3.1.4.1"

    assert (ip_lookup.cached_dns_lookup(
        HostName("test_host"),
        family=socket.AF_INET,
        force_file_cache_renewal=True,
    ) == "3.1.4.1")
    assert config_ipcache[(HostName("test_host"), socket.AF_INET)] == "3.1.4.1"
    assert persisted_cache[(HostName("test_host"),
                            socket.AF_INET)] == "3.1.4.1"
Example no. 5
def test_get_section_cluster_kwargs(required_sections: Sequence[str],
                                    expected_result: Dict[str, Any]) -> None:

    node1_sections = HostSections[AgentRawDataSection](
        sections={
            SectionName("one"): NODE_1,
            SectionName("two"): NODE_1,
            SectionName("three"): NODE_1,
        })

    node2_sections = HostSections[AgentRawDataSection](
        sections={
            SectionName("two"): NODE_2,
            SectionName("three"): NODE_2,
        })

    parsed_sections_broker = ParsedSectionsBroker({
        HostKey(HostName("node1"), HostAddress("127.0.0.1"), SourceType.HOST):
        (
            ParsedSectionsResolver(section_plugins=[
                SECTION_ONE, SECTION_TWO, SECTION_THREE, SECTION_FOUR
            ], ),
            SectionsParser(host_sections=node1_sections),
        ),
        HostKey(HostName("node2"), HostAddress("127.0.0.1"), SourceType.HOST):
        (
            ParsedSectionsResolver(section_plugins=[
                SECTION_ONE, SECTION_TWO, SECTION_THREE, SECTION_FOUR
            ], ),
            SectionsParser(host_sections=node2_sections),
        ),
    })

    kwargs = get_section_cluster_kwargs(
        parsed_sections_broker,
        [
            HostKey(HostName("node1"), HostAddress("127.0.0.1"),
                    SourceType.HOST),
            HostKey(HostName("node2"), HostAddress("127.0.0.1"),
                    SourceType.HOST),
        ],
        [ParsedSectionName(n) for n in required_sections],
    )

    assert expected_result == kwargs
Example no. 6
def test_basic_get_host_ruleset_values_subfolders(monkeypatch: MonkeyPatch) -> None:
    ts = Scenario()
    ts.add_host(HostName("abc"))
    ts.add_host(HostName("xyz"))
    ts.add_host(HostName("lvl1"), host_path="/lvl1/hosts.mk")
    ts.add_host(HostName("lvl2"), host_path="/lvl1/lvl2/hosts.mk")
    ts.add_host(HostName("lvl1a"), host_path="/lvl1_a/hosts.mk")
    config_cache = ts.apply(monkeypatch)
    matcher = config_cache.ruleset_matcher

    assert (
        list(
            matcher.get_host_ruleset_values(
                RulesetMatchObject(host_name=HostName("xyz"), service_description=None),
                ruleset=ruleset,
                is_binary=False,
            )
        )
        == []
    )
    assert (
        list(
            matcher.get_host_ruleset_values(
                RulesetMatchObject(host_name=HostName("lvl1"), service_description=None),
                ruleset=ruleset,
                is_binary=False,
            )
        )
        == ["LEVEL1"]
    )
    assert (
        list(
            matcher.get_host_ruleset_values(
                RulesetMatchObject(host_name=HostName("lvl2"), service_description=None),
                ruleset=ruleset,
                is_binary=False,
            )
        )
        == ["LEVEL1", "LEVEL2"]
    )
    assert (
        list(
            matcher.get_host_ruleset_values(
                RulesetMatchObject(host_name=HostName("lvl1a"), service_description=None),
                ruleset=ruleset,
                is_binary=False,
            )
        )
        == []
    )
Example no. 7
def test_piggyback_default_time_settings():
    time_settings: piggyback.PiggybackTimeSettings = [
        (None, "max_cache_age", piggyback_max_cachefile_age)
    ]
    piggybacked_hostname = HostName("test-host")
    piggyback.get_piggyback_raw_data(piggybacked_hostname, time_settings)
    piggyback.get_source_and_piggyback_hosts(time_settings)
    piggyback.has_piggyback_raw_data(piggybacked_hostname, time_settings)
    piggyback.cleanup_piggyback_files(time_settings)
Example no. 8
def test_get_piggyback_raw_data_successful(time_settings):
    for raw_data_info in piggyback.get_piggyback_raw_data(
            HostName("test-host"), time_settings):
        assert raw_data_info.source_hostname == "source1"
        assert raw_data_info.file_path.endswith("/test-host/source1")
        assert raw_data_info.successfully_processed is True
        assert raw_data_info.reason == "Successfully processed from source 'source1'"
        assert raw_data_info.reason_status == 0
        assert raw_data_info.raw_data == b"<<<check_mk>>>\nlala\n"
Example no. 9
    def from_str(command: str) -> "Command":
        serial, host_name, mode_name, timeout = command.split(sep=";", maxsplit=3)
        return Command(
            config_path=VersionedConfigPath(int(serial)),
            host_name=HostName(host_name),
            mode=Mode.CHECKING if mode_name == "checking" else Mode.DISCOVERY,
            timeout=int(timeout),
        )
Example no. 10
    def fetcher(self, file_cache: AgentFileCache) -> TCPFetcher:
        return TCPFetcher(
            file_cache,
            family=socket.AF_INET,
            address=("1.2.3.4", 6556),
            host_name=HostName("irrelevant_for_this_test"),
            timeout=0.1,
            encryption_settings={"use_regular": "allow"},
        )
Example no. 11
def test_get_rrd_data_point_max(cfg_setup, max_entries, result):
    from_time, until_time = 1543430040, 1543502040
    timeseries = cmk.utils.prediction.get_rrd_data(HostName("test-prediction"),
                                                   "CPU load", "load15", "MAX",
                                                   from_time, until_time,
                                                   max_entries)
    assert timeseries.start <= from_time
    assert timeseries.end >= until_time
    assert (timeseries.step, len(timeseries.values)) == result
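
The test above checks the returned step together with the number of points, while only bounding start and end. As a rough aid for reading the parametrised results: if a series covered exactly the requested window, the point count would be the window length divided by the step. A minimal sketch of that arithmetic in plain Python, with no Checkmk imports; the real series may start earlier and end later than requested, so the actual count can be larger.

def points_for_window(start: int, end: int, step: int) -> int:
    """Points needed for one value per step over [start, end), rounded up."""
    return -(-(end - start) // step)

# The window in the test spans 1543502040 - 1543430040 = 72000 seconds:
assert points_for_window(1543430040, 1543502040, 60) == 1200
assert points_for_window(1543430040, 1543502040, 300) == 240
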
Example no. 12
    def _get_hostnames_from_filters(self) -> Set[HostName]:
        # Determine hosts from filters
        filter_headers = self._get_filter_headers()
        query = "GET hosts\nColumns: name"
        if filter_headers:
            query += "\n%s" % filter_headers

        with sites.only_sites(request.var("site")):
            return {HostName(x[0]) for x in sites.live().query(query)}
Example no. 13
    def from_str(command: str) -> "Command":
        raw_serial, host_name, mode_name, timeout = command.split(sep=";", maxsplit=3)
        return Command(
            serial=ConfigSerial(raw_serial),
            host_name=HostName(host_name),
            mode=Mode.CHECKING if mode_name == "checking" else Mode.DISCOVERY,
            timeout=int(timeout),
        )
Example no. 14
    def test_clear(self, tmp_path: Path) -> None:
        ip_lookup.IPLookupCache({
            (HostName("host1"), socket.AF_INET):
            "127.0.0.1"
        }).save_persisted()

        ip_lookup_cache = ip_lookup.IPLookupCache({})
        ip_lookup_cache.load_persisted()
        assert ip_lookup_cache[(HostName("host1"),
                                socket.AF_INET)] == "127.0.0.1"

        ip_lookup_cache.clear()

        assert not ip_lookup_cache

        ip_lookup_cache = ip_lookup.IPLookupCache({})
        ip_lookup_cache.load_persisted()
        assert not ip_lookup_cache
Example no. 15
def _get_up_hosts() -> Optional[Set[HostName]]:
    query = "GET hosts\nColumns: name state"
    try:
        response = livestatus.LocalConnection().query(query)
        return {HostName(name) for name, state in response if state == 0}
    except (livestatus.MKLivestatusNotFoundError,
            livestatus.MKLivestatusSocketError):
        pass
    return None
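
_get_up_hosts queries Livestatus for (name, state) pairs and keeps only hosts in state 0 (UP). The filtering step itself is plain Python and can be illustrated without a live site; the rows below are invented sample data in the same shape the query returns:

from typing import List, Set, Tuple

def up_hosts_from_rows(rows: List[Tuple[str, int]]) -> Set[str]:
    # Mirrors the comprehension above: keep the name of every host whose state is 0.
    return {name for name, state in rows if state == 0}

assert up_hosts_from_rows([("web01", 0), ("db01", 1), ("mail01", 0)]) == {"web01", "mail01"}
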
Example no. 16
    def file_cache(self) -> DefaultAgentFileCache:
        return DefaultAgentFileCache(
            HostName("hostname"),
            base_path=Path(os.devnull),
            max_age=MaxAge.none(),
            disabled=True,
            use_outdated=True,
            simulation=False,
        )
Example no. 17
def clone_file_cache(file_cache: FileCache) -> FileCache:
    return type(file_cache)(
        HostName(file_cache.hostname),
        base_path=file_cache.base_path,
        max_age=file_cache.max_age,
        disabled=file_cache.disabled,
        use_outdated=file_cache.use_outdated,
        simulation=file_cache.simulation,
    )
Example no. 18
    def file_cache(self, request) -> FileCache:
        return request.param(
            HostName("hostname"),
            base_path=Path(os.devnull),
            max_age=MaxAge.none(),
            disabled=True,
            use_outdated=False,
            simulation=True,
        )
Example no. 19
def test_get_rrd_data_incomplete(cfg_setup, timerange, result):
    from_time, until_time = timerange
    timeseries = cmk.utils.prediction.get_rrd_data(
        HostName("test-prediction"), "CPU load", "load15", "MAX", from_time, until_time
    )

    assert timeseries.start <= from_time
    assert timeseries.end >= until_time
    assert (timeseries.step, timeseries.values) == result
Example no. 20
    def file_cache(self, path: Path, request):
        return request.param(
            HostName("hostname"),
            base_path=path,
            max_age=MaxAge(checking=0, discovery=999, inventory=0),
            disabled=False,
            use_outdated=False,
            simulation=False,
        )
Example no. 21
def test_dump_precompiled_hostcheck_not_existing_host(
        monkeypatch: MonkeyPatch, config_path: VersionedConfigPath) -> None:
    config_cache = Scenario().apply(monkeypatch)
    host_check = core_nagios._dump_precompiled_hostcheck(
        config_cache,
        config_path,
        HostName("not-existing"),
    )
    assert host_check is None
Example no. 22
def test_get_rrd_data_fails(cfg_setup):
    timestamp = time.mktime(datetime.strptime("2018-11-28 12", "%Y-%m-%d %H").timetuple())
    _, from_time, until_time, _ = prediction._get_prediction_timegroup(
        int(timestamp), prediction._PREDICTION_PERIODS["hour"]
    )

    # Fails to get data because the check does not exist
    with pytest.raises(MKGeneralException, match="Cannot get historic metrics via Livestatus:"):
        cmk.utils.prediction.get_rrd_data(
            HostName("test-prediction"), "Nonexistent check", "util", "MAX", from_time, until_time
        )

    # Empty response because of a non-existent perf_data variable
    timeseries = cmk.utils.prediction.get_rrd_data(
        HostName("test-prediction"), "CPU load", "untracked_prefdata", "MAX", from_time, until_time
    )

    assert timeseries == cmk.utils.prediction.TimeSeries([0, 0, 0])
Example no. 23
def test_active_check_arguments_not_existing_password(capsys):
    assert (
        core_config.active_check_arguments(
            HostName("bla"), "blub", ["arg1", ("store", "pw-id", "--password=%s"), "arg3"]
        )
        == "--pwstore=2@11@pw-id 'arg1' '--password=***' 'arg3'"
    )
    stderr = capsys.readouterr().err
    assert 'The stored password "pw-id" used by service "blub" on host "bla"' in stderr
Example no. 24
def test_cached_dns_lookup_is_config_cached_none(
        monkeypatch: MonkeyPatch) -> None:
    patch_config_cache(
        monkeypatch,
        {(HostName("the_host_that_raised"), socket.AF_INET6): None})
    patch_persisted_cache(monkeypatch, _empty())
    patch_actual_lookup(monkeypatch, _empty())

    assert (ip_lookup.cached_dns_lookup(
        HostName("the_host_that_raised"),
        family=socket.AF_INET6,
        force_file_cache_renewal=False,
    ) is None)
    assert (ip_lookup.cached_dns_lookup(
        HostName("the_host_that_raised"),
        family=socket.AF_INET6,
        force_file_cache_renewal=True,
    ) is None)
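
Together with example no. 4, this test describes the lookup order the caching code is expected to follow: the in-memory config cache is consulted first, the persisted (file) cache second, and the real DNS lookup only as a last resort, with a stored None acting as a cached negative answer. A rough, generic sketch of that layering follows; all names are invented for illustration and it ignores force_file_cache_renewal, so it is not the actual ip_lookup implementation.

from typing import Callable, Dict, Optional, Tuple

Key = Tuple[str, int]  # (host name, address family), matching the keys in the tests

def layered_lookup(
    key: Key,
    config_cache: Dict[Key, Optional[str]],
    persisted_cache: Dict[Key, Optional[str]],
    actual_lookup: Callable[[Key], Optional[str]],
) -> Optional[str]:
    # A stored value wins even when it is None, i.e. a failed lookup is cached too.
    if key in config_cache:
        return config_cache[key]
    if key in persisted_cache:
        config_cache[key] = persisted_cache[key]
        return config_cache[key]
    result = actual_lookup(key)
    config_cache[key] = result
    persisted_cache[key] = result
    return result

cache_a: Dict[Key, Optional[str]] = {}
cache_b: Dict[Key, Optional[str]] = {}
assert layered_lookup(("host", 2), cache_a, cache_b, lambda _key: "3.1.4.1") == "3.1.4.1"
assert cache_a == cache_b == {("host", 2): "3.1.4.1"}
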
Example no. 25
def test_lookup_mgmt_board_ip_address_unresolveable(
        monkeypatch: MonkeyPatch, tags: Dict[str, str],
        family: socket.AddressFamily) -> None:
    hostname = HostName("unresolveable-hostname")
    ts = Scenario()
    ts.add_host(hostname, tags=tags)
    ts.apply(monkeypatch)
    host_config = config.get_config_cache().get_host_config(hostname)
    assert config.lookup_mgmt_board_ip_address(host_config) is None
Example no. 26
    def _postprocess_meshes(self, meshes: Meshes) -> Meshes:
        """Create a central node and add all monitoring sites as childs"""

        central_node: _MeshNode = {
            "name": HostName(""),
            "hostname": HostName("Checkmk"),
            "outgoing": [],
            "incoming": [],
            "node_type": "topology_center",
        }

        site_nodes: Dict[HostName, _MeshNode] = {}
        for mesh in meshes:
            for node_name in mesh:
                site = self._known_hosts[node_name]["site"]
                site_node_name = HostName(_("Site %s") % site)
                site_nodes.setdefault(
                    site_node_name,
                    {
                        "node_type": "topology_site",
                        "outgoing": [central_node["name"]],
                        "incoming": [],
                    },
                )
                outgoing_nodes = self._known_hosts.get(
                    node_name, {"outgoing": []})["outgoing"]
                # Only attach this node to the site if it has no parents that are visible
                # in the current mesh
                if not mesh.intersection(outgoing_nodes):
                    site_nodes[site_node_name]["incoming"].append(node_name)

        central_node["incoming"] = list(site_nodes.keys())
        self._known_hosts[central_node["name"]] = central_node

        combinator_mesh: Set[HostName] = set()
        for node_name, settings in site_nodes.items():
            self._known_hosts[node_name] = settings
            combinator_mesh.add(node_name)
            combinator_mesh.update(set(settings["incoming"]))

        meshes.append(combinator_mesh)
        self._integrate_new_meshes(meshes)

        return meshes
Example no. 27
def test_lookup_mgmt_board_ip_address_ipv4_host(monkeypatch: MonkeyPatch,
                                                hostname_str: str,
                                                tags: Dict[str, str],
                                                result_address: str) -> None:
    hostname = HostName(hostname_str)
    ts = Scenario()
    ts.add_host(hostname, tags=tags)
    ts.apply(monkeypatch)
    host_config = config.get_config_cache().get_host_config(hostname)
    assert config.lookup_mgmt_board_ip_address(host_config) == result_address
Example no. 28
def test__get_service_filter_func(monkeypatch, parameters_rediscovery, result):
    monkeypatch.setattr(config, "service_description",
                        lambda h, c, i: "Test Description")

    service_filters = _filters.ServiceFilters.from_settings(
        parameters_rediscovery)
    service = _filters.Service(CheckPluginName("check_plugin_name"), "item",
                               "Test Description", None)
    assert service_filters.new is not None
    assert service_filters.new(HostName("hostname"), service) is result
Example no. 29
    def _from_json(cls, serialized: Mapping[str, Any]) -> "TCPFetcher":
        serialized_ = copy.deepcopy(dict(serialized))
        address: Tuple[Optional[HostAddress], int] = serialized_.pop("address")
        host_name = HostName(serialized_.pop("host_name"))
        return cls(
            DefaultAgentFileCache.from_json(serialized_.pop("file_cache")),
            address=address,
            host_name=host_name,
            **serialized_,
        )
Example no. 30
def _create_configs_special_auth() -> List[SNMPHostConfig]:
    return [
        SNMPHostConfig(
            is_ipv6_primary=False,
            hostname=HostName("noauthnopriv"),
            ipaddress="127.0.0.1",
            credentials=(
                "noAuthNoPriv",
                "noAuthNoPrivUser",
            ),
            port=1339,
            is_bulkwalk_host=False,
            is_snmpv2or3_without_bulkwalk_host=False,
            bulk_walk_size_of=10,
            timing={},
            oid_range_limits=[],
            snmpv3_contexts=[],
            character_encoding=None,
            is_usewalk_host=False,
            snmp_backend=SNMPBackendEnum.CLASSIC,
        ),
        SNMPHostConfig(
            is_ipv6_primary=False,
            hostname=HostName("authonly"),
            ipaddress="127.0.0.1",
            credentials=(
                "authNoPriv",
                "md5",
                "authOnlyUser",
                "authOnlyUser",
            ),
            port=1337,
            is_bulkwalk_host=False,
            is_snmpv2or3_without_bulkwalk_host=False,
            bulk_walk_size_of=10,
            timing={},
            oid_range_limits=[],
            snmpv3_contexts=[],
            character_encoding=None,
            is_usewalk_host=False,
            snmp_backend=SNMPBackendEnum.CLASSIC,
        ),
    ]