def test_compute_rates_multiple_disks():
    """First call raises IgnoreResultsError, second call yields zero rates.

    Rates need two samples; with identical counters one minute apart every
    computed rate must be exactly zero.
    """
    disks = {
        "C:": DISK,
        "D:": DISK,
    }
    value_store: Dict[str, Any] = {}

    # No previous counters in the value store yet -> must raise.
    with on_time(0, "UTC"):
        with pytest.raises(IgnoreResultsError):
            diskstat.compute_rates_multiple_disks(
                disks,
                value_store,
                _compute_rates_single_disk,
            )

    # Second sample one minute later: rates are computable now.
    with on_time(60, "UTC"):
        disks_w_rates = diskstat.compute_rates_multiple_disks(
            disks,
            value_store,
            _compute_rates_single_disk,
        )

    # Counters did not change between the samples, so all rates are zero.
    for (name_in, disk_in), (name_out, disk_out) in zip(
        iter(disks.items()),
        iter(disks_w_rates.items()),
    ):
        assert name_in == name_out
        assert disk_out == {k: 0 for k in disk_in}
def test_check_smart_command_timeout_rate():
    """The Command_Timeout counter is rated; slow growth is OK, fast growth CRIT."""
    section_timeout = {"/dev/sda": {"Command_Timeout": 0}}
    now_simulated = 581792400, "UTC"
    # First evaluation has no previous counter sample, so the rate cannot be
    # computed yet and the check must raise.
    with pytest.raises(GetRateError), on_time(*now_simulated):
        list(smart.check_smart_stats("/dev/sda", {"Command_Timeout": 0}, section_timeout))

    # Simulate an accepted increment rate of the counter
    # (+1 count in 30 minutes, i.e. 2 counts/h, well below the threshold).
    thirty_min_later = now_simulated[0] + 30 * 60, "UTC"
    section_timeout["/dev/sda"]["Command_Timeout"] = 1
    with on_time(*thirty_min_later):
        assert list(
            smart.check_smart_stats("/dev/sda", {"Command_Timeout": 0}, section_timeout)
        ) == [
            Result(state=State.OK, summary="Command timeout counter: 1"),
            Metric("Command_Timeout", 1.0),
        ]

    # Simulate an exceeding rate for command timeouts
    # (+4 counts in 10 seconds extrapolates to far more than 100 counts/h).
    ten_sec_later = thirty_min_later[0] + 10, "UTC"
    section_timeout["/dev/sda"]["Command_Timeout"] = 5
    with on_time(*ten_sec_later):
        assert list(
            smart.check_smart_stats("/dev/sda", {"Command_Timeout": 0}, section_timeout)
        ) == [
            Result(
                state=State.CRIT,
                summary=(
                    "Command timeout counter: 5 "
                    "(counter increased more than 100 counts / h (!!). "
                    "Value during discovery was: 0)"
                ),
            ),
            Metric("Command_Timeout", 5),
        ]
def test_check_heartbeat_crm_crit(section_2: Section) -> None:
    """Mismatching DC, node count and resource count each go CRIT; a failed
    resource action is reported as WARN."""
    params = {
        "dc": "hasi",
        "max_age": 60,
        "num_nodes": 1,
        "num_resources": 4,
        "show_failed_actions": True,
    }
    expected = [
        Result(state=State.CRIT, summary="DC: cluster (Expected hasi)"),
        Result(state=State.CRIT, summary="Nodes: 2 (Expected 1)"),
        Result(state=State.CRIT, summary="Resources: 6 (Expected 4)"),
        Result(
            state=State.WARN,
            summary=(
                "Failed: mysqldb1_lvm_monitor_10000 on cluster1 'unknown error' (1): call=158, "
                "status=Timed Out, exitreason='none', "
                "last-rc-change='Fri Feb 22 22:54:52 2019', queued=0ms, exec=0ms"
            ),
        ),
    ]
    with on_time("2019-08-18 10:36:36", "UTC"):
        assert list(check_heartbeat_crm(params, section_2)) == expected
def test_sign_csr_with_local_ca() -> None:
    """Signing a CSR with a local root CA yields a certificate with the
    requested CN, the expected validity window, and a valid chain."""
    root_key = _make_private_key()
    root_cert = _make_root_certificate(_make_subject_name("peter"), 1, root_key)
    key = _make_private_key()
    csr = _make_csr(_make_subject_name("from_peter"), key)
    root_ca = RootCA(root_cert, root_key)

    # Freeze the clock so the validity window is deterministic.
    with on_time(567892121, "UTC"):
        cert = root_ca.sign_csr(csr, 100)

    assert check_cn(cert, "from_peter")
    assert str(cert.not_valid_before) == "1987-12-30 19:48:41"
    assert str(cert.not_valid_after) == "1988-04-08 19:48:41"

    check_certificate_against_private_key(cert, key)
    # ensure that 'from_peter' is indeed signed by 'peter'
    check_certificate_against_public_key(cert, _rsa_public_key_from_cert_or_csr(root_cert))
def test_check_oracle_instance_uptime_normal(fix_register: FixRegister) -> None:
    """An open instance with a known uptime counter yields an OK result and
    an uptime metric."""
    agent_row = [
        "IC731",
        "12.1.0.2.0",
        "OPEN",
        "ALLOWED",
        "STARTED",
        "2144847",
        "3190399742",
        "ARCHIVELOG",
        "PRIMARY",
        "YES",
        "IC73",
        "130920150251",
    ]
    plugin = fix_register.check_plugins[CheckPluginName("oracle_instance_uptime")]
    with on_time(1643360266, "UTC"):
        assert list(
            plugin.check_function(
                item="IC731",
                params={},
                section=parse_oracle_instance([agent_row]),
            )
        ) == [
            Result(
                state=State.OK,
                summary="Up since 2022-01-03 13:10:19, uptime: 24 days, 19:47:27",
            ),
            Metric("uptime", 2144847.0),
        ]
def test_cleanup_user_profiles_remove_abandoned(user_id: UserId) -> None:
    """A profile directory that belongs to no user and is old gets removed."""
    profile = cmk.utils.paths.profile_dir.joinpath("profile")
    profile.mkdir()
    marker_file = profile / "bla.mk"
    marker_file.touch()

    # Age the file to a point well in the past so it counts as abandoned.
    with on_time("2018-04-15 16:50", "CET"):
        os.utime(marker_file, (time.time(), time.time()))

    userdb.UserProfileCleanupBackgroundJob()._do_cleanup()
    assert not profile.exists()
def test_sign_csr() -> None:
    """sign_csr produces a certificate with the requested CN, a validity
    window anchored at the mocked time, and a valid signature chain."""
    root_key = make_private_key()
    root_cert = make_root_certificate(make_subject_name("peter"), 1, root_key)
    key = make_private_key()
    csr = make_csr(make_subject_name("from_peter"), key)

    # Freeze the clock at t=100s so the validity window is deterministic.
    with on_time(100, "UTC"):
        cert = sign_csr(csr, 2, root_cert, root_key)

    assert check_cn(cert, "from_peter")
    assert str(cert.not_valid_before) == "1970-01-01 00:01:40"
    assert str(cert.not_valid_after) == "1970-01-03 00:01:40"

    check_certificate_against_private_key(cert, key)
    # ensure that 'from_peter' is indeed signed by 'peter'
    check_certificate_against_public_key(cert, rsa_public_key_from_cert_or_csr(root_cert))
def test_log_audit_with_object_diff(request_context):
    """log_audit persists an entry whose diff text describes the removed key."""
    old = {"a": "b", "b": "c"}
    new = {"b": "c"}

    # Log at a fixed time so the stored timestamp is predictable.
    with on_time("2018-04-15 16:50", "CET"):
        log_audit(
            object_ref=None,
            action="bla",
            message="Message",
            user_id=UserId("calvin"),
            diff_text=make_diff_text(old, new),
        )

    store = AuditLogStore(AuditLogStore.make_path())
    assert store.read() == [
        AuditLogStore.Entry(
            time=1523811000,
            object_ref=None,
            user_id="calvin",
            action="bla",
            text="Message",
            diff_text='Attribute "a" with value "b" removed.',
        ),
    ]
def test_check_warn_upon_old_update_check(duplicate: bool) -> None:
    """An overdue update check plus a reported error both yield WARN results.

    The agentupdate payload is optionally duplicated (duplicate=True) to make
    sure repeated key/value pairs do not change the outcome.
    """
    with on_time(1645800081.5039608, "UTC"):
        actual = list(
            _check_cmk_agent_update(
                {},
                {
                    # key/value pairs joined into the flat agentupdate string,
                    # repeated twice when `duplicate` is truthy (1 + True == 2)
                    "agentupdate": " ".join((1 + duplicate) * (
                        "last_check 1645000081.5039608",
                        "last_update 1645000181.5039608",
                        "aghash 38bf6e44175732bc",
                        "pending_hash 1234abcd5678efgh",
                        "update_url https://server/site/check_mk",
                        "error 503 Server Error: Service Unavailable",
                    ))
                },
            ))
    assert actual == [
        Result(state=State.WARN,
               summary="Update error: 503 Server Error: Service Unavailable"),
        Result(
            state=State.WARN,
            summary=
            "Time since last update check: 9 days 6 hours (warn/crit at 2 days 0 hours/never)",
        ),
        Result(state=State.OK, notice="Last update check: Feb 16 2022 08:28:01"),
        Result(state=State.OK, summary="Last update: Feb 16 2022 08:29:41"),
        Result(state=State.OK, notice="Update URL: https://server/site/check_mk"),
        Result(state=State.OK, notice="Agent configuration: 38bf6e44"),
        Result(state=State.OK, notice="Pending installation: 1234abcd"),
    ]
def test_chrony_parse_valid():
    """A complete `chronyc tracking` output parses into the expected section."""
    raw_output = [
        ["Reference", "ID", ":", "55DCBEF6", "(kaesekuchen.ok)"],
        ["Stratum", ":", "3"],
        ["Ref", "time", "(UTC)", ":", "Tue", "Jul", "09", "08:01:06", "2019"],
        ["System", "time", ":", "0.000275117", "seconds", "slow", "of", "NTP", "time"],
        ["Last", "offset", ":", "-0.000442775", "seconds"],
        ["RMS", "offset", ":", "0.000999328", "seconds"],
        ["Frequency", ":", "2.054", "ppm", "fast"],
        ["Residual", "freq", ":", "-0.004", "ppm"],
        ["Skew", ":", "0.182", "ppm"],
        ["Root", "delay", ":", "0.023675382", "seconds"],
        ["Root", "dispersion", ":", "0.001886752", "seconds"],
        ["Update", "interval", ":", "1042.2", "seconds"],
        ["Leap", "status", ":", "Normal"],
    ]
    # Mocked "now" determines last_sync (seconds since the Ref time line).
    with on_time(1628000000, "UTC"):
        assert chrony.parse_chrony(raw_output) == {
            "Reference ID": "55DCBEF6 (kaesekuchen.ok)",
            "Stratum": 3,
            "System time": 0.275117,
            "address": "(kaesekuchen.ok)",
            "last_sync": 65340734.0,
        }
def test_calculate_data_for_prediction(cfg_setup, utcdate, timezone, params):
    """Prediction data computed from RRD columns matches the recorded golden
    file for the given timezone and time group."""
    period_info = prediction._PREDICTION_PERIODS[params["period"]]
    # Resolve "now", the time group it falls into, and the horizon's time
    # slices — all under the mocked clock/zone.
    with on_time(utcdate, timezone):
        now = int(time.time())
        assert callable(period_info.groupby)
        timegroup = period_info.groupby(now)[0]

        time_windows = prediction._time_slices(
            now, int(params["horizon"] * 86400), period_info, timegroup
        )

    hostname, service_description, dsname = HostName("test-prediction"), "CPU load", "load15"
    rrd_datacolumn = cmk.utils.prediction.rrd_datacolum(
        hostname, service_description, dsname, "MAX"
    )
    data_for_pred = prediction._calculate_data_for_prediction(time_windows, rrd_datacolumn)

    # Golden file recorded per timezone/timegroup under the test-files tree.
    expected_reference = _load_expected_result(
        "%s/tests/integration/cmk/base/test-files/%s/%s" % (repo_path(), timezone, timegroup)
    )
    assert isinstance(expected_reference, dict)
    assert sorted(asdict(data_for_pred)) == sorted(expected_reference)
    for key in expected_reference:
        if key == "points":
            # Float series: compare pointwise with a tight tolerance.
            for cal, ref in zip(data_for_pred.points, expected_reference["points"]):
                assert cal == pytest.approx(ref, rel=1e-12, abs=1e-12)
        else:
            assert getattr(data_for_pred, key) == expected_reference[key]
def test_check_hitachi_hnas_volume(value_store_patch, item, params, section, expected) -> None:
    """Hitachi volume check function returns expected results for different volume params"""
    with on_time("2021-07-22 12:00", "CET"):
        assert tuple(check_hitachi_hnas_volume(item, params, section)) == expected
def test_uptime_check_zero():
    """An uptime of exactly zero seconds is still reported as OK."""
    expected = [
        Result(state=State.OK, summary="Up since Apr 15 2018 18:50:00"),
        Result(state=State.OK, summary="Uptime: 0 seconds"),
        Metric("uptime", 0.0),
    ]
    with on_time("2018-04-15 16:50", "CET"):
        assert list(uptime_utils.check({}, uptime_utils.Section(0, None))) == expected
def test_filters_filter_table(request_context, test, monkeypatch):
    """Each registered filter reduces the given rows to the expected subset."""

    # Needed for DeploymentTristateFilter test
    def deployment_states(host_name):
        return {
            "abc": {
                "target_aghash": "abc",
            },
            "zzz": {},
        }[host_name]

    if not cmk_version.is_raw_edition():
        import cmk.gui.cee.agent_bakery as agent_bakery  # pylint: disable=redefined-outer-name,import-outside-toplevel,no-name-in-module

        monkeypatch.setattr(agent_bakery, "get_cached_deployment_status", deployment_states)

    # Needed for FilterInvFloat test
    monkeypatch.setattr(cmk.gui.inventory, "get_inventory_table", get_inventory_table_patch)
    monkeypatch.setattr(cmk.gui.inventory, "get_inventory_attribute", get_inventory_table_patch)

    # Needed for FilterAggrServiceUsed test
    def is_part_of_aggregation_patch(host, service):
        return {("h", "srv1"): True}.get((host, service), False)

    monkeypatch.setattr(cmk.gui.bi, "is_part_of_aggregation", is_part_of_aggregation_patch)

    with on_time("2018-04-15 16:50", "CET"):
        context: VisualContext = {test.ident: dict(test.request_vars)}

        # BUG FIX: this previously read `not cmk_version.is_raw_edition` without
        # calling the function. A function object is always truthy, so `not`
        # yielded False and the deployment_has_agent filter was skipped on
        # EVERY edition instead of only on the raw edition (which has no
        # bakery). With the call, the filter is exercised on non-raw editions.
        if not cmk_version.is_raw_edition() or test.ident != "deployment_has_agent":
            filt = cmk.gui.plugins.visuals.utils.filter_registry[test.ident]
            assert filt.filter_table(context, test.rows) == test.expected_rows
def fixture_mock_time():
    """Use this fixture for simple time + zone mocking

    Use this fixture instead of directly invoking on_time in case you don't
    need a specific time. Calling this once instead of on_time() a lot of
    times saves execution time.
    """
    # Arbitrary fixed timestamp in CET; tests using this fixture must not
    # depend on the concrete value.
    with on_time(1572247138, "CET"):
        yield
def test_validate_certificate_not_yet_valid(root_ca: RootCA) -> None:
    """A certificate issued in the future must fail validation."""
    # Issue the certificate one day ahead of the real clock.
    with on_time(time() + 24 * 3600, "UTC"):
        cert, _priv_key = root_ca.new_signed_cert("abc123", 100)
    with pytest.raises(CertificateValidationError, match="Client certificate not yet valid"):
        _validate_certificate(cert)
def test_validate_certificate_expired(root_ca: RootCA) -> None:
    """A certificate whose validity period has passed must fail validation."""
    # Issue a one-day certificate far in the past so it is expired by now.
    with on_time(1638174087, "UTC"):
        cert, _priv_key = root_ca.new_signed_cert("abc123", 1)
    with pytest.raises(CertificateValidationError, match="Client certificate expired"):
        _validate_certificate(cert)
def test_tuple_value_to_json_conversion(value, result):
    """A Tuple of AbsoluteDate valuespecs renders as expected and
    round-trips losslessly through its JSON representation."""
    def date_pair():
        # Fresh valuespec per call, mirroring independent construction.
        return vs.Tuple(elements=[vs.AbsoluteDate(), vs.AbsoluteDate()])

    with on_time("2020-03-02", "UTC"):
        assert date_pair().value_to_html(value) == result
        json_value = date_pair().value_to_json(value)
        assert date_pair().value_from_json(json_value) == value
def test_validate_certificate_not_yet_valid(ca: CertificateAuthority) -> None:
    """A certificate issued in the future must fail validation."""
    # Issue the certificate one day ahead of the real clock.
    with on_time(time() + 24 * 3600, "UTC"):
        cert, _priv_key = ca._certificate_from_root("abc123")
    with pytest.raises(CertificateValidationError, match="Client certificate not yet valid"):
        _validate_certificate(cert)
def test_check_timesyncd_negative_time(
    string_table: StringTable,
    params: timesyncd.CheckParams,
    result: CheckResult,
):
    """A local clock BEHIND the last sync timestamp yields the expected results."""
    section = timesyncd.parse_timesyncd(string_table)
    # One minute before the sample's reference timestamp.
    with on_time(1569922392.37 - 60, "UTC"):
        assert list(timesyncd.check_timesyncd(params, section)) == result
def test_check_fritz_uptime(
    section: Section,
    expected_result: CheckResult,
) -> None:
    """Fritz!Box uptime check yields the expected results at a fixed time."""
    with on_time(1647515259, "UTC"):
        assert list(check_fritz_uptime({}, section)) == expected_result
def test_check_timesyncd_freeze(
    string_table: StringTable,
    params: timesyncd.CheckParams,
    result: CheckResult,
):
    """A sync timestamp far in the past (frozen daemon) yields the expected results."""
    section = timesyncd.parse_timesyncd(string_table)
    # 22 hours and one minute after the sample's reference timestamp.
    with on_time(1569922392.37 + 60 * 60 * 22 + 60, "UTC"):
        assert list(timesyncd.check_timesyncd(params, section)) == result
def test_validate_certificate_expired(ca: CertificateAuthority) -> None:
    """A certificate whose validity period has passed must fail validation."""
    # Shorten validity to one day and issue far in the past -> expired by now.
    ca._days_valid = 1
    with on_time(1638174087, "UTC"):
        cert, _priv_key = ca._certificate_from_root("abc123")
    with pytest.raises(CertificateValidationError, match="Client certificate expired"):
        _validate_certificate(cert)
def test_get_annotation_date_render_function(annotation_times, result):
    """The date renderer chosen depends on whether the annotations cover
    whole days within the queried range."""
    annotations = [
        ((None, None, None), {"from": start, "until": end})
        for start, end in annotation_times
    ]
    with on_time(1572253746, "CET"):
        render = availability.get_annotation_date_render_function(
            annotations,
            {"range": ((1543446000, 1543446000 + 86399), "bla")},
        )
        assert render == result  # pylint:disable=comparison-with-callable
def test_uptime_check_basic():
    """A small uptime yields 'Up since', a human-readable duration and a metric."""
    expected = [
        Result(state=State.OK, summary="Up since Apr 15 2018 18:47:57"),
        Result(state=State.OK, summary="Uptime: 2 minutes 3 seconds"),
        Metric("uptime", 123.0),
    ]
    with on_time("2018-04-15 16:50", "CET"):
        assert list(uptime_utils.check({}, uptime_utils.Section(123, None))) == expected
def test_veeam_cdp_jobs_check(
    item: str,
    params: veeam_cdp_jobs.CheckParams,
    data: type_defs.StringTable,
    result: CheckResult,
):
    """Parsed CDP job data produces the expected check results at a fixed time."""
    with on_time(1632216660, "UTC"):
        section = veeam_cdp_jobs.parse_veeam_cdp_jobs(data)
        assert list(veeam_cdp_jobs.check_veeam_cdp_jobs(item, params, section)) == result
def test_check_mqtt_clients(check_scenario: CheckScenario, monkeypatch: pytest.MonkeyPatch) -> None:
    """Scenario-driven check of the MQTT clients service."""
    if check_scenario.value_store:
        # Pre-seed the value store so rate computations have a previous sample.
        monkeypatch.setattr(mqtt, "get_value_store", check_scenario.value_store.copy)
    with on_time(581792400, "UTC"):
        actual = list(check_mqtt_clients(check_scenario.item, check_scenario.section))
        assert actual == check_scenario.expected_result
def test_open_log(tmp_path):
    """Log lines carry timestamp, level and logger name, and non-ASCII text
    is written UTF-8 encoded."""
    log_file = tmp_path / "test.log"
    log.open_log(log_file)

    with on_time("2018-04-15 16:50", "CET"):
        log.logger.warning("abc")
        log.logger.warning("äbc")

    expected = (
        b"2018-04-15 18:50:00,000 [30] [cmk] abc\n"
        b"2018-04-15 18:50:00,000 [30] [cmk] \xc3\xa4bc\n"
    )
    with log_file.open("rb") as f:
        assert f.read() == expected
def test_cleanup_user_profiles_keep_active_profile_old(user_id):
    """A profile that belongs to an existing user survives cleanup even when
    all its files are old."""
    profile_dir = cmk.utils.paths.profile_dir.joinpath(user_id)
    assert profile_dir.exists()

    # Age every profile file to the mocked (past) timestamp.
    with on_time("2018-04-15 16:50", "CET"):
        for file_path in profile_dir.glob("*.mk"):
            os.utime(file_path, (time.time(), time.time()))

    userdb.UserProfileCleanupBackgroundJob()._do_cleanup()
    assert cmk.utils.paths.profile_dir.joinpath(user_id).exists()
def test_filters_filter(request_context, test, monkeypatch):
    """Each registered filter produces the expected filter output from its
    request variables."""
    # Needed for ABCFilterCustomAttribute
    monkeypatch.setattr(active_config, "wato_host_attrs", [{"name": "bla", "title": "Bla"}])
    # Need for ABCTagFilter
    monkeypatch.setattr(active_config, "tags", cmk.utils.tags.BuiltinTagConfig())

    with on_time("2018-04-15 16:50", "CET"):
        filt = cmk.gui.plugins.visuals.utils.filter_registry[test.ident]
        # Start from the filter's exhaustive defaults, then overlay the
        # scenario's request variables.
        filter_vars = dict(filt.value())
        filter_vars.update(dict(test.request_vars))
        assert filt.filter(filter_vars) == test.expected_filters