def test_main_module(mocker: MockerFixture) -> None:
    """Loggers built before/after faking a ``__main__`` caller are equivalent."""
    logger_before = structlog_sentry_logger.get_logger()
    # Pretend the calling module is __main__ so the fallback naming path runs.
    # pylint: disable=protected-access
    mocker.patch.object(
        structlog_sentry_logger._config, "is_caller_main", lambda _: True
    )
    # pylint: enable=protected-access
    logger_after = structlog_sentry_logger.get_logger()
    assert repr(logger_before) == repr(logger_after)
    namespaced_name = structlog_sentry_logger.get_namespaced_module_name(__file__)
    # Both loggers must share the namespaced module name, never "__main__".
    assert logger_before.name == logger_after.name
    assert logger_after.name == namespaced_name == __name__
    assert __name__ != "__main__"
def get_structlog_captured_logs() -> Union[List[MutableMapping[str, Any]], List[LogType]]:
    """Capture structlog-rendered log entries for ``random_log_msgs``.

    Temporarily splices a ``structlog.testing.LogCapture`` processor into the
    globally configured processor chain, emits one debug log per message in
    ``random_log_msgs`` (free variable from the enclosing scope), then restores
    the original configuration.

    Returns:
        The captured log entries, one mapping per emitted message.
    """
    structlog_caplog = structlog.testing.LogCapture()
    orig_processors = structlog.get_config()["processors"]
    patched_procs = orig_processors.copy()
    # Insert just before the final renderer so entries are captured as dicts.
    patched_procs.insert(-1, structlog_caplog)
    structlog.configure(processors=patched_procs)
    try:
        for log_msg in random_log_msgs:
            structlog_sentry_logger.get_logger().debug(log_msg)
    finally:
        # Always restore the original chain, even if logging raises; otherwise
        # the patched (capturing) config would leak into unrelated tests.
        structlog.configure(processors=orig_processors)
    captured_logs = structlog_caplog.entries
    assert captured_logs
    return captured_logs
def test_cloud_logging_log_key_added(
    caplog: LogCaptureFixture,
    monkeypatch: MonkeyPatch,
    test_data: dict,
    cloud_logging_compatibility_mode_env_var: str,
) -> None:
    """Cloud Logging mode mirrors ``level`` into a ``severity`` key."""
    # Enable Cloud Logging compatibility mode
    structlog.reset_defaults()
    monkeypatch.setenv(
        "_STRUCTLOG_SENTRY_LOGGER_STDLIB_BASED_LOGGER_MODE_ON", "ANY_VALUE"
    )
    monkeypatch.setenv(cloud_logging_compatibility_mode_env_var, "ANY_VALUE")

    # Initialize Cloud Logging-compatible logger and perform logging
    logger = structlog_sentry_logger.get_logger()
    logger.debug("Testing Cloud Logging-compatible logger", **test_data)
    assert caplog.records

    # Parse logs and validate schema
    for record in caplog.records:
        captured = record.msg
        if not isinstance(captured, dict):
            raise NotImplementedError("Captured log message not a supported type")
        # structlog logger: submitted keys round-trip, and the Cloud Logging
        # "severity" key duplicates the Python "level" value.
        for key, expected in test_data.items():
            assert captured[key] == expected
        assert "severity" in captured
        assert captured["level"] == captured["severity"]
def get_pytest_captured_logs() -> Union[List[str], List[LogType]]:
    """Emit one debug log per message and return pytest-captured payloads.

    NOTE(review): relies on ``caplog`` and ``random_log_msgs`` being free
    variables from an enclosing scope not visible in this chunk.
    """
    logger = structlog_sentry_logger.get_logger()
    for message in random_log_msgs:
        logger.debug(message)
    records = [record.msg for record in caplog.records]
    assert records
    return records
def test_sentry_DSN_integration(
    caplog: LogCaptureFixture,
    monkeypatch: MonkeyPatch,
) -> None:
    """An exception logged via ``logger.exception`` is shipped to Sentry."""
    tests.utils.enable_sentry_integration_mode(monkeypatch)
    error_cls = ConnectionError
    with pytest.raises(error_cls):
        try:
            raise error_cls("DUMMY ERROR TO TEST SENTRY CONNECTION")
        except error_cls as caught:
            logger = structlog_sentry_logger.get_logger()
            # This line sends the above exception event to Sentry, with all the breadcrumbs included
            logger.exception("Exception caught and thrown")
            assert caplog.records
            for record in caplog.records:
                captured = record.msg
                if isinstance(captured, dict):
                    # structlog logger
                    assert captured["level"] == "error" == record.levelname.lower()
                    assert captured["sentry"] == "sent"
                    assert "sentry_id" in captured
                elif isinstance(captured, str):
                    # other stdlib-based logger initialized BEFORE our structlog logger;
                    # i.e., Sentry-invoked `urllib3.connectionpool` logger
                    assert record.name == "urllib3.connectionpool"
                    assert "sentry" in record.message
                else:
                    raise NotImplementedError(
                        "Captured log message not a supported type"
                    ) from caught
            # Re-raise so the surrounding pytest.raises block is satisfied.
            raise caught
def test(caplog: LogCaptureFixture, test_data: dict) -> None:
    """The main logger faithfully records every structured key/value pair."""
    logger = structlog_sentry_logger.get_logger()
    logger.debug("Testing main Logger", **test_data)
    assert caplog.records
    for record in caplog.records:
        captured = record.msg
        if not isinstance(captured, dict):
            raise NotImplementedError("Captured log message not a supported type")
        # structlog logger: each submitted key must round-trip unchanged
        for key, expected in test_data.items():
            assert captured[key] == expected
def test(caplog: LogCaptureFixture, test_data: dict) -> None:
    """Non-str-keyed payloads survive when nested under a ``test_data`` kwarg."""
    if "all test cases simultaneously" in test_data:
        test_data = test_data["all test cases simultaneously"]
    logger = structlog_sentry_logger.get_logger()
    # nest non-str dict under a `test_data` kwarg; orjson can serialize non-str
    # keys, but these are not valid python kwarg keys
    logger.debug("Testing main Logger", test_data=test_data)
    assert caplog.records
    for record in caplog.records:
        captured = record.msg
        if not isinstance(captured, dict):
            raise NotImplementedError("Captured log message not a supported type")
        # structlog logger: the nested payload must round-trip unchanged
        for key, expected in test_data.items():
            assert captured["test_data"][key] == expected
def test_cloud_logging_log_key_not_added_in_normal_logging(
    caplog: LogCaptureFixture,
    test_data: dict,
) -> None:
    """Without compatibility mode, no ``severity`` key is injected."""
    # Initialize non-Cloud Logging-compatible logger and perform logging
    logger = structlog_sentry_logger.get_logger()
    logger.debug("Testing non-Cloud Logging-compatible logger", **test_data)
    assert caplog.records

    # Parse logs and validate schema
    for record in caplog.records:
        captured = record.msg
        if not isinstance(captured, dict):
            raise NotImplementedError("Captured log message not a supported type")
        # structlog logger: the Cloud Logging key must be absent
        assert "severity" not in captured
def test_cloud_logging_log_key_overwritten(
    capsys: CaptureFixture,
    monkeypatch: MonkeyPatch,
    test_data: dict,
    cloud_logging_compatibility_mode_env_var: str,
) -> None:
    """A caller-supplied ``severity`` kwarg is overwritten (with a warning)."""
    # Enable Cloud Logging compatibility mode
    tests.utils.reset_logging_configs()
    monkeypatch.setenv(cloud_logging_compatibility_mode_env_var, "ANY_VALUE")

    # Initialize Cloud Logging-compatible logger and perform logging
    logger = structlog_sentry_logger.get_logger()
    dummy_severity = "DUMMY_VALUE_FOR_TESTING"
    with pytest.warns(RuntimeWarning):
        logger.debug(
            "Testing Cloud Logging-compatible logger",
            **test_data,
            severity=dummy_severity,
        )

    # Parse logs
    library_log, test_log = tests.utils.read_json_logs_from_stdout(capsys)
    if not (isinstance(test_log, dict) and isinstance(library_log, dict)):
        raise NotImplementedError("Captured log messages not a supported type")

    cloud_key = "severity"
    level_key = "level"

    # Validate Cloud Logging key correctly overwritten
    assert test_log[level_key] == test_log[cloud_key]
    assert test_log[cloud_key] != dummy_severity

    # Validate debug log schema: the library warns about the clobbered key
    assert library_log[level_key] == "warning"
    assert library_log[level_key] != dummy_severity
    assert library_log["src_key"] == level_key
    assert library_log["dest_key"] == cloud_key
    assert library_log["old_value"] == dummy_severity
    assert library_log["new_value"] == "debug"
    assert library_log["logger_that_used_reserved_key"] == logger.name
def test_non_structlog_logger(
    caplog: LogCaptureFixture, random_log_msgs: List[uuid.UUID]
) -> None:
    """Any non-structlog records captured come only from urllib3's logger."""
    logger = structlog_sentry_logger.get_logger()
    for message in random_log_msgs:
        logger.debug(message)
    assert caplog.records
    for record in caplog.records:
        captured = record.msg
        if isinstance(captured, dict):
            # structlog-produced records are validated in other tests
            continue
        if not isinstance(captured, str):
            raise NotImplementedError("Captured log message not a supported type")
        # other stdlib-based logger initialized BEFORE our structlog logger;
        # i.e., Sentry-invoked `urllib3.connectionpool` logger
        assert record.name == "urllib3.connectionpool"
        assert "sentry" in record.message
def _test(
    capsys: CaptureFixture,
    test_data: dict,
    is_cloud_logging_mode: bool,
) -> None:
    """Shared assertion helper: validate the first JSON log written to stdout."""
    logger = structlog_sentry_logger.get_logger()
    logger.debug("Testing main Logger", **test_data)
    captured = tests.utils.read_json_logs_from_stdout(capsys)[0]
    if not isinstance(captured, dict):
        raise NotImplementedError("Captured log message not a supported type")
    # Compare through the library's serializer so non-JSON-native values
    # (e.g. UUIDs, datetimes) are normalized identically on both sides.
    # pylint: disable=protected-access
    serialize = structlog_sentry_logger._config.serializer
    for key in test_data:
        assert serialize(captured[key]) == serialize(test_data[key])
    if is_cloud_logging_mode:
        assert "severity" in captured
        assert captured["level"] == captured["severity"]
    else:
        assert "severity" not in captured
def test_structlog_logger(
    caplog: LogCaptureFixture,
    monkeypatch: MonkeyPatch,
    random_log_msgs: List[uuid.UUID],
    is_sentry_integration_mode_requested: bool,
) -> None:
    """Every structlog record carries the expected schema and Sentry marker."""
    tests.utils.enable_sentry_integration_mode(monkeypatch)
    if not is_sentry_integration_mode_requested:
        monkeypatch.delenv(
            "STRUCTLOG_SENTRY_LOGGER_CLOUD_SENTRY_INTEGRATION_MODE_ON",
            raising=False,
        )
    logger = structlog_sentry_logger.get_logger()
    for message in random_log_msgs:
        logger.debug(message)
    assert caplog.records

    structlog_records = [
        record for record in caplog.records if isinstance(record.msg, dict)
    ]
    expected_name = structlog_sentry_logger.get_namespaced_module_name(__file__)
    for record, message in zip(structlog_records, random_log_msgs):
        entry = record.msg
        assert entry["level"] == "debug"  # type: ignore[index]
        assert record.levelname.lower() == "debug"
        assert entry["logger"] == logger.name  # type: ignore[index]
        assert logger.name == record.name == expected_name == __name__
        assert entry["event"] == message  # type: ignore[index]
        if is_sentry_integration_mode_requested:
            # Integration mode tags (but skips sending) debug-level events.
            assert entry["sentry"] == "skipped"  # type: ignore[index]
        else:
            assert "sentry" not in entry
        assert "timestamp" in entry
import structlog_sentry_logger

# Module-level logger and its namespaced identity.
LOGGER = structlog_sentry_logger.get_logger()
MODULE_NAME = structlog_sentry_logger.get_namespaced_module_name(__file__)
# Logged as structured context below; presumably a caller sleeps this many
# seconds between emissions — TODO confirm against the consuming test.
SLEEP_TIME = 2


def log_warn() -> None:
    """Emit one warning-level log describing this module's identity.

    Uses ``warning`` rather than the deprecated ``warn`` alias (deprecated in
    both stdlib ``logging`` and structlog); the emitted level is identical.
    """
    LOGGER.warning(MODULE_NAME, file=__file__, name=__name__, sleep_time=SLEEP_TIME)
def _benchmark_runner(benchmark: BenchmarkFixture, test_cases: dict) -> None:
    """Benchmark heavy logging through a freshly initialized library logger."""
    benchmark(
        utils.lots_of_logging,
        logger=structlog_sentry_logger.get_logger(),
        test_cases=test_cases,
    )