def test_sentry_json_ignore_logger_using_logger_instance_name(mocker):
    """The processor registers the logger's own name on Sentry's ignore list."""
    ignore_mock = mocker.patch("structlog_sentry.ignore_logger")
    mock_logger = MockLogger("MockLogger")
    processor = SentryJsonProcessor()

    # Nothing has been ignored before the call.
    assert not processor._ignored

    processor._ignore_logger(
        logger=mock_logger,
        event_dict={"level": "info", "event": "message"},
    )

    ignore_mock.assert_called_once_with(mock_logger.name)
    assert mock_logger.name in processor._ignored
def test_sentry_json_ignore_logger_using_event_dict_record(mocker):
    """A ``_record`` entry in the event dict takes priority over the logger."""
    ignore_mock = mocker.patch("structlog_sentry.ignore_logger")
    mock_logger = MockLogger("MockLogger")
    record_logger = MockLogger("RecordLogger")
    event = {
        "level": "info",
        "event": "message",
        "_record": record_logger,
    }
    processor = SentryJsonProcessor()

    # Ignore list starts out empty.
    assert not processor._ignored

    processor._ignore_logger(logger=mock_logger, event_dict=event)

    # The record's name — not the logger's — must be the one ignored.
    ignore_mock.assert_called_once_with(record_logger.name)
    assert record_logger.name in processor._ignored
def init_logging(self):
    """Init logging.

    Applies the stdlib ``dictConfig`` from ``self.LOG_CONFIG`` and then
    configures structlog's processor chain, which reports WARNING-and-above
    events to Sentry via ``SentryJsonProcessor``.
    """
    if self.DEBUG:  # pragma: no cover
        # Debug runs get verbose output from the bot's own logger.
        self.LOG_CONFIG['loggers']['tg_odesli_bot']['level'] = 'DEBUG'
        # NOTE(review): nesting reconstructed from a collapsed source — the
        # renderer override is assumed to be debug-only; confirm against the
        # original file.
        if not isinstance(self.LOG_RENDERER, structlog.dev.ConsoleRenderer):
            self.LOG_RENDERER = structlog.dev.ConsoleRenderer(pad_event=50)
    logging.config.dictConfig(self.LOG_CONFIG)
    structlog.configure(
        processors=[
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt='iso'),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.UnicodeDecoder(),
            # Report WARNING+ events to Sentry; expose status_code as a tag.
            SentryJsonProcessor(
                level=logging.WARNING, tag_keys=['status_code']
            ),
            structlog.processors.format_exc_info,
            # The renderer must come last: it turns the event dict into the
            # final output.
            self.LOG_RENDERER,
        ],
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )
def test_sentry_json_call_ignores_logger_once(mocker):
    """Repeated calls for the same logger hit ignore_logger exactly once."""
    processor = SentryJsonProcessor()
    ignore_mock = mocker.patch("structlog_sentry.ignore_logger")
    skip_event = {"level": "warning", "event": "message", "sentry_skip": True}
    logger = MockLogger("MockLogger")

    # Three identical invocations; only the first should register the ignore.
    for _ in range(3):
        processor(logger, None, skip_event)

    ignore_mock.assert_called_once_with(logger.name)
def test_sentry_json_ignores_multiple_loggers_once(mocker):
    """Each distinct logger gets added to the ignore list exactly once."""
    processor = SentryJsonProcessor()
    # NOTE(review): this patches "logging_int_ignore_logger" whereas the
    # sibling tests patch "ignore_logger" — assumed intentional; confirm.
    ignore_mock = mocker.patch(
        "structlog_sentry.logging_int_ignore_logger")
    skip_event = {"level": "warning", "event": "message", "sentry_skip": True}
    first = MockLogger("MockLogger")
    second = MockLogger("MockLogger2")

    # Repeated calls with the first logger register it only once.
    for _ in range(3):
        processor(first, None, skip_event)
    ignore_mock.assert_called_once_with(first.name)

    ignore_mock.reset_mock()

    # A different logger is registered independently, again only once.
    for _ in range(3):
        processor(second, None, skip_event)
    ignore_mock.assert_called_once_with(second.name)
], False: [ structlog.stdlib.add_logger_name, structlog.stdlib.add_log_level, structlog.stdlib.filter_by_level, structlog.stdlib.PositionalArgumentsFormatter(), structlog.processors.TimeStamper(), SentryJsonProcessor( level=logging.ERROR, tag_keys=[ "environment", "level", "logger", "runtime", "server_name", "video_id", "video_hexkey", "s3_object_key", "filename", "youtubevideo_id", "youtubevideo_video_id", "video_status", ], as_extra=False, ), structlog.processors.StackInfoRenderer(), structlog.processors.format_exc_info, structlog.processors.JSONRenderer(), ], } structlog.configure(
""" Enable Flask metrics, using https://github.com/rycus86/prometheus_flask_exporter """ # Logging config # ============== logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.INFO) # Structlog # ========= structlog.configure( processors=[ structlog.stdlib.filter_by_level, structlog.stdlib.add_logger_name, structlog.stdlib.add_log_level, structlog.stdlib.PositionalArgumentsFormatter(), structlog.processors.TimeStamper(fmt="iso"), structlog.processors.StackInfoRenderer(), structlog.processors.format_exc_info, structlog.processors.UnicodeDecoder(), SentryJsonProcessor(level=logging.ERROR, tag_keys="__all__"), structlog.processors.JSONRenderer(), ], context_class=dict, logger_factory=structlog.stdlib.LoggerFactory(), wrapper_class=structlog.stdlib.BoundLogger, cache_logger_on_first_use=True, )
def configure_logging(command: Optional[str] = None):
    """Configure stdlib logging and structlog, and if SENTRY_DSN is set, Sentry.

    Parameters:
        command: a command name added to the Sentry events.
    """
    # First structlog is configured to send errors to Sentry and use stdlib
    # for console logging. If we start getting duplicate logs from structlog
    # I'm guessing stdlib is also logging them so remove SentryProcessor.
    # Based on https://www.structlog.org/en/stable/standard-library.html#rendering-using-structlog-based-formatters-within-logging
    structlog.configure(
        context_class=dict,
        wrapper_class=structlog.stdlib.BoundLogger,
        processors=[
            structlog.stdlib.add_log_level,  # required before SentryProcessor()
            # sentry_sdk creates events for level >= ERROR. Getting
            # breadcrumbs from structlog isn't supported without a lot of
            # custom work. See
            # https://github.com/kiwicom/structlog-sentry/issues/25.
            # The SentryJsonProcessor is used to protect against event
            # duplication. It adds loggers to a sentry_sdk ignore list,
            # making sure that the message logged is not reported in
            # addition to the exception stack trace.
            SentryJsonProcessor(level=logging.ERROR, tag_keys="__all__"),
            structlog.processors.TimeStamper(fmt="iso"),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            # It is important that wrap_for_formatter is the last processor.
            # It converts the processed event dict to something that the
            # ProcessorFormatter (the logging.Formatter passed to
            # setFormatter) understands.
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )

    # Second, configure stdlib logging to format structlog events and any
    # other parameters we want to set.
    formatter = structlog.stdlib.ProcessorFormatter(
        # TODO(tom) fix https://github.com/hynek/structlog/issues/166 so keys appear in added order
        processor=structlog.dev.ConsoleRenderer(),
        foreign_pre_chain=[
            structlog.stdlib.add_logger_name,
            structlog.processors.TimeStamper(fmt="iso"),
        ],
    )
    # Attach the structlog-aware formatter to the root logger so foreign
    # (plain stdlib) records get the same rendering.
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    root_logger = logging.getLogger()
    root_logger.addHandler(handler)
    root_logger.setLevel(logging.INFO)

    # Initialize sentry_sdk, but only when a DSN is configured in the
    # environment; the environment name is optional.
    sentry_dsn = os.getenv("SENTRY_DSN")
    sentry_environment = None
    if SENTRY_ENVIRONMENT_ENV in os.environ:
        sentry_environment = Environment(os.getenv(SENTRY_ENVIRONMENT_ENV))
    if sentry_dsn:
        sentry_sdk.init(sentry_dsn, environment=sentry_environment)
    if command:
        # Tag every Sentry event from this process with the command name.
        with sentry_sdk.configure_scope() as scope:
            scope.set_tag("command", command)
"handlers": [ "console", ], "level": "CRITICAL", }, }, } WrappedDictClass = wrap_dict(dict) structlog.configure( processors=[ structlog.stdlib.filter_by_level, structlog.stdlib.add_logger_name, structlog.stdlib.add_log_level, structlog.stdlib.PositionalArgumentsFormatter(), structlog.processors.TimeStamper(fmt="iso"), structlog.processors.StackInfoRenderer(), structlog.processors.format_exc_info, SentryJsonProcessor(level=ERROR), structlog.processors.JSONRenderer(), ], context_class=WrappedDictClass, logger_factory=structlog.stdlib.LoggerFactory(), wrapper_class=structlog.stdlib.BoundLogger, cache_logger_on_first_use=True, ) cssutils_log = getLogger("cssutils") cssutils.log.setLog(cssutils_log)
dev.set_exc_info, processors.format_exc_info, date_formatter, add_environment, stdlib.ProcessorFormatter.wrap_for_formatter, ] prod_processors = [ stdlib.filter_by_level, stdlib.add_logger_name, stdlib.add_log_level, add_environment, processors.format_exc_info, processors.UnicodeDecoder(), processors.TimeStamper(fmt="ISO", utc=True, key="@timestamp"), SentryJsonProcessor(level=logging.ERROR, tag_keys=["environment"]), stdlib.ProcessorFormatter.wrap_for_formatter, ] if ENVIRONMENT in ("dev", "test"): processors_list = dev_processors else: processors_list = prod_processors sentry_sdk.init( dsn=str(SENTRY_DSN), environment=ENVIRONMENT, ) configure( processors=processors_list, logger_factory=stdlib.LoggerFactory(),