def test_with_unserialisable_value_in_message(stdout):
    """An object the JSON encoder cannot handle is rendered via its default repr."""
    logger = Logger(level="DEBUG", stream=stdout)

    class X:
        pass

    logger.debug({"x": X()})

    # Default repr of a plain object starts with "<" (e.g. "<...X object at 0x...>")
    output = json.loads(stdout.getvalue())
    assert output["message"]["x"].startswith("<")
def test_with_unserializable_value_in_message(stdout, service_name):
    """A value the JSON encoder cannot serialize falls back to its default repr."""
    logger = Logger(service=service_name, level="DEBUG", stream=stdout)

    class Unserializable:
        pass

    logger.debug({"x": Unserializable()})

    # Default repr of a plain object starts with "<"
    output = json.loads(stdout.getvalue())
    assert output["message"]["x"].startswith("<")
def test_setup_sampling_rate_env_var(monkeypatch, stdout, service_name):
    # GIVEN sampling rate is explicitly set to 100% via POWERTOOLS_LOGGER_SAMPLE_RATE env var
    rate = "1"
    monkeypatch.setenv("POWERTOOLS_LOGGER_SAMPLE_RATE", rate)

    # WHEN Logger is initialized and a debug statement is emitted
    logger = Logger(service=service_name, stream=stdout)
    logger.debug("I am being sampled")

    # THEN the emitted log should carry the configured sampling rate,
    # the effective level should be DEBUG,
    # and the debug statement should appear in stdout
    log = capture_logging_output(stdout)
    assert log["sampling_rate"] == rate
    assert log["level"] == "DEBUG"
    assert log["message"] == "I am being sampled"
def test_with_unserializable_value_in_message_custom(stdout):
    class Unserializable:
        pass

    # GIVEN a logger configured with a custom json_default handler
    logger = Logger(
        level="DEBUG",
        stream=stdout,
        json_default=lambda o: f"<non-serializable: {type(o).__name__}>",
    )

    # WHEN an object the JSON encoder cannot handle is logged
    logger.debug({"x": Unserializable()})

    # THEN the custom handler's output should be used for the value,
    # and the json_default callable itself should not leak into the log
    output = json.loads(stdout.getvalue())
    assert output["message"]["x"] == "<non-serializable: Unserializable>"
    assert "json_default" not in output
def test_setup_sampling_rate(monkeypatch, stdout):
    # GIVEN sampling rate is explicitly defined via POWERTOOLS_LOGGER_SAMPLE_RATE env var
    # WHEN the logger is set up
    # THEN the emitted log should carry that sampling rate and debug logs should be sampled in
    rate = "1"
    monkeypatch.setenv("POWERTOOLS_LOGGER_SAMPLE_RATE", rate)
    monkeypatch.setenv("LOG_LEVEL", "INFO")

    logger = Logger(stream=stdout)
    logger.debug("I am being sampled")

    output = json.loads(stdout.getvalue())
    assert output["sampling_rate"] == rate
    assert output["level"] == "DEBUG"
    assert output["message"] == "I am being sampled"
def event_log(s3_bucket: str, s3_key: str, function_name: str, logger: Logger) -> EventLog:
    """Generator that tracks the processing of a single S3 object.

    Persists an ``EventLog`` record in PROCESSING state, yields it to the
    caller, then marks it processed on success, or marks it failed and
    re-raises on error.

    NOTE(review): being a generator used as a ``with``-style scope, this is
    presumably wrapped with ``@contextlib.contextmanager`` at the decoration
    site (not visible here) — confirm.

    :param s3_bucket: bucket of the object being processed
    :param s3_key: key of the object being processed
    :param function_name: name of the Lambda function doing the processing
    :param logger: Powertools Logger; bucket/key are appended as log keys
    :raises Exception: re-raises anything raised by the caller's block,
        after marking the record failed
    """
    import logging  # local import: only needed for the level-name lookup below

    logger.append_keys(s3_bucket=s3_bucket, s3_key=s3_key)
    logger.debug("processing s3 event")  # plain string — no placeholders, no f-prefix needed
    log = EventLog(
        status=STATUS_PROCESSING,
        s3_key=s3_key,
        s3_bucket=s3_bucket,
        function=function_name,
        region=os.environ.get('AWS_REGION', 'no region set'),
        received_time=datetime.now(timezone.utc),  # TODO get from s3 record
    )
    log.logger = logger
    log.save()
    try:
        yield log
        log.mark_processed()
    except Exception as e:
        # BUG FIX: Powertools Logger.log_level returns the *numeric* logging
        # level, so the original `logger.log_level == 'DEBUG'` comparison was
        # always False and the traceback was never logged. Compare against the
        # level name instead.
        if logging.getLevelName(logger.log_level) == 'DEBUG':
            logger.exception("error processing s3 object")
        log.mark_failed(str(e))
        raise
import json import os import boto3 from aws_lambda_powertools import Logger, Metrics, Tracer from boto3.dynamodb.conditions import Key from shared import get_cart_id, get_headers, handle_decimal_type logger = Logger() tracer = Tracer() metrics = Metrics() dynamodb = boto3.resource("dynamodb") logger.debug("Initializing DDB Table %s", os.environ["TABLE_NAME"]) table = dynamodb.Table(os.environ["TABLE_NAME"]) @metrics.log_metrics(capture_cold_start_metric=True) @logger.inject_lambda_context(log_event=True) @tracer.capture_lambda_handler def lambda_handler(event, context): """ Update cart table to use user identifier instead of anonymous cookie value as a key. This will be called when a user is logged in. """ cart_id, _ = get_cart_id(event["headers"]) try: # Because this method is authorized at API gateway layer, we don't need to validate the JWT claims here