def test_get_logger_new_no_name():
    """A logger fetched without a name is named after the calling module/function."""
    # Registry starts empty.
    assert not containerlog.manager.loggers

    log = containerlog.get_logger()

    # The name is derived from the caller: <module>.<function>.
    assert log.name == "test_containerlog.test_get_logger_new_no_name"
    assert log.level == containerlog.DEBUG
    # The new logger was registered with the manager.
    assert len(containerlog.manager.loggers) == 1
def test_get_logger_existing():
    """Fetching a logger by a known name returns the registered instance."""
    existing = containerlog.Logger("test", manager=containerlog.manager)
    containerlog.manager.loggers["test"] = existing
    assert len(containerlog.manager.loggers) == 1

    got = containerlog.get_logger("test")

    # Same registered logger is handed back; nothing new was created.
    assert got == existing
    assert len(containerlog.manager.loggers) == 1
"""""" import asyncio import random import time from containerlog import enable_contextvars, get_logger from containerlog.contextvars import bind, context_binding, unbind enable_contextvars() logger = get_logger() l2 = get_logger("other") async def nested(): with context_binding(scope="nested"): l2.info("additional call") async def run(name: str): logger.info("starting runner", name=name) bind(runner=name) for _ in range(10): wait = random.randrange(1, 15) / 10 logger.info("waiting for next message", now=time.time(), wait=wait) await nested() await asyncio.sleep(wait) logger.info("unbinding runner name")
"long-simple": bench_long_simple, "short-complex": bench_short_complex, "long-complex": bench_long_complex, "exception": bench_exception, "async-context": bench_async_context, } if __name__ == "__main__": runner = pyperf.Runner() runner.metadata["description"] = "Test the performance of containerlog." # Note: StringIO performance will impact the results stream = io.StringIO() # Setup the logger log = containerlog.get_logger("bench-containerlog") log.level = containerlog.WARN log.writeout = stream.write log.writeerr = stream.write for name, fn in BENCHMARKS.items(): # Truncate the stream before each benchmark. stream.seek(0) stream.truncate() runner.bench_time_func( name, fn, log, inner_loops=25, )
def __init__(self, name: str) -> None:
    """Initialize the proxy: attach a containerlog logger under *name*,
    then run the standard base-class initialization."""
    self.containerlog = containerlog.get_logger(name)
    # Zero-arg super() is equivalent to super(StdLoggerProxy, self) here.
    super().__init__(name)