def get_logger():
    logging.setLoggerClass(PyLogrus)
    logger = logging.getLogger(__name__)  # type: PyLogrus
    logger.setLevel(logging.DEBUG)

    enabled_fields = [
        ('name', 'logger_name'),
        ('asctime', 'service_timestamp'),
        ('levelname', 'level'),
        ('threadName', 'thread_name'),
        'message',
        ('exception', 'exception_class'),
        ('stacktrace', 'stack_trace'),
        'module',
        ('funcName', 'function'),
    ]

    formatter = JsonFormatter(datefmt='Z', enabled_fields=enabled_fields, indent=2, sort_keys=True)
    formatter.override_level_names({'CRITICAL': 'FATAL', 'WARNING': 'WARN'})

    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    ch.setFormatter(formatter)

    logger.addHandler(ch)

    return logger
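# A minimal usage sketch for get_logger() above (not part of the original
# snippet): withFields/withPrefix are the PyLogrus calls exercised by the
# tests below; the field names and messages here are illustrative only.
log = get_logger()
log.debug("plain debug message")
log.withFields({'error_code': 404}).info("message with an extra JSON field")
log.withPrefix("[API]").warning("message with a text prefix")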
def test_level_names_overriding(self):
    formatter = JsonFormatter()
    formatter.override_level_names({'CRITICAL': 'FATAL'})
    log = self.get_logger(formatter)
    log.critical("test message")

    with open(self.filename) as f:
        content = json.loads(f.readlines()[-1])

    self.assertEqual(content['levelname'], 'FATAL')
def test_unicode(self):
    formatter = JsonFormatter()
    log = self.get_logger(formatter)
    log.debug("😄 😁 😆 😅 😂")

    with open(self.filename, 'rb') as f:
        content = json.loads(f.readlines()[-1])

    self.assertIn("\U0001f604 \U0001f601 \U0001f606 \U0001f605 \U0001f602", repr(content['message']))
def get_logger():
    # The PoC uses a Python port of the Go logging library that the final
    # version will use (once it is written in Go). It's a bit un-Pythonic,
    # but it will ease the move from PoC to final code.
    logging.setLoggerClass(PyLogrus)
    logger = logging.getLogger(__name__)  # type: PyLogrus
    logger.setLevel(logging.DEBUG)

    enabled_fields = [
        ('name', 'logger_name'),
        ('asctime', 'service_timestamp'),
        ('levelname', 'level'),
        ('threadName', 'thread_name'),
        'message',
        ('exception', 'exception_class'),
        ('stacktrace', 'stack_trace'),
        'module',
        ('funcName', 'function'),
    ]

    formatter = JsonFormatter(datefmt='Z', enabled_fields=enabled_fields, sort_keys=True)
    # Uncomment below to output structured logs in a commandline-friendly text format.
    # formatter = TextFormatter(datefmt='Z', colorize=False)

    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    ch.setFormatter(formatter)

    logger.addHandler(ch)

    return logger
def test_extra_fields(self):
    formatter = JsonFormatter()
    log = self.get_logger(formatter)
    log.withFields({'user': 'John Doe'}).info("test message")

    with open(self.filename) as f:
        content = json.loads(f.readlines()[-1])

    self.assertIn('user', content)
    self.assertEqual(content['user'], 'John Doe')
def __init__(self, config_path):
    self.config = self.get_config(config_path)

    # Attach a per-site JSON log file to the logger.
    fh = logging.FileHandler(
        "logs/sitealgo" + str(self.config["site_id"]) + ".log", 'w+')
    fh.setLevel(logging.DEBUG)
    jsonformatter = JsonFormatter(datefmt='Z')
    fh.setFormatter(jsonformatter)
    logger.addHandler(fh)

    # `logger` and `log` are assumed to be module-level names shared with
    # the rest of the module (see the sketch below).
    log = logger.withFields({"node": "site-algo"})
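# The __init__ above references module-level names `logger` and `log` that it
# does not define. A minimal sketch of the setup it presumably relies on,
# mirroring the cloud-algo constructor further down (the exact wiring is an
# assumption, not confirmed by the original snippet):
import logging
from pylogrus import PyLogrus, TextFormatter

logging.setLoggerClass(PyLogrus)
logger = logging.getLogger(__name__)  # type: PyLogrus
logger.setLevel(logging.DEBUG)

ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(TextFormatter(datefmt='Z', colorize=True))
logger.addHandler(ch)

log = logger.withFields({"node": "site-algo"})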
def test_date_format(self):
    formatter = JsonFormatter(datefmt='%m/%d/%Y %I:%M:%S %p')
    log = self.get_logger(formatter)
    log.info("test a date in custom format")

    with open(self.filename) as f:
        content = json.loads(f.readlines()[-1])

    pattern = r"^\d{2}/\d{2}/\d{4} \d{2}:\d{2}:\d{2} [AP]M$"
    if sys.version_info >= (3, 1):  # Python version >= 3.1
        self.assertRegex(content['asctime'], pattern)
    else:
        self.assertRegexpMatches(content['asctime'], pattern)
def test_date_format_zulu(self):
    formatter = JsonFormatter(datefmt='Z')
    log = self.get_logger(formatter)
    log.info("test a date in Zulu format")

    with open(self.filename) as f:
        content = json.loads(f.readlines()[-1])

    pattern = r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$"
    if sys.version_info >= (3, 1):  # Python version >= 3.1
        self.assertRegex(content['asctime'], pattern)
    else:
        self.assertRegexpMatches(content['asctime'], pattern)
def test_message_with_prefix(self):
    formatter = JsonFormatter()
    log = self.get_logger(formatter)
    log_pfx = log.withPrefix("[API]")
    log_pfx.info("Log message with the prefix")

    with open(self.filename) as f:
        content = json.loads(f.readlines()[-1])

    self.assertEqual(content['message'], "[API] Log message with the prefix")

    log_pfx.critical("Another log message with the prefix")

    with open(self.filename) as f:
        content = json.loads(f.readlines()[-1])

    self.assertEqual(content['message'], "[API] Another log message with the prefix")
def test_json_encoder(self):
    class MyEncoder(json.JSONEncoder):
        def default(self, obj):
            if isinstance(obj, datetime.datetime):
                return obj.isoformat()
            return super().default(obj)

    formatter = JsonFormatter(json_encoder=MyEncoder)
    log = self.get_logger(formatter, name="json-encoder")
    now = datetime.datetime.now()
    log.withFields({"some_datetime": now}).debug("TEST")

    with open(self.filename) as f:
        content = json.loads(f.readlines()[-1])

    self.assertIn("some_datetime", content)
    self.assertEqual(content["some_datetime"], now.isoformat())
    self.assertIn("message", content)
    self.assertEqual(content["message"], "TEST")
def test_levels(self):
    formatter = JsonFormatter()
    log = self.get_logger(formatter)

    cases = [
        (log.debug, 'DEBUG', 'test_debug_output'),
        (log.info, 'INFO', 'test_info_output'),
        (log.warning, 'WARNING', 'test_warning_output'),
        (log.error, 'ERROR', 'test_error_output'),
        (log.critical, 'CRITICAL', 'test_critical_output'),
    ]
    for emit, levelname, message in cases:
        emit(message)

        with open(self.filename) as f:
            content = json.loads(f.readlines()[-1])

        self.assertIn('levelname', content)
        self.assertEqual(content['levelname'], levelname)
        self.assertIn('message', content)
        self.assertEqual(content['message'], message)
def test_contextual_logging(self):
    formatter = JsonFormatter()
    log = self.get_logger(formatter)
    log_ctx = log.withFields({'context': 1})
    log_ctx.withFields({'user': 'John Doe'}).info("contextual logger")

    with open(self.filename) as f:
        content = json.loads(f.readlines()[-1])

    self.assertIn('context', content)
    self.assertEqual(content['context'], 1)
    self.assertIn('user', content)
    self.assertEqual(content['user'], 'John Doe')

    log.withFields({'company': 'Awesome Company'}).info("default logger")

    with open(self.filename) as f:
        content = json.loads(f.readlines()[-1])

    self.assertNotIn('context', content)
    self.assertIn('company', content)
    self.assertEqual(content['company'], 'Awesome Company')
def test_enabled_fields(self):
    enabled_fields = [
        ('asctime', 'service_timestamp'),
        ('levelname', 'level'),
        ('threadName', 'thread_name'),
        'message',
        ('exception', 'exception_class'),
        ('stacktrace', 'stack_trace'),
        ('funcName', 'function'),
    ]
    formatter = JsonFormatter(enabled_fields=enabled_fields)
    log = self.get_logger(formatter)
    log.info("test message")

    with open(self.filename) as f:
        content = json.loads(f.readlines()[-1])

    self.assertEqual(len(content), 7)
    self.assertIn('service_timestamp', content)
    self.assertIn('level', content)
    self.assertIn('thread_name', content)
    self.assertIn('message', content)
    self.assertIn('exception_class', content)
    self.assertIn('stack_trace', content)
    self.assertIn('function', content)
def __init__(self, config_path):
    self.config = self.get_config(config_path)

    # Setup logging tool
    logging.setLoggerClass(PyLogrus)
    logger = logging.getLogger(__name__)  # type: PyLogrus
    logger.setLevel(logging.DEBUG)

    formatter = TextFormatter(datefmt='Z', colorize=True)
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    ch.setFormatter(formatter)

    jsonformatter = JsonFormatter(datefmt='Z')
    fh = logging.FileHandler("logs/cloudalgo.log", 'w+')
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(jsonformatter)

    logger.addHandler(ch)
    logger.addHandler(fh)
    self.log = logger.withFields({"node": "cloud-algo"})
logger.warning("WARNING MESSAGE")
logger.error("ERROR MESSAGE")

print("Add a prefix to messages")
logger = logger.withPrefix("[API]")
logger.debug("DEBUG MESSAGE")

print("Add fields to a message")
logger.withFields({'error_code': 404}).info("INFO MESSAGE")

print("Output in JSON format")
logger = logging.getLogger(__name__)
enabled_fields = [
    ('name', 'logger_name'),
    ('asctime', 'service_timestamp'),
    ('levelname', 'level'),
    ('threadName', 'thread_name'),
    'message',
    ('exception', 'exception_class'),
    ('stacktrace', 'stack_trace'),
    'module',
    ('funcName', 'function'),
]
formatter = JsonFormatter(datefmt='Z', enabled_fields=enabled_fields, indent=2, sort_keys=True)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(formatter)
logger.addHandler(ch)

logger.debug("DEBUG MESSAGE")
logger.info("INFO MESSAGE")
logger.warning("WARNING MESSAGE")
logger.error("ERROR MESSAGE")
if __name__ == "__main__":
    max_iters = 100
    iters_per_epoch = 10
    learning_rate = 1e-4
    batch_size = 16

    logging.setLoggerClass(PyLogrus)
    logger = logging.getLogger(__name__)  # type: PyLogrus
    logger.setLevel(logging.DEBUG)

    formatter = TextFormatter(datefmt='Z', colorize=True)
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    ch.setFormatter(formatter)

    jsonformatter = JsonFormatter(datefmt='Z')
    fh = logging.FileHandler("logs/resnet_baseline.log", 'w+')
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(jsonformatter)

    logger.addHandler(ch)
    logger.addHandler(fh)
    log = logger.withFields({"node": "baseline"})

    model = torchvision.models.resnet18(pretrained=True)
    in_features = model.fc.in_features
    model.fc = torch.nn.Linear(in_features, 2)
    optimizer = torch.optim.SGD(model.parameters(), learning_rate)
    criterion = torch.nn.CrossEntropyLoss()
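    # Hypothetical continuation of the baseline script above: a minimal
    # training loop showing how the structured logger might be used. The
    # random tensors stand in for a real DataLoader, and the "iter"/"loss"
    # field names are assumptions, not from the original code.
    model.train()
    for it in range(max_iters):
        inputs = torch.randn(batch_size, 3, 224, 224)  # stand-in image batch
        labels = torch.randint(0, 2, (batch_size,))    # stand-in binary labels

        optimizer.zero_grad()
        outputs = model(inputs)
        loss = criterion(outputs, labels)
        loss.backward()
        optimizer.step()

        # Emit one structured record per logical "epoch" of iterations.
        if (it + 1) % iters_per_epoch == 0:
            log.withFields({"iter": it + 1, "loss": loss.item()}).info("training step")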