def make_logger(scraper):
    """
    Configure logging for a scraper run.

    Attaches two handlers to the root logger: a DEBUG-level JSON file
    handler (whose output is later used to generate reports) and an
    INFO-level console handler.  Returns a TaskAdapter wrapping the
    scraper's named logger.
    """
    root = logging.getLogger('')
    root.setLevel(logging.DEBUG)

    # Quiet the chatty "requests" library down to warnings.
    logging.getLogger("requests").setLevel(logging.WARNING)

    # Everything (DEBUG and up) is written to the JSON log file.
    file_handler = logging.FileHandler(log_path(scraper))
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(jsonlogger.JsonFormatter(make_json_format()))
    root.addHandler(file_handler)

    # Human-readable INFO output on the console.
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(logging.INFO)
    stream_handler.setFormatter(
        logging.Formatter('%(name)s [%(levelname)-8s]: %(message)s'))
    root.addHandler(stream_handler)

    return TaskAdapter(logging.getLogger(scraper.name), scraper)
def __init__(self):
    """Initialise the JSON logging thread; recording is off until enabled."""
    super(JsonLogThread, self).__init__()
    # Flag used to request a shutdown of the thread.
    self._should_shutdown = False
    self.json_logger = logging.getLogger('uStim')
    self.json_logger.setLevel(logging.DEBUG)
    self.json_logger.propagate = False  # Prevents messages going to the console.
    self.formatter = jsonlogger.JsonFormatter()
    # Initially the fileHandler is None.  Set it using the set_filename() method.
    self.fileHandler = None
    # Create a persistent, thread-safe queue to use when logging
    # things to disk.
    self.log_queue = Queue.Queue()
    # Mutex for controlling access to self.json_logger
    self.log_lock = threading.Lock()
    # Flag determines whether items are accepted into the queue
    self.recording = False
    # Time base (seconds since epoch) captured at construction.
    self.T0 = time.time()
def testFormatKeys(self):
    """Every standard LogRecord attribute named in the format string must
    appear as a key in the emitted JSON record."""
    supported_keys = [
        'asctime', 'created', 'filename', 'funcName', 'levelname',
        'levelno', 'lineno', 'module', 'msecs', 'message', 'name',
        'pathname', 'process', 'processName', 'relativeCreated',
        'thread', 'threadName',
    ]
    # Build "%(asctime) %(created) ..." (generator expression instead of a
    # lambda assigned to a name, which PEP 8 discourages).
    custom_format = ' '.join('%({0:s})'.format(key) for key in supported_keys)
    fr = jsonlogger.JsonFormatter(custom_format)
    self.logHandler.setFormatter(fr)

    msg = "testing logging format"
    self.logger.info(msg)
    log_json = json.loads(self.buffer.getvalue())

    # BUG FIX: the original only asserted when the key WAS present
    # (`if key in log_json: assertTrue(True)`), so a missing key passed
    # silently.  assertIn actually enforces presence.
    for supported_key in supported_keys:
        self.assertIn(supported_key, log_json)
def testFormatKeys(self):
    """Every standard LogRecord attribute named in the format string must
    appear as a key in the emitted JSON record."""
    supported_keys = [
        'asctime', 'created', 'filename', 'funcName', 'levelname',
        'levelno', 'lineno', 'module', 'msecs', 'message', 'name',
        'pathname', 'process', 'processName', 'relativeCreated',
        'thread', 'threadName',
    ]
    log_format = ' '.join(['%({})'] * len(supported_keys))
    custom_format = log_format.format(*supported_keys)
    fr = jsonlogger.JsonFormatter(custom_format)
    self.logHandler.setFormatter(fr)

    msg = "testing logging format"
    self.logger.info(msg)
    logJson = json.loads(self.buffer.getvalue())

    for supported_key in supported_keys:
        # FIX: dict.has_key() is deprecated and removed in Python 3;
        # assertIn is equivalent and gives a better failure message.
        self.assertIn(supported_key, logJson)
def testJsonDefaultEncoder(self):
    """A datetime value logged inside a dict is serialised by the default
    JSON encoder as an ISO-8601 string."""
    formatter = jsonlogger.JsonFormatter()
    self.logHandler.setFormatter(formatter)

    self.logger.info({"adate": datetime.datetime(1999, 12, 31, 23, 59)})

    logged = json.loads(self.buffer.getvalue())
    self.assertEqual(logged.get("adate"), "1999-12-31T23:59")
def testFormatParsingWithParentheses(self):
    """Literal parentheses wrapped around a %(...)s token must not confuse
    the formatter's key parsing."""
    formatter = jsonlogger.JsonFormatter('(%(name)s) %(message)s')
    self.logHandler.setFormatter(formatter)

    self.logger.info('some message')
    parsed = json.loads(self.buffer.getvalue())

    self.assertIn('name', parsed)
    self.assertIn('message', parsed)
def testUnknownFormatKey(self):
    """Logging through a logger with an unknown key in the format string
    must not propagate an exception to the caller."""
    fr = jsonlogger.JsonFormatter('%(unknown_key)s %(message)s')
    self.logHandler.setFormatter(fr)
    msg = "testing unknown logging format"
    try:
        self.logger.info(msg)
    # FIX: the bare `except:` also trapped SystemExit/KeyboardInterrupt;
    # self.fail is the idiomatic replacement for assertTrue(False, ...).
    except Exception:
        self.fail("Should succeed")
def testDefaultFormat(self):
    """With no explicit format string, the record text is emitted under
    the "message" key."""
    formatter = jsonlogger.JsonFormatter()
    self.logHandler.setFormatter(formatter)

    text = "testing logging format"
    self.logger.info(text)

    logged = json.loads(self.buffer.getvalue())
    self.assertEqual(logged["message"], text)
def testUnknownFormatKey(self):
    """Formatting a record that lacks a referenced key raises KeyError."""
    fr = jsonlogger.JsonFormatter('%(unknown_key)s %(message)s')
    # BUG FIX: the original `except KeyError: self.assertTrue("KeyError
    # exception thrown")` asserted a non-empty string (always true), and
    # passed silently when no exception occurred at all.  assertRaises on
    # format() actually verifies the KeyError (matching the sibling test
    # that exercises fr.format directly).
    self.assertRaises(KeyError, fr.format,
                      logging.makeLogRecord({"msg": "testing logging format"}))
def testJsonCustomDefault(self):
    """A user-supplied json_default callable handles otherwise
    non-serialisable values, while plain values pass through untouched."""
    def custom(o):
        return "very custom"

    formatter = jsonlogger.JsonFormatter(json_default=custom)
    self.logHandler.setFormatter(formatter)

    self.logger.info({
        "adate": datetime.datetime(1999, 12, 31, 23, 59),
        "normal": "value",
    })
    logged = json.loads(self.buffer.getvalue())

    self.assertEqual(logged.get("adate"), "very custom")
    self.assertEqual(logged.get("normal"), "value")
def testUnknownFormatKey(self):
    """format() raises KeyError for an unknown key, but logging through a
    logger should still succeed (the error does not reach the caller)."""
    fr = jsonlogger.JsonFormatter('%(unknown_key)s %(message)s')
    self.assertRaises(KeyError, fr.format,
                      logging.makeLogRecord({"msg": "test"}))
    self.logHandler.setFormatter(fr)
    msg = "testing unknown logging format"
    try:
        self.logger.info(msg)
    # FIX: the bare `except:` also trapped SystemExit/KeyboardInterrupt;
    # self.fail is the idiomatic replacement for assertTrue(False, ...).
    except Exception:
        self.fail("Should succeed")
def testLogExtra(self):
    """Keys passed via ``extra`` are merged into the JSON output (non-string
    keys come back stringified) and the positional message is preserved."""
    formatter = jsonlogger.JsonFormatter()
    self.logHandler.setFormatter(formatter)

    extra = {"text": "testing logging", "num": 1, 5: "9",
             "nested": {"more": "data"}}
    self.logger.info("hello", extra=extra)
    logged = json.loads(self.buffer.getvalue())

    for key, expected in (("text", extra["text"]),
                          ("num", extra["num"]),
                          ("5", extra[5]),
                          ("nested", extra["nested"])):
        self.assertEqual(logged.get(key), expected)
    self.assertEqual(logged["message"], "hello")
def testLogADict(self):
    """Logging a dict spreads its items into the JSON record; the "message"
    field itself comes back as None."""
    formatter = jsonlogger.JsonFormatter()
    self.logHandler.setFormatter(formatter)

    payload = {"text": "testing logging", "num": 1, 5: "9",
               "nested": {"more": "data"}}
    self.logger.info(payload)
    logged = json.loads(self.buffer.getvalue())

    self.assertEqual(logged.get("text"), payload["text"])
    self.assertEqual(logged.get("num"), payload["num"])
    self.assertEqual(logged.get("5"), payload[5])
    self.assertEqual(logged.get("nested"), payload["nested"])
    self.assertEqual(logged["message"], None)
def testDefaultFormatKeys(self):
    """With the default formatter configuration, every standard LogRecord
    attribute should appear as a key in the JSON output."""
    expected_keys = (
        'asctime', 'created', 'filename', 'funcName', 'levelname',
        'levelno', 'lineno', 'module', 'msecs', 'message', 'name',
        'pathname', 'process', 'processName', 'relativeCreated',
        'thread', 'threadName',
    )
    self.logHandler.setFormatter(jsonlogger.JsonFormatter())

    self.logger.info("testing logging format")
    record = json.loads(self.buffer.getvalue())

    for key in expected_keys:
        self.assertIn(key, record)
def testLogException(self):
    """logger.exception() attaches structured exception info -- type,
    value, and a one-frame traceback -- to the JSON record."""
    fr = jsonlogger.JsonFormatter('%(levelname)s %(message)s')
    self.logHandler.setFormatter(fr)
    raise_line_no = None
    # noinspection PyBroadException
    try:
        raise_line_no = lineno() + 1
        raise Exception('Some exception message')
    except Exception:
        self.logger.exception('Some log message')
    # FIX: removed a leftover debug `print self.buffer.getvalue()`.
    log_json = json.loads(self.buffer.getvalue())
    self.assertEqual(log_json['message'], 'Some log message')
    # FIX: assertEquals is a deprecated alias of assertEqual.
    self.assertEqual(log_json['excType'], 'exceptions.Exception')
    self.assertEqual(log_json['excValue'], 'Some exception message')
    self.assertEqual(len(log_json['excTrace']), 1)
    trace_frame = log_json['excTrace'][0]
    self.assertEqual(trace_frame['name'], 'testLogException')
    self.assertEqual(trace_frame['lineno'], raise_line_no)
    self.assertTrue(trace_frame['filename'].endswith('tests.py'))
import sys import os DYNAMODB = boto3.resource('dynamodb') TABLE = "fang" QUEUE = "producer" SQS = boto3.client("sqs") # SETUP LOGGING import logging from python-json-logger import jsonlogger LOG = logging.getLogger() LOG.setLevel(logging.INFO) logHandler = logging.StreamHandler() formatter = jsonlogger.JsonFormatter() logHandler.setFormatter(formatter) LOG.addHandler(logHandler) def scan_table(table): """Scans table and return results""" LOG.info(f"Scanning Table {table}") producer_table = DYNAMODB.Table(table) response = producer_table.scan() items = response['Items'] LOG.info(f"Found {len(items)} Items") return items def send_sqs_msg(msg, queue_name, delay=0): """Send SQS Message