def get_logger(name):
    """Return a structlog-wrapped logger that writes JSON records to analysis.log.

    Args:
        name: Name passed to ``logging.getLogger``; also rendered by the
            ``add_logger_name`` processor.

    Returns:
        A structlog ``stdlib.BoundLogger`` wrapping the stdlib logger.
    """
    stdlib_logger = logging.getLogger(name)
    stdlib_logger.setLevel(level=logging.DEBUG)

    # Only attach the file handler once: the original added a fresh
    # FileHandler on every call, so repeated get_logger() calls duplicated
    # every record in analysis.log.
    if not any(isinstance(h, logging.FileHandler) for h in stdlib_logger.handlers):
        file_handler = logging.FileHandler("analysis.log")
        file_handler.setFormatter(jsonlogger.JsonFormatter("%(message)s %(name)s"))
        file_handler.setLevel(level=logging.DEBUG)
        stdlib_logger.addHandler(file_handler)

    # NOTE: ``structlog.wrap_logger`` has no ``logger_factory`` parameter;
    # the original passed one, which was silently absorbed into
    # ``**initial_values`` and bound into every event dict. It is dropped here.
    struct_logger = structlog.wrap_logger(
        stdlib_logger,
        context_class=threadlocal.wrap_dict(dict),
        wrapper_class=stdlib.BoundLogger,
        processors=[
            stdlib.filter_by_level,
            stdlib.add_logger_name,
            stdlib.add_log_level,
            stdlib.PositionalArgumentsFormatter(),
            processors.TimeStamper(fmt="iso"),
            processors.StackInfoRenderer(),
            processors.format_exc_info,
            processors.UnicodeDecoder(),
            stdlib.render_to_log_kwargs,
        ],
    )
    return struct_logger
def __init__(self, log_level="INFO", log_network="udp", log_address="/dev/log"):
    """Initialize the logger.

    Args:
        log_level: Level name selecting a value from ``self.level``.
        log_network: "udp" (default) or "tcp"; picks the syslog socket type.
        log_address: Address handed to the syslog handler (default /dev/log).
    """
    # Map level names to their stdlib numeric values.
    self.level = {
        name: getattr(logging, name)
        for name in ("CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET")
    }
    self.log_level = log_level
    self.syslog_address = log_address
    # TCP means a stream socket; anything else falls back to datagram (UDP).
    self.socktype = (
        socket.SOCK_STREAM if log_network == "tcp" else socket.SOCK_DGRAM
    )

    processor_chain = [
        stdlib.filter_by_level,
        stdlib.add_logger_name,
        stdlib.add_log_level,
        stdlib.PositionalArgumentsFormatter(),
        processors.TimeStamper(fmt="iso"),
        processors.StackInfoRenderer(),
        processors.format_exc_info,
        processors.UnicodeDecoder(),
        stdlib.render_to_log_kwargs,
    ]
    configure(
        context_class=threadlocal.wrap_dict(dict),
        logger_factory=stdlib.LoggerFactory(),
        wrapper_class=stdlib.BoundLogger,
        processors=processor_chain,
    )
def configure():
    """Configure structlog with the project's standard processor chain."""
    # Name the chain before handing it over; order matters — rendering to
    # stdlib log() kwargs must come last.
    chain = [
        stdlib.PositionalArgumentsFormatter(),
        processors.TimeStamper(fmt="iso"),
        processors.StackInfoRenderer(),
        processors.format_exc_info,
        processors.UnicodeDecoder(),
        stdlib.render_to_log_kwargs,
    ]
    conf(
        context_class=threadlocal.wrap_dict(dict),
        logger_factory=stdlib.LoggerFactory(),
        wrapper_class=stdlib.BoundLogger,
        processors=chain,
    )
def configure_logging(cfg_type):
    """Apply the logging dictConfig for *cfg_type* and configure structlog.

    Args:
        cfg_type: Key into the module-level ``log_cfgs`` mapping, matched
            case-insensitively; unknown types fall back to the 'LOCAL' config.
    """
    # dict.get with a default replaces the original None-init + if/else lookup.
    log_cfg = log_cfgs.get(cfg_type.upper(), log_cfgs['LOCAL'])
    logging.config.dictConfig(log_cfg)

    # Configure the structlog processor pipeline on top of stdlib logging.
    configure(
        context_class=threadlocal.wrap_dict(dict),
        logger_factory=stdlib.LoggerFactory(),
        wrapper_class=stdlib.BoundLogger,
        processors=[
            stdlib.filter_by_level,
            stdlib.add_logger_name,
            stdlib.add_log_level,
            stdlib.PositionalArgumentsFormatter(),
            processors.TimeStamper(fmt="iso"),
            processors.StackInfoRenderer(),
            processors.format_exc_info,
            processors.UnicodeDecoder(),
            stdlib.render_to_log_kwargs,
        ],
    )
def configure_structlog(log_level='WARNING', log_file='kiwicom_wrap.log'):
    """Configure structlog for JSON output and return a file-backed logger.

    Args:
        log_level: Level name, matched case-insensitively (e.g. 'WARNING').
        log_file: Path of the file the handler writes to.

    Returns:
        The 'kiwiwrap' logger with level set and a FileHandler attached.

    Raises:
        AttributeError: If *log_level* is not a valid logging level name.
    """
    lvl = getattr(logging, log_level.upper())
    configure(
        processors=[
            stdlib.add_log_level,
            stdlib.PositionalArgumentsFormatter(),
            _unix_timestamper,
            structlog_pretty.NumericRounder(),
            processors.format_exc_info,
            processors.UnicodeDecoder(),
            processors.JSONRenderer(),
        ],
        logger_factory=stdlib.LoggerFactory(),
        wrapper_class=stdlib.BoundLogger,
    )
    logger = get_logger('kiwiwrap')
    logger.setLevel(lvl)
    # Attach the file handler only once: the original created and added a new
    # FileHandler on every call, duplicating every record on reconfiguration.
    if not any(isinstance(h, logging.FileHandler) for h in logger.handlers):
        logger.addHandler(logging.FileHandler(filename=log_file))
    return logger
def get_logger() -> Any:
    """Return the process-wide structlog logger, configuring it on first use.

    Lazily imports structlog, applies the module-level ``dict_config`` and the
    structlog processor pipeline, then caches the logger in the ``LOGGER``
    global so subsequent calls are cheap.
    """
    global LOGGER
    # Explicit None check; the original used truthiness (`if not LOGGER`),
    # which is equivalent here since a configured logger object is truthy.
    if LOGGER is None:
        from structlog import configure, processors, stdlib, threadlocal
        # Alias to avoid shadowing this function's own name, which the
        # original import did.
        from structlog import get_logger as structlog_get_logger
        # Imported so the JsonFormatter referenced by dotted path inside
        # dict_config fails fast here if the package is missing.
        from pythonjsonlogger import jsonlogger

        logging.config.dictConfig(dict_config)
        configure(
            context_class=threadlocal.wrap_dict(dict),
            logger_factory=stdlib.LoggerFactory(),
            wrapper_class=stdlib.BoundLogger,
            processors=[
                # Filter only the required log levels into the log output
                stdlib.filter_by_level,
                # Adds logger=module_name (e.g __main__)
                stdlib.add_logger_name,
                # Uppercase structlog's event name which shouldn't be convoluted with AWS events.
                event_uppercase,
                # Censor secure data
                censor_header,
                # Allow for string interpolation
                stdlib.PositionalArgumentsFormatter(),
                # Render timestamps to ISO 8601
                processors.TimeStamper(fmt="iso"),
                # Include the stack dump when stack_info=True
                processors.StackInfoRenderer(),
                # Include the application exception when exc_info=True
                # e.g log.exception() or log.warning(exc_info=True)'s behavior
                processors.format_exc_info,
                # Decodes the unicode values in any kv pairs
                processors.UnicodeDecoder(),
                # Creates the necessary args, kwargs for log()
                stdlib.render_to_log_kwargs,
            ],
            cache_logger_on_first_use=True,
        )
        LOGGER = structlog_get_logger()
    return LOGGER
'class': 'pythonjsonlogger.jsonlogger.JsonFormatter' } }, 'handlers': { 'json': { 'class': 'logging.StreamHandler', 'formatter': 'json' } }, 'loggers': { '': { 'handlers': ['json'], 'level': logging.DEBUG } } }) configure(context_class=threadlocal.wrap_dict(dict), logger_factory=stdlib.LoggerFactory(), wrapper_class=stdlib.BoundLogger, processors=[ stdlib.filter_by_level, stdlib.add_logger_name, stdlib.add_log_level, stdlib.PositionalArgumentsFormatter(), processors.TimeStamper(fmt="iso"), processors.StackInfoRenderer(), processors.format_exc_info, processors.UnicodeDecoder(), stdlib.render_to_log_kwargs ]) log = structlog.getLogger()
}, 'loggers': { '': { 'handlers': ['json'], 'level': logging.INFO } } }) configure( context_class=threadlocal.wrap_dict(dict), logger_factory=stdlib.LoggerFactory(), wrapper_class=stdlib.BoundLogger, processors=[ stdlib.filter_by_level, stdlib.add_logger_name, stdlib.add_log_level, stdlib.PositionalArgumentsFormatter(), processors.TimeStamper(fmt="iso"), processors.StackInfoRenderer(), processors.format_exc_info, processors.UnicodeDecoder(), stdlib.render_to_log_kwargs] )
def get_logging_conf(filename: str = "/home/as/mosquito_monitor.log") -> dict:
    """Build the logging dictConfig, ensure the log directory exists, and configure structlog.

    Args:
        filename: Log file path the JSON handler writes to. Defaults to the
            previously hard-coded path, so existing callers are unaffected.

    Returns:
        dict: logging configuration suitable for ``logging.config.dictConfig``.
    """
    dir_name = osp.dirname(osp.normpath(filename))
    # Create the log directory up front so the TimedRotatingFileHandler can open the file.
    pathlib.Path(dir_name).mkdir(parents=True, exist_ok=True)

    # NOTE: the original wrapped this literal in try/except SyntaxError, but
    # evaluating a dict literal can never raise SyntaxError at runtime, so the
    # handler (and its ConfigException re-raise) was unreachable dead code.
    logging_conf = {
        "version": 1,
        "formatters": {
            "simple": {
                "format": "%(levelname)-6s :: %(name)-5s :: "
                          "%(funcName)-5s :: %(message)s"
            },
            "precise": {
                "format": "%(asctime)s :: %(levelname)-6s :: %(name)-5s ::"
                          " %(funcName)-5s :: %(message)s"
            },
            'json_formatter': {
                'format': '%(message)s %(lineno)d '
                          '%(funcName)s %(filename)s',
                'class': 'pythonjsonlogger.jsonlogger.JsonFormatter'
            }
        },
        "handlers": {
            "console": {
                "class": "logging.StreamHandler",
                "level": "DEBUG",
                "formatter": "simple"
            },
            'json': {
                'formatter': 'json_formatter',
                'backupCount': 4,
                'class': 'logging.handlers.TimedRotatingFileHandler',
                'encoding': 'ASCII',
                'filename': filename,
                'interval': 1,
                'when': 'midnight',
                'level': 'DEBUG'
            }
        },
        "loggers": {
            "MOSQUITO_MONITOR": {
                "level": "DEBUG",
                "propagate": "no",
                "handlers": ["json", "console"]
            },
            "local_mqtt_client.local_mqtt_client": {
                "level": "DEBUG",
                "propagate": "no",
                "handlers": ["json", "console"]
            }
        }
    }

    # Configure the structlog processor pipeline on top of stdlib logging.
    configure(
        context_class=threadlocal.wrap_dict(dict),
        logger_factory=stdlib.LoggerFactory(),
        wrapper_class=stdlib.BoundLogger,
        processors=[
            stdlib.filter_by_level,
            stdlib.add_logger_name,
            stdlib.add_log_level,
            stdlib.PositionalArgumentsFormatter(),
            processors.TimeStamper(fmt='iso'),
            processors.StackInfoRenderer(),
            processors.format_exc_info,
            processors.UnicodeDecoder(),
            stdlib.render_to_log_kwargs,
        ]
    )
    return logging_conf