def get_logger(name):
    """Return a structlog-wrapped logger that writes JSON records to analysis.log.

    :param name: logger name passed to ``logging.getLogger``.
    :return: a structlog ``BoundLogger`` wrapping the stdlib logger.
    """
    root_logger = logging.getLogger(name)
    # BUG FIX: the original unconditionally added a fresh FileHandler on every
    # call, so calling get_logger(name) twice duplicated every log record.
    # Only attach the handler if an equivalent one is not already present.
    # (The dead, commented-out console-handler code was removed.)
    if not any(isinstance(h, logging.FileHandler) for h in root_logger.handlers):
        file_handler = logging.FileHandler("analysis.log")
        file_handler.setFormatter(jsonlogger.JsonFormatter("%(message)s %(name)s"))
        file_handler.setLevel(level=logging.DEBUG)
        root_logger.addHandler(file_handler)
    root_logger.setLevel(level=logging.DEBUG)
    # NOTE(review): ``logger_factory`` is not a wrap_logger parameter, so it is
    # absorbed into the initial context values — kept as-is to preserve output.
    struct_logger = structlog.wrap_logger(
        root_logger,
        context_class=threadlocal.wrap_dict(dict),
        logger_factory=stdlib.LoggerFactory(),
        wrapper_class=stdlib.BoundLogger,
        processors=[
            stdlib.filter_by_level,
            stdlib.add_logger_name,
            stdlib.add_log_level,
            stdlib.PositionalArgumentsFormatter(),
            processors.TimeStamper(fmt="iso"),
            processors.StackInfoRenderer(),
            processors.format_exc_info,
            processors.UnicodeDecoder(),
            stdlib.render_to_log_kwargs,
        ],
    )
    return struct_logger
def __init__(self, log_level="INFO", log_network="udp", log_address="/dev/log"):
    """Set up level lookup, syslog transport, and the structlog pipeline.

    :param log_level: name of the minimum level to record (e.g. ``"INFO"``).
    :param log_network: ``"udp"`` or ``"tcp"`` transport for syslog.
    :param log_address: syslog socket address.
    """
    # Map level names to the stdlib numeric constants for later lookup.
    self.level = {
        "CRITICAL": logging.CRITICAL,
        "ERROR": logging.ERROR,
        "WARNING": logging.WARNING,
        "INFO": logging.INFO,
        "DEBUG": logging.DEBUG,
        "NOTSET": logging.NOTSET,
    }
    self.log_level = log_level
    self.syslog_address = log_address
    # Datagram socket by default; a stream socket when TCP syslog is requested.
    if log_network == "tcp":
        self.socktype = socket.SOCK_STREAM
    else:
        self.socktype = socket.SOCK_DGRAM
    configure(
        context_class=threadlocal.wrap_dict(dict),
        logger_factory=stdlib.LoggerFactory(),
        wrapper_class=stdlib.BoundLogger,
        processors=[
            stdlib.filter_by_level,
            stdlib.add_logger_name,
            stdlib.add_log_level,
            stdlib.PositionalArgumentsFormatter(),
            processors.TimeStamper(fmt="iso"),
            processors.StackInfoRenderer(),
            processors.format_exc_info,
            processors.UnicodeDecoder(),
            stdlib.render_to_log_kwargs,
        ],
    )
def configure():
    """Apply the project-wide structlog configuration via ``conf``."""
    processor_chain = [
        stdlib.PositionalArgumentsFormatter(),
        processors.TimeStamper(fmt="iso"),
        processors.StackInfoRenderer(),
        processors.format_exc_info,
        processors.UnicodeDecoder(),
        stdlib.render_to_log_kwargs,
    ]
    conf(
        context_class=threadlocal.wrap_dict(dict),
        logger_factory=stdlib.LoggerFactory(),
        wrapper_class=stdlib.BoundLogger,
        processors=processor_chain,
    )
def get_logger():
    """Configure and return the ``proxyCheck`` rotating-file JSON logger.

    :return: the stdlib logger named ``"proxyCheck"``.
    """
    log_dir = "./log/"
    # FIX: ensure the log directory exists; otherwise dictConfig raises
    # FileNotFoundError when it instantiates the RotatingFileHandler.
    os.makedirs(log_dir, exist_ok=True)
    LOGGING = {
        # Basic settings
        'version': 1,
        # Whether to disable loggers that already exist
        'disable_existing_loggers': False,
        # Formatter collection
        'formatters': {
            # JSON output format:
            # [time][thread name:thread id][logger name:level name(line no)]
            # [module:function]: message
            'json': {
                'format': '[%(asctime)s][%(threadName)s:%(thread)d][%(name)s:%(levelname)s(%(lineno)d)]\n[%(module)s:%(funcName)s]:%(message)s',
                'class': 'pythonjsonlogger.jsonlogger.JsonFormatter',
            }
        },
        # Filters
        'filters': {
            'require_debug_true': {
                '()': RequireDebugTrue,
            }
        },
        # Handler collection
        'handlers': {
            # File output (no console handler configured)
            'TimeChecklog': {
                'level': 'DEBUG',
                'class': 'logging.handlers.RotatingFileHandler',
                'formatter': 'json',
                'filename': os.path.join(log_dir, 'TimeoutCheck.log'),  # output path
                'maxBytes': 1024 * 1024 * 5,  # rotate at 5 MB
                'backupCount': 5,  # number of rotated backups kept
                'encoding': 'utf8',  # file encoding
            },
        },
        # Logger collection
        'loggers': {
            'proxyCheck': {
                'handlers': ['TimeChecklog'],
                'level': 'DEBUG',
                'propagate': True,  # propagate records to parent loggers
            },
        }
    }
    configure(logger_factory=stdlib.LoggerFactory(),
              processors=[stdlib.render_to_log_kwargs])
    logging.config.dictConfig(LOGGING)
    logger = logging.getLogger("proxyCheck")
    return logger
def setup_logger(level=INFO, style: str = "keys"):
    """Set up structlog plus a stdlib stream handler on the root logger.

    :param level: stdlib logging level applied to the root logger.
    :param style: ``"json"`` for JSON output; any other value gives key=value.

    FIX: the original assigned the formatter's processor three times in a row
    (a ``ConsoleRenderer``, then a ``KeyValueRenderer``, then the if/else
    overwrote it again); the first two constructions were dead code.  The
    renderer is now selected exactly once.
    """
    configure(
        processors=[stdlib.add_log_level, stdlib.ProcessorFormatter.wrap_for_formatter],
        logger_factory=stdlib.LoggerFactory(),
    )
    # Choose the final renderer up front.
    if style == "json":
        renderer = processors.JSONRenderer()
    else:
        renderer = processors.KeyValueRenderer()
    formatter = stdlib.ProcessorFormatter(processor=renderer)
    handler = StreamHandler()
    handler.setFormatter(formatter)
    root_logger = getLogger()
    root_logger.addHandler(handler)
    root_logger.setLevel(level)
def configure_logging(cfg_type):
    """Install the dictConfig matching *cfg_type* and configure structlog.

    Unknown config types fall back to the 'LOCAL' configuration.
    """
    # dict.get with a default replaces the original if/in/else lookup.
    log_cfg = log_cfgs.get(cfg_type.upper(), log_cfgs['LOCAL'])
    logging.config.dictConfig(log_cfg)
    # Standard structlog pipeline rendering events into stdlib log() kwargs.
    configure(
        context_class=threadlocal.wrap_dict(dict),
        logger_factory=stdlib.LoggerFactory(),
        wrapper_class=stdlib.BoundLogger,
        processors=[
            stdlib.filter_by_level,
            stdlib.add_logger_name,
            stdlib.add_log_level,
            stdlib.PositionalArgumentsFormatter(),
            processors.TimeStamper(fmt="iso"),
            processors.StackInfoRenderer(),
            processors.format_exc_info,
            processors.UnicodeDecoder(),
            stdlib.render_to_log_kwargs,
        ],
    )
def configure_structlog(log_level='WARNING', log_file='kiwicom_wrap.log'):
    """Configure structlog JSON output and return a file-backed logger.

    :param log_level: level name understood by the ``logging`` module.
    :param log_file: path of the log file to append to.
    :return: the 'kiwiwrap' structlog logger with a FileHandler attached.
    """
    numeric_level = getattr(logging, log_level.upper())
    processor_chain = [
        stdlib.add_log_level,
        stdlib.PositionalArgumentsFormatter(),
        _unix_timestamper,
        structlog_pretty.NumericRounder(),
        processors.format_exc_info,
        processors.UnicodeDecoder(),
        processors.JSONRenderer(),
    ]
    configure(
        processors=processor_chain,
        logger_factory=stdlib.LoggerFactory(),
        wrapper_class=stdlib.BoundLogger,
    )
    file_handler = logging.FileHandler(filename=log_file)
    logger = get_logger('kiwiwrap')
    logger.setLevel(numeric_level)
    logger.addHandler(file_handler)
    return logger
def get_logger() -> Any:
    """Return the process-wide structlog logger, configuring it on first use.

    Subsequent calls return the cached ``LOGGER`` without reconfiguring.
    """
    global LOGGER
    # Guard clause: already configured — hand back the cached logger.
    if LOGGER:
        return LOGGER
    from structlog import configure, processors, stdlib, threadlocal, get_logger
    from pythonjsonlogger import jsonlogger
    logging.config.dictConfig(dict_config)
    configure(
        context_class=threadlocal.wrap_dict(dict),
        logger_factory=stdlib.LoggerFactory(),
        wrapper_class=stdlib.BoundLogger,
        processors=[
            # Drop records below the configured level.
            stdlib.filter_by_level,
            # Adds logger=module_name (e.g. __main__).
            stdlib.add_logger_name,
            # Uppercase structlog's event name so it is not confused with AWS events.
            event_uppercase,
            # Censor secure data.
            censor_header,
            # Allow %-style string interpolation of positional args.
            stdlib.PositionalArgumentsFormatter(),
            # Render timestamps as ISO 8601.
            processors.TimeStamper(fmt="iso"),
            # Include the stack dump when stack_info=True.
            processors.StackInfoRenderer(),
            # Include the exception when exc_info=True (log.exception etc.).
            processors.format_exc_info,
            # Decode unicode values in key/value pairs.
            processors.UnicodeDecoder(),
            # Build the args/kwargs for the stdlib log() call.
            stdlib.render_to_log_kwargs,
        ],
        cache_logger_on_first_use=True,
    )
    LOGGER = get_logger()
    return LOGGER
# NOTE(review): this chunk begins mid-way through a logging.config.dictConfig
# call — the opening of the config dict lies outside this view, so only
# comments are added here.
            'class': 'pythonjsonlogger.jsonlogger.JsonFormatter'
        }
    },
    'handlers': {
        # Stream handler emitting JSON-formatted records.
        'json': {
            'class': 'logging.StreamHandler',
            'formatter': 'json'
        }
    },
    'loggers': {
        # Root logger: everything at DEBUG and above goes through 'json'.
        '': {
            'handlers': ['json'],
            'level': logging.DEBUG
        }
    }
})
# Standard structlog pipeline rendering events into stdlib log() kwargs.
configure(context_class=threadlocal.wrap_dict(dict),
          logger_factory=stdlib.LoggerFactory(),
          wrapper_class=stdlib.BoundLogger,
          processors=[
              stdlib.filter_by_level,
              stdlib.add_logger_name,
              stdlib.add_log_level,
              stdlib.PositionalArgumentsFormatter(),
              processors.TimeStamper(fmt="iso"),
              processors.StackInfoRenderer(),
              processors.format_exc_info,
              processors.UnicodeDecoder(),
              stdlib.render_to_log_kwargs
          ])
log = structlog.getLogger()
def get_logging_conf() -> dict:
    """Build and return the logging config as a dict.

    Also ensures the log directory exists and configures structlog.

    :return: configuration suitable for ``logging.config.dictConfig``.
    """
    filename = (
        "/home/as/mosquito_monitor.log"
    )
    dir_name = osp.dirname(osp.normpath(filename))
    pathlib.Path(dir_name).mkdir(parents=True, exist_ok=True)
    # FIX: the original wrapped this literal in ``try: ... except SyntaxError``
    # and raised ConfigException, but a dict literal cannot raise SyntaxError
    # at runtime — that handler was unreachable dead code and is removed.
    logging_conf = {
        "version": 1,
        "formatters": {
            "simple": {
                "format": "%(levelname)-6s :: %(name)-5s :: "
                          "%(funcName)-5s :: %(message)s"
            },
            "precise": {
                "format": "%(asctime)s :: %(levelname)-6s :: %(name)-5s ::"
                          " %(funcName)-5s :: %(message)s"
            },
            'json_formatter': {
                'format': '%(message)s %(lineno)d '
                          '%(funcName)s %(filename)s',
                'class': 'pythonjsonlogger.jsonlogger.JsonFormatter'
            }
        },
        "handlers": {
            "console": {
                "class": "logging.StreamHandler",
                "level": "DEBUG",
                "formatter": "simple"
            },
            # Midnight-rotating JSON file handler, keeping 4 backups.
            'json': {
                'formatter': 'json_formatter',
                'backupCount': 4,
                'class': 'logging.handlers.TimedRotatingFileHandler',
                'encoding': 'ASCII',
                'filename': filename,
                'interval': 1,
                'when': 'midnight',
                'level': 'DEBUG'
            }
        },
        "loggers": {
            "MOSQUITO_MONITOR": {
                "level": "DEBUG",
                "propagate": "no",
                "handlers": ["json", "console"]
            },
            "local_mqtt_client.local_mqtt_client": {
                "level": "DEBUG",
                "propagate": "no",
                "handlers": ["json", "console"]
            }
        }
    }
    configure(
        context_class=threadlocal.wrap_dict(dict),
        logger_factory=stdlib.LoggerFactory(),
        wrapper_class=stdlib.BoundLogger,
        processors=[
            stdlib.filter_by_level,
            stdlib.add_logger_name,
            stdlib.add_log_level,
            stdlib.PositionalArgumentsFormatter(),
            processors.TimeStamper(fmt='iso'),
            processors.StackInfoRenderer(),
            processors.format_exc_info,
            processors.UnicodeDecoder(),
            stdlib.render_to_log_kwargs,
        ]
    )
    return logging_conf
def configure_logging(debug=False, syslog=False, silenced_loggers=None, level_overrides=None):
    """Configure structlog and build the stdlib dictConfig for the app.

    :param debug: enable DEBUG level and human-readable console rendering.
    :param syslog: route raw output to syslog instead of a stream handler.
    :param silenced_loggers: logger names to route to a NullHandler.
    :param level_overrides: per-logger level name overrides.
    :return: a dict suitable for ``logging.config.dictConfig``.
    """
    silenced_loggers = silenced_loggers or []
    level_overrides = level_overrides or {}
    level = 'DEBUG' if debug else 'INFO'
    # Human-readable console output in debug mode, JSON/logstash otherwise.
    if debug:
        renderers = [
            dev.ConsoleRenderer(),
        ]
    else:
        renderers = [
            logstash_processor,
            processors.JSONRenderer(separators=(',', ':')),
            add_syslog_program(syslog),
        ]
    # NOTE(review): other configs in this codebase pass fmt="iso"; structlog's
    # TimeStamper compares the fmt case-insensitively, so "ISO" is kept as-is —
    # confirm against the pinned structlog version before normalizing.
    pipeline = [
        stdlib.filter_by_level,
        stdlib.add_logger_name,
        stdlib.add_log_level,
        fix_logger_name,
        format_request,
        ensure_event,
        stdlib.PositionalArgumentsFormatter(),
        processors.TimeStamper(fmt="ISO", key='@timestamp'),
        processors.StackInfoRenderer(),
        processors.format_exc_info,
    ] + renderers
    configure(
        processors=pipeline,
        context_class=dict,
        logger_factory=stdlib.LoggerFactory(),
        wrapper_class=stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )
    # Renamed locals: avoid shadowing the structlog module and the ambiguous 'l'.
    structlog_cfg = {'handlers': ['raw'], 'level': level, 'propagate': False}
    null_cfg = {'handlers': ['null'], 'propagate': False}
    loggers = {
        logger_name: root(level_overrides.get(logger_name, level))
        for logger_name, _, _ in logging_tree.tree()[2]
    }
    loggers['feedhq'] = structlog_cfg
    for nulled_logger in silenced_loggers:
        loggers[nulled_logger] = null_cfg
    if syslog:
        raw = {
            'level': level,
            'class': 'logging.handlers.SysLogHandler',
            'address': '/dev/log',
            'facility': 'local0',
        }
    else:
        raw = {
            'level': level,
            'class': 'logging.StreamHandler',
        }
    return {
        'version': 1,
        'level': level,
        'handlers': {
            'root': {
                'level': level,
                '()': StructlogHandler,
            },
            'raw': raw,
            'null': {
                'class': 'logging.NullHandler',
            },
        },
        'loggers': loggers,
        'root': root(level),
    }