def configure_logging(
    logger_level_config: Dict[str, str] = None,
    colorize: bool = True,
    log_json: bool = False,
    log_file: str = None,
    disable_debug_logfile: bool = False,
    debug_log_file_name: str = None,
    _first_party_packages: FrozenSet[str] = _FIRST_PARTY_PACKAGES,
    cache_logger_on_first_use: bool = True,
):
    """Configure structlog and the stdlib ``logging`` machinery.

    :param logger_level_config: mapping of logger name -> level name; the
        entries ``'filelock'`` and ``''`` (root) receive defaults when
        missing.  NOTE: a mapping passed by the caller is mutated in place.
    :param colorize: use the colorized console renderer (ignored when
        ``log_json`` is set or a ``log_file`` is given)
    :param log_json: render all records as JSON
    :param log_file: write the main log to this file instead of stderr
    :param disable_debug_logfile: skip the rotating debug logfile handler
    :param debug_log_file_name: name for the debug logfile; defaults to a
        timestamped ``raiden-debug_<iso-timestamp>.log``
    :param _first_party_packages: loggers forced to ``DEBUG`` so the
        handler filters can decide what is actually emitted
    :param cache_logger_on_first_use: passed through to structlog
    """
    structlog.reset_defaults()

    logger_level_config = logger_level_config or dict()
    logger_level_config.setdefault('filelock', 'ERROR')
    logger_level_config.setdefault('', DEFAULT_LOG_LEVEL)

    # Shared processor chain; also applied (via ``foreign_pre_chain``) to
    # records that originate from plain stdlib loggers.
    processors = [
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
    ]

    if log_json:
        formatter = 'json'
    elif colorize and not log_file:
        formatter = 'colorized'
    else:
        formatter = 'plain'

    # Strip access tokens from log output and from formatted tracebacks.
    redact = redactor({
        re.compile(r'\b(access_?token=)([a-z0-9_-]+)', re.I): r'\1<redacted>',
    })
    _wrap_tracebackexception_format(redact)

    enabled_log_handlers = []
    if log_file:
        enabled_log_handlers.append('file')
    else:
        # Even though the 'file' handler is not enabled, dictConfig still
        # instantiates it, so its filename must not be None.
        # BUGFIX: use mkstemp() instead of the deprecated, race-prone
        # mktemp() — mkstemp atomically creates the file with mode 0600,
        # closing the symlink/tmp-race window mktemp leaves open.
        tmp_fd, log_file = tempfile.mkstemp(suffix='.log')
        os.close(tmp_fd)
        enabled_log_handlers.append('default')

    if not disable_debug_logfile:
        enabled_log_handlers.append('debug-info')

    if debug_log_file_name is None:
        time = datetime.datetime.utcnow().isoformat()
        debug_log_file_name = f'raiden-debug_{time}.log'

    logging.config.dictConfig(
        {
            'version': 1,
            'disable_existing_loggers': False,
            'filters': {
                'user_filter': {
                    '()': RaidenFilter,
                    'log_level_config': logger_level_config,
                },
                # The debug logfile always captures 'raiden' at DEBUG,
                # independent of the user's level configuration.
                'raiden_debug_file_filter': {
                    '()': RaidenFilter,
                    'log_level_config': {
                        '': DEFAULT_LOG_LEVEL,
                        'raiden': 'DEBUG',
                    },
                },
            },
            'formatters': {
                'plain': {
                    '()': structlog.stdlib.ProcessorFormatter,
                    'processor': _chain(structlog.dev.ConsoleRenderer(colors=False), redact),
                    'foreign_pre_chain': processors,
                },
                'json': {
                    '()': structlog.stdlib.ProcessorFormatter,
                    'processor': _chain(structlog.processors.JSONRenderer(), redact),
                    'foreign_pre_chain': processors,
                },
                'colorized': {
                    '()': structlog.stdlib.ProcessorFormatter,
                    'processor': _chain(structlog.dev.ConsoleRenderer(colors=True), redact),
                    'foreign_pre_chain': processors,
                },
                'debug': {
                    '()': structlog.stdlib.ProcessorFormatter,
                    'processor': _chain(structlog.processors.JSONRenderer(), redact),
                    'foreign_pre_chain': processors,
                },
            },
            'handlers': {
                'file': {
                    'class': 'logging.handlers.WatchedFileHandler',
                    'filename': log_file,
                    'level': 'DEBUG',
                    'formatter': formatter,
                    'filters': ['user_filter'],
                },
                'default': {
                    'class': 'logging.StreamHandler',
                    'level': 'DEBUG',
                    'formatter': formatter,
                    'filters': ['user_filter'],
                },
                'debug-info': {
                    'class': 'logging.handlers.RotatingFileHandler',
                    'filename': debug_log_file_name,
                    'level': 'DEBUG',
                    'formatter': 'debug',
                    'maxBytes': MAX_LOG_FILE_SIZE,
                    'backupCount': LOG_BACKUP_COUNT,
                    'filters': ['raiden_debug_file_filter'],
                },
            },
            'loggers': {
                '': {
                    'handlers': enabled_log_handlers,
                    'propagate': True,
                },
            },
        },
    )
    structlog.configure(
        processors=processors + [
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        wrapper_class=structlog.stdlib.BoundLogger,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=cache_logger_on_first_use,
    )

    # Set the root logger to the configured level so every record reaches
    # the handlers, where the `RaidenFilter` instances do the filtering.
    structlog.get_logger('').setLevel(
        logger_level_config.get('', DEFAULT_LOG_LEVEL))
    for package in _first_party_packages:
        structlog.get_logger(package).setLevel('DEBUG')

    # Roll over the RotatingFileHandler on startup, to split logs per-session.
    root = logging.getLogger()
    for handler in root.handlers:
        if isinstance(handler, logging.handlers.RotatingFileHandler):
            handler.flush()
            if os.stat(handler.baseFilename).st_size > 0:
                handler.doRollover()

    # Fix logging of py-evm (it uses a custom Trace logger from the logging
    # library).  If py-evm is not installed the import throws, hence the
    # try-except block; for some reason this did not work from conftest.py.
    try:
        from eth.tools.logging import setup_trace_logging
        setup_trace_logging()
    except ImportError:
        pass
def configure_logging(
    logger_level_config: Dict[str, str] = None,
    colorize: bool = True,
    log_json: bool = False,
    log_file: str = None,
    disable_debug_logfile: bool = False,
    debug_log_file_path: str = None,
    cache_logger_on_first_use: bool = True,
    _first_party_packages: FrozenSet[str] = _FIRST_PARTY_PACKAGES,
    _debug_log_file_additional_level_filters: Dict[str, str] = None,
) -> None:
    """Set up structlog and stdlib logging.

    Records go either to stderr or to ``log_file`` (plain, colorized or
    JSON rendering) and, unless disabled, additionally to a rotating JSON
    debug logfile.  Sensitive values are redacted via ``LOG_BLACKLIST``.
    """
    structlog.reset_defaults()

    logger_level_config = logger_level_config or dict()
    logger_level_config.setdefault("filelock", "ERROR")
    logger_level_config.setdefault("", DEFAULT_LOG_LEVEL)

    # Processor chain shared by structlog loggers and (as the
    # ``foreign_pre_chain``) plain stdlib loggers.
    pre_chain = [
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        add_greenlet_name,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S.%f"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
    ]

    if log_json:
        formatter = "json"
    else:
        formatter = "colorized" if colorize and not log_file else "plain"

    redact = redactor(LOG_BLACKLIST)

    handlers: Dict[str, Any] = dict()
    if log_file:
        handlers["file"] = {
            "class": "logging.handlers.WatchedFileHandler",
            "filename": log_file,
            "level": "DEBUG",
            "formatter": formatter,
            "filters": ["user_filter"],
        }
    else:
        handlers["default"] = {
            "class": "logging.StreamHandler",
            "level": "DEBUG",
            "formatter": formatter,
            "filters": ["user_filter"],
        }
    if not disable_debug_logfile:
        handlers["debug-info"] = {
            "class": "logging.handlers.RotatingFileHandler",
            "filename": configure_debug_logfile_path(debug_log_file_path),
            "level": "DEBUG",
            "formatter": "debug",
            "maxBytes": MAX_LOG_FILE_SIZE,
            "backupCount": LOG_BACKUP_COUNT,
            "filters": ["raiden_debug_file_filter"],
        }

    # The debug logfile always records 'raiden' at DEBUG, plus any extra
    # per-logger levels requested by the caller.
    debug_filter_levels = {"": DEFAULT_LOG_LEVEL, "raiden": "DEBUG"}
    debug_filter_levels.update(_debug_log_file_additional_level_filters or {})

    def make_formatter(renderer):
        # All four formatters share the ProcessorFormatter/redaction setup;
        # only the final renderer differs.
        return {
            "()": structlog.stdlib.ProcessorFormatter,
            "processor": _chain(renderer, redact),
            "foreign_pre_chain": pre_chain,
        }

    logging.config.dictConfig({
        "version": 1,
        "disable_existing_loggers": False,
        "filters": {
            "user_filter": {
                "()": RaidenFilter,
                "log_level_config": logger_level_config,
            },
            "raiden_debug_file_filter": {
                "()": RaidenFilter,
                "log_level_config": debug_filter_levels,
            },
        },
        "formatters": {
            "plain": make_formatter(structlog.dev.ConsoleRenderer(colors=False)),
            "json": make_formatter(structlog.processors.JSONRenderer()),
            "colorized": make_formatter(structlog.dev.ConsoleRenderer(colors=True)),
            "debug": make_formatter(structlog.processors.JSONRenderer()),
        },
        "handlers": handlers,
        "loggers": {
            "": {"handlers": list(handlers), "propagate": True},
        },
    })
    structlog.configure(
        processors=pre_chain + [structlog.stdlib.ProcessorFormatter.wrap_for_formatter],
        wrapper_class=structlog.stdlib.BoundLogger,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=cache_logger_on_first_use,
    )

    # Keep the root logger permissive; the RaidenFilter instances attached
    # to the handlers perform the actual per-logger level filtering.
    structlog.get_logger("").setLevel(
        logger_level_config.get("", DEFAULT_LOG_LEVEL))
    for package in _first_party_packages:
        structlog.get_logger(package).setLevel("DEBUG")

    # Roll the rotating debug handler over at startup so each session gets
    # its own logfile segment.
    for handler in logging.getLogger().handlers:
        if isinstance(handler, logging.handlers.RotatingFileHandler):
            handler.flush()
            if os.stat(handler.baseFilename).st_size > 0:
                handler.doRollover()

    # py-evm installs a custom Trace logger; enable it when py-evm is
    # importable (the import fails otherwise, hence the try-except).
    try:
        from eth.tools.logging import setup_trace_logging
        setup_trace_logging()
    except ImportError:
        pass
import pkg_resources
import sys

from eth.tools.logging import setup_trace_logging

# Install the custom TRACE log level.  This must happen before the imports
# below, which create loggers at import time.
setup_trace_logging()

from eth.chains import (  # noqa: F401
    Chain,
    MainnetChain,
    MainnetTesterChain,
    RopstenChain,
)

# Raise the interpreter recursion limit so that up to 1024 frames of
# recursion can be reached.
sys.setrecursionlimit(10 * 1024)

__version__ = pkg_resources.get_distribution("py-evm").version
def configure_logging(
    logger_level_config: Dict[str, str] = None,
    colorize: bool = True,
    log_json: bool = False,
    log_file: str = None,
    disable_debug_logfile: bool = False,
):
    """Configure structlog and stdlib logging.

    Output goes to stderr or ``log_file`` using a plain, colorized or JSON
    formatter; a debug logfile handler is added unless disabled.  Access
    tokens are redacted from records and tracebacks.
    """
    structlog.reset_defaults()

    logger_level_config = logger_level_config or dict()
    logger_level_config.setdefault('filelock', 'ERROR')
    logger_level_config.setdefault('', DEFAULT_LOG_LEVEL)

    # Chain shared with stdlib-originated records via ``foreign_pre_chain``.
    shared_processors = [
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
    ]

    if log_json:
        formatter = 'json'
    elif colorize and not log_file:
        formatter = 'colorized'
    else:
        formatter = 'plain'

    # Redact access tokens in log lines and formatted tracebacks.
    redact = redactor({
        re.compile(r'\b(access_?token=)([a-z0-9_-]+)', re.I): r'\1<redacted>',
    })
    _wrap_tracebackexception_format(redact)

    combined_log_handlers = _get_log_handler(
        formatter,
        log_file,
    )
    if not disable_debug_logfile:
        combined_log_handlers = {
            **combined_log_handlers,
            **_get_log_file_handler(),
        }

    def _fmt(renderer):
        # Every formatter is a ProcessorFormatter wrapping a renderer plus
        # the redaction step.
        return {
            '()': structlog.stdlib.ProcessorFormatter,
            'processor': _chain(renderer, redact),
            'foreign_pre_chain': shared_processors,
        }

    logging.config.dictConfig(
        {
            'version': 1,
            'disable_existing_loggers': False,
            'filters': {
                'log_level_filter': {
                    '()': RaidenFilter,
                    'log_level_config': logger_level_config,
                },
                'log_level_debug_filter': {
                    '()': RaidenFilter,
                    'log_level_config': {
                        '': DEFAULT_LOG_LEVEL,
                        'raiden': 'DEBUG',
                    },
                },
            },
            'formatters': {
                'plain': _fmt(structlog.dev.ConsoleRenderer(colors=False)),
                'json': _fmt(structlog.processors.JSONRenderer()),
                'colorized': _fmt(structlog.dev.ConsoleRenderer(colors=True)),
                'debug': _fmt(structlog.dev.ConsoleRenderer(colors=False)),
            },
            'handlers': combined_log_handlers,
            'loggers': {
                '': {
                    'handlers': list(combined_log_handlers),
                    'propagate': True,
                },
            },
        },
    )
    structlog.configure(
        processors=shared_processors + [
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        wrapper_class=structlog.stdlib.BoundLogger,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )

    # The root logger stays permissive; the `RaidenFilter` instances on the
    # handlers do the actual per-logger level filtering.
    structlog.get_logger('').setLevel(
        logger_level_config.get('', DEFAULT_LOG_LEVEL))
    structlog.get_logger('raiden').setLevel('DEBUG')

    # Roll the rotating handler over at startup so logs split per session.
    for handler in logging.getLogger().handlers:
        if isinstance(handler, logging.handlers.RotatingFileHandler):
            handler.flush()
            if os.stat(handler.baseFilename).st_size > 0:
                handler.doRollover()

    # py-evm uses a custom Trace logger; set it up when py-evm is importable
    # (the import raises otherwise, hence the try-except block).
    try:
        from eth.tools.logging import setup_trace_logging
        setup_trace_logging()
    except ImportError:
        pass
def setup_logging(log_file_path='/var/log/qsp-protocol/qsp-protocol.log'):
    """Configure stdlib logging and structlog for the service.

    Records are emitted both to stderr and to a rotating logfile, rendered
    as JSON by ``pythonjsonlogger``.

    :param log_file_path: destination of the rotating logfile.  Previously
        hard-coded; now a keyword parameter with the same default, so all
        existing callers are unaffected.
    """
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(levelname)s[%(threadName)s] %(message)s',
    )
    # These libraries are extremely chatty at DEBUG; only surface critical
    # problems from them.
    logging.getLogger('urllib3').setLevel(logging.CRITICAL)
    logging.getLogger('botocore').setLevel(logging.CRITICAL)

    structlog.configure_once(
        context_class=structlog.threadlocal.wrap_dict(dict),
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        processors=[
            structlog.stdlib.filter_by_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt="iso"),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.UnicodeDecoder(),
            structlog.stdlib.render_to_log_kwargs,
        ],
    )

    dict_config = {
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {
            'json': {
                'format': '%(message)s %(threadName)s %(lineno)d %(pathname)s ',
                'class': 'pythonjsonlogger.jsonlogger.JsonFormatter',
            },
        },
        'handlers': {
            'json': {
                'class': 'logging.StreamHandler',
                'formatter': 'json',
            },
            'file': {
                'class': 'logging.handlers.RotatingFileHandler',
                'formatter': 'json',
                'filename': log_file_path,
                'mode': 'a',
                'maxBytes': 10485760,
                'backupCount': 5,
            },
        },
        'loggers': {
            '': {
                'handlers': ['json', 'file'],
                # logging.DEBUG (10) replaces the removed hand-rolled
                # level_map dict, which duplicated logging's own numeric
                # levels but was only ever read at the 'DEBUG' key.
                'level': logging.DEBUG,
            },
        },
    }
    logging.config.dictConfig(dict_config)

    # Install the custom TRACE level (py-evm helper).
    setup_trace_logging()