Beispiel #1
0
        def call_with_common_options_initialized(**params: Any) -> Callable:
            """Resolve options shared by all commands, then run the wrapped command.

            Decrypts the keystore into ``params["private_key"]``, configures
            logging, derives a default ``state_db`` path when none was given,
            and always resets structlog defaults afterwards.
            """
            # Keystore file + password are consumed here; only the decrypted
            # private key is forwarded to the wrapped command.
            params["private_key"] = _open_keystore(params.pop("keystore_file"),
                                                   params.pop("password"))

            # Don't print traceback on KeyboardInterrupt
            gevent.get_hub().NOT_ERROR += (KeyboardInterrupt, )

            try:
                setup_logging(log_level=params.pop("log_level"),
                              log_json=params.pop("log_json"))
                if not params["state_db"]:
                    # only RC has `chain_id`, MS and PFS have `web3` object
                    chain_id = str(
                        params.get("chain_id") or params["web3"].net.version)
                    contracts_version = CONTRACTS_VERSION.replace(".", "_")
                    filename = f"{app_name}-{chain_id}-{contracts_version}.db"
                    data_dir = click.get_app_dir(app_name)
                    params["state_db"] = os.path.join(data_dir, filename)

                # Need to delete the `chain_id` key
                # NOTE(review): a present-but-None `chain_id` key is kept and
                # forwarded to `func` -- confirm that is intended.
                if params.get("chain_id") is not None:
                    del params["chain_id"]

                return func(**params)
            finally:
                # Leave structlog in its default state for subsequent runs.
                structlog.reset_defaults()
Beispiel #2
0
    def test_cloud_logging_log_key_added(
        caplog: LogCaptureFixture,
        monkeypatch: MonkeyPatch,
        test_data: dict,
        cloud_logging_compatibility_mode_env_var: str,
    ) -> None:
        """Cloud Logging mode must add a `severity` key mirroring `level`."""
        # Switch on Cloud Logging compatibility mode.
        structlog.reset_defaults()
        monkeypatch.setenv(
            "_STRUCTLOG_SENTRY_LOGGER_STDLIB_BASED_LOGGER_MODE_ON",
            "ANY_VALUE")
        monkeypatch.setenv(cloud_logging_compatibility_mode_env_var,
                           "ANY_VALUE")

        # Build the Cloud Logging-compatible logger and emit a single entry.
        logger = structlog_sentry_logger.get_logger()
        logger.debug("Testing Cloud Logging-compatible logger", **test_data)

        assert caplog.records
        # Every captured message must be a structlog event dict carrying the
        # bound test data plus the Cloud Logging `severity` field.
        for record in caplog.records:
            test_log = record.msg
            if not isinstance(test_log, dict):  # not a structlog event dict
                raise NotImplementedError(
                    "Captured log message not a supported type")
            for key, value in test_data.items():
                assert test_log[key] == value
            assert "severity" in test_log
            assert test_log["level"] == test_log["severity"]
Beispiel #3
0
def initialize_logging():
    """Configure root and third-party logging for local dev or AWS Lambda.

    Picks the formatter based on ``AWS_EXECUTION_ENV`` (absent -> local dev),
    replaces all handlers on the root logger with a single stream handler,
    and quiets a fixed set of chatty third-party loggers to WARNING.
    """
    log_level = os.environ.get("NGT_LOG_LEVEL") or "DEBUG"
    for_development = os.environ.get("AWS_EXECUTION_ENV") is None
    structlog.reset_defaults()
    root_logger = logging.getLogger("")
    # Iterate over a copy: removing a handler while iterating the live
    # `handlers` list skips every other entry.
    for h in list(root_logger.handlers):
        root_logger.removeHandler(h)

    if for_development:
        formatter = _configure_local_dev_formatter()
    else:
        formatter = _configure_aws_lambda_formatter()

    default_handler = logging.StreamHandler()
    default_handler.setFormatter(formatter)
    root_logger.addHandler(default_handler)
    root_logger.setLevel(log_level)
    root_logger.propagate = True

    # We support logging third party libraries as well; alter these log levels if needed.
    for logger_name in [
            "boto3",
            "botocore",
            "asyncio",
            "urllib",
            "urllib3.connectionpool",
            "s3transfer.utils",
            "s3transfer.tasks",
            "s3transfer.futures",
    ]:
        third_party_logger = logging.getLogger(logger_name)
        # Same copy-before-remove fix as for the root logger above.
        for h in list(third_party_logger.handlers):
            third_party_logger.removeHandler(h)
        third_party_logger.addHandler(default_handler)
        third_party_logger.setLevel("WARNING")
Beispiel #4
0
 def call_with_common_options_initialized(**params: Any) -> Callable:
     """Decrypt the keystore, set up logging, then run the wrapped command."""
     keystore_file = params.pop("keystore_file")
     password = params.pop("password")
     params["private_key"] = _open_keystore(keystore_file, password)
     try:
         setup_logging(params.pop("log_level"))
         return func(**params)
     finally:
         # Always restore structlog defaults, even when the command fails.
         structlog.reset_defaults()
Beispiel #5
0
 def setup(self, monkeypatch: MonkeyPatch) -> None:
     """Start each test from a clean slate: default structlog configuration
     and none of the logger-mode environment variables set."""
     structlog.reset_defaults()
     for env_var in (
             "STRUCTLOG_SENTRY_LOGGER_LOCAL_DEVELOPMENT_LOGGING_MODE_ON",
             "_STRUCTLOG_SENTRY_LOGGER_STDLIB_BASED_LOGGER_MODE_ON",
     ):
         monkeypatch.delenv(env_var, raising=False)
     for env_var in self.cloud_logging_compatibility_mode_env_vars:
         monkeypatch.delenv(env_var, raising=False)
Beispiel #6
0
    def test_reset(self, proxy):
        """reset_defaults() discards custom configuration and restores every
        builtin default."""
        marker = stub()
        configure(processors=[marker], context_class=dict,
                  wrapper_class=Wrapper)
        structlog.reset_defaults()
        bound = proxy.bind()

        assert [marker] != bound._processors
        assert _BUILTIN_DEFAULT_PROCESSORS == bound._processors
        assert isinstance(bound, _BUILTIN_DEFAULT_WRAPPER_CLASS)
        assert _BUILTIN_DEFAULT_CONTEXT_CLASS == bound._context.__class__
        assert _BUILTIN_DEFAULT_LOGGER_FACTORY is _CONFIG.logger_factory
Beispiel #7
0
    def test_reset(self, proxy):
        """After reset_defaults(), a freshly bound logger must carry the
        builtin processors, wrapper class, context class, and factory."""
        sentinel = stub()
        configure(processors=[sentinel], context_class=dict,
                  wrapper_class=Wrapper)
        structlog.reset_defaults()
        rebound = proxy.bind()

        assert [sentinel] != rebound._processors
        assert _BUILTIN_DEFAULT_PROCESSORS == rebound._processors
        assert isinstance(rebound, _BUILTIN_DEFAULT_WRAPPER_CLASS)
        assert _BUILTIN_DEFAULT_CONTEXT_CLASS == rebound._context.__class__
        assert _BUILTIN_DEFAULT_LOGGER_FACTORY is _CONFIG.logger_factory
Beispiel #8
0
def configure_logging(log_path: str) -> None:
    """Set up stdlib logging + structlog with colorized console output and a
    JSON debug log file.

    Args:
        log_path: Path of the file that receives every record as JSON.
    """
    # Shared pre-processing chain: used by structlog loggers and as
    # `foreign_pre_chain` for records originating from plain stdlib loggers.
    processors = [
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S.%f"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
    ]
    structlog.reset_defaults()
    logging.config.dictConfig({
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "colorized-formatter": {
                "()": structlog.stdlib.ProcessorFormatter,
                "processor": structlog.dev.ConsoleRenderer(colors=True),
                "foreign_pre_chain": processors,
            },
            "json": {
                "()": structlog.stdlib.ProcessorFormatter,
                "processor": structlog.processors.JSONRenderer(),
                "foreign_pre_chain": processors,
            },
        },
        "handlers": {
            "colorized-handler": {
                "class": "logging.StreamHandler",
                "level": "DEBUG",
                "formatter": "colorized-formatter",
            },
            "debug-info": {
                "class": "logging.FileHandler",
                "filename": log_path,
                "level": "DEBUG",
                "formatter": "json",
            },
        },
        "loggers": {
            # Root logger fans out to both handlers.
            "": {
                "handlers": ["colorized-handler", "debug-info"],
                "propagate": True
            }
        },
    })
    structlog.configure(
        processors=processors,
        wrapper_class=structlog.stdlib.BoundLogger,
        logger_factory=structlog.stdlib.LoggerFactory(),
    )
    # `log` is a module-level logger defined elsewhere in this file.
    log.setLevel("DEBUG")
Beispiel #9
0
    def configure_logging(self):
        """Configure structlog-backed JSON logging for the application.

        Silences a fixed set of noisy third-party loggers, routes output to
        stdout, and installs the wrapped logger as both ``self.logger`` and
        ``self.app.logger``.
        """
        if self.app.testing:
            structlog.reset_defaults()

        # Third-party loggers that are too chatty: raised to ERROR and
        # disabled outright.
        disabled = [
            "docker.utils.config",
            "docker.auth",
            "docker.api.build",
            "docker.api.swarm",
            "docker.api.image",
            "rq.worker",
            "werkzeug",
            "requests",
            "urllib3",
        ]

        for logger in disabled:
            log = logging.getLogger(logger)
            log.setLevel(logging.ERROR)
            log.disabled = True
        self.app.logger.disabled = True

        logging.basicConfig(level=self.log_level,
                            stream=sys.stdout,
                            format="%(message)s")

        # Processor chain rendering each event as indented, key-sorted JSON.
        chain = [
            filter_by_level,
            add_log_level,
            add_logger_name,
            TimeStamper(fmt="iso"),
            StackInfoRenderer(),
            format_exc_info,
            JSONRenderer(indent=1, sort_keys=True),
        ]

        logger = logging.getLogger(__name__)

        # NOTE(review): this branch checks `self.testing` while the reset
        # above checks `self.app.testing` -- confirm both flags are intended.
        if self.testing:
            chain = []
            logger = structlog.ReturnLogger()

        log = structlog.wrap_logger(
            logger,
            processors=chain,
            context_class=dict,
            wrapper_class=structlog.stdlib.BoundLogger,
            # cache_logger_on_first_use=True,
        )
        self.logger = log
        self.app.logger = self.logger
Beispiel #10
0
def setup_logging(config=None,
                  level=None,
                  stream=None,
                  filename=None,
                  filemode=None):
    '''sets up both logging and structlog.

    Explicit arguments override the selected configuration; `stream` and
    `filename` are mutually exclusive. Replaces the module-global `handler`
    on the 'asnake' logger and marks configuration as done.
    '''
    global handler, already_configured

    asnake_root_logger = logging.getLogger('asnake')

    # Drop the handler installed by a previous call, if any.
    if handler:
        asnake_root_logger.removeHandler(handler)

    if stream and filename:
        raise RuntimeError(
            "stream and filename are mutually exclusive and cannot be combined, pick one or the other"
        )
    # Environment variable can select a named configuration.
    from_env = os.environ.get('ASNAKE_LOG_CONFIG', None)
    default = configurations.get(from_env, DEFAULT_CONFIG)

    if not config:
        config = copy_config(default)
        if filename:
            del config['logging']['stream']

    level = level or config.get('level', None) or logging.INFO
    # Accept level names like "debug"/"INFO" as well as numeric levels.
    if isinstance(level, str) and level_re.match(level):
        level = getattr(logging, level.upper())

    # Forward what's needed to put the log places
    if stream:
        config['logging']['stream'] = stream
    if filemode:
        config['logging']['filemode'] = filemode
    if filename:
        config['logging']['filename'] = filename

    # NOTE(review): if a caller-supplied config has BOTH 'filename' and
    # 'stream', the FileHandler created first is overwritten (and its file
    # left open); if it has neither, `handler` keeps its previous value
    # (possibly None) -- confirm these cases cannot occur.
    if 'filename' in config['logging']:
        handler = logging.FileHandler(config['logging']['filename'],
                                      mode=config['logging'].get(
                                          'filemode', 'a'))
    if 'stream' in config['logging']:
        handler = logging.StreamHandler(config['logging']['stream'])

    asnake_root_logger.addHandler(handler)
    asnake_root_logger.setLevel(level)
    structlog.reset_defaults()
    structlog.configure(**config['structlog'])
    already_configured = True
Beispiel #11
0
    def test_get_config_is_configured(self):
        """
        get_config() output round-trips through configure(), and
        is_configured() tracks whether custom configuration is active.
        """
        assert structlog.is_configured() is False

        structlog.configure(**structlog.get_config())

        assert structlog.is_configured() is True

        structlog.reset_defaults()

        assert structlog.is_configured() is False
Beispiel #12
0
    def test_get_config_is_configured(self):
        """
        Feeding get_config() back into configure() is valid, and
        is_configured() flips accordingly: False before, True after,
        and False again once defaults are reset.
        """
        assert structlog.is_configured() is False

        structlog.configure(**structlog.get_config())

        assert structlog.is_configured() is True

        structlog.reset_defaults()

        assert structlog.is_configured() is False
Beispiel #13
0
def configure_for_pf():
    """
    Fixture body: install a ProcessorFormatter-based structlog setup, hand
    control to the test, then restore logging and structlog afterwards.
    """
    configure(
        wrapper_class=BoundLogger,
        logger_factory=LoggerFactory(),
        processors=[add_log_level, ProcessorFormatter.wrap_for_formatter],
    )

    yield

    # Undo both the stdlib logging and the structlog configuration.
    logging.basicConfig()
    reset_defaults()
Beispiel #14
0
def configure_structlog():
    """Give every test method a freshly configured structlog."""
    structlog.reset_defaults()
    chain = [
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.processors.KeyValueRenderer(),
    ]
    structlog.configure(
        cache_logger_on_first_use=False,
        context_class=dict,
        processors=chain,
        wrapper_class=structlog.BoundLogger,
    )
Beispiel #15
0
def _trimmed_logging(logger_level_config):
    """Configure plain-console stdlib logging plus a matching structlog setup.

    NOTE(review): `logger_level_config` is normalized (filelock -> ERROR,
    root -> DEBUG) by mutating the caller's dict, but the resulting levels
    are never applied to the dictConfig below -- confirm this is intended.
    """
    structlog.reset_defaults()

    logger_level_config = logger_level_config or dict()
    logger_level_config.setdefault('filelock', 'ERROR')
    logger_level_config.setdefault('', 'DEBUG')

    # Shared pre-chain for both structlog events and foreign stdlib records.
    processors = [
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
    ]

    logging.config.dictConfig(
        {
            'version': 1,
            'disable_existing_loggers': False,
            'formatters': {
                'plain': {
                    '()': structlog.stdlib.ProcessorFormatter,
                    'processor': structlog.dev.ConsoleRenderer(colors=False),
                    'foreign_pre_chain': processors,
                },
            },
            'handlers': {
                'default': {
                    'class': 'logging.StreamHandler',
                    'level': 'DEBUG',
                    'formatter': 'plain',
                },
            },
            'loggers': {
                '': {
                    'handlers': ['default'],
                    'propagate': True,
                },
            },
        },
    )
    structlog.configure(
        processors=processors + [
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        wrapper_class=structlog.stdlib.BoundLogger,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )
Beispiel #16
0
def _trimmed_logging(logger_level_config):
    """Configure plain-console stdlib logging plus a matching structlog setup.

    NOTE(review): `logger_level_config` is normalized (filelock -> ERROR,
    root -> DEBUG) by mutating the caller's dict, but the resulting levels
    are never applied to the dictConfig below -- confirm this is intended.
    """
    structlog.reset_defaults()

    logger_level_config = logger_level_config or dict()
    logger_level_config.setdefault('filelock', 'ERROR')
    logger_level_config.setdefault('', 'DEBUG')

    # Shared pre-chain for both structlog events and foreign stdlib records.
    processors = [
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
    ]

    logging.config.dictConfig(
        {
            'version': 1,
            'disable_existing_loggers': False,
            'formatters': {
                'plain': {
                    '()': structlog.stdlib.ProcessorFormatter,
                    'processor': structlog.dev.ConsoleRenderer(colors=False),
                    'foreign_pre_chain': processors,
                },
            },
            'handlers': {
                'default': {
                    'class': 'logging.StreamHandler',
                    'level': 'DEBUG',
                    'formatter': 'plain',
                },
            },
            'loggers': {
                '': {
                    'handlers': ['default'],
                    'propagate': True,
                },
            },
        },
    )
    structlog.configure(
        processors=processors + [
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        wrapper_class=structlog.stdlib.BoundLogger,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )
def _benchmark_runner(benchmark: BenchmarkFixture, test_cases: dict) -> None:
    """Benchmark structlog configured for fast orjson byte output.

    Configures a filtering bound logger rendering to bytes, then times
    ``utils.lots_of_logging`` over *test_cases* via pytest-benchmark.
    """
    structlog.reset_defaults()
    structlog.configure(
        cache_logger_on_first_use=True,
        wrapper_class=structlog.make_filtering_bound_logger(logging.DEBUG),
        processors=[
            structlog.threadlocal.merge_threadlocal,
            structlog.processors.add_log_level,
            structlog.processors.format_exc_info,
            structlog.processors.TimeStamper(fmt="iso", utc=True),
            # orjson serializes straight to bytes; OPT_NON_STR_KEYS permits
            # non-string event-dict keys.
            structlog.processors.JSONRenderer(serializer=orjson.dumps,
                                              option=orjson.OPT_NON_STR_KEYS),
        ],
        logger_factory=structlog.BytesLoggerFactory(),
    )
    logger = structlog.get_logger()
    benchmark(utils.lots_of_logging, logger=logger, test_cases=test_cases)
Beispiel #18
0
def configure_structlog():
    """Reconfigure structlog from scratch for each test method.

    Pattern recommended by the structlog author for pytest compatibility:
    https://github.com/hynek/structlog/issues/76#issuecomment-240373958
    """
    structlog.reset_defaults()
    test_processors = [
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.processors.KeyValueRenderer(),
    ]
    structlog.configure(
        cache_logger_on_first_use=False,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        processors=test_processors,
        wrapper_class=structlog.stdlib.BoundLogger,
    )
Beispiel #19
0
def _trimmed_logging(logger_level_config):
    """Configure plain-console stdlib logging plus a matching structlog setup.

    NOTE(review): `logger_level_config` is normalized (filelock -> ERROR,
    root -> DEBUG) by mutating the caller's dict, but the resulting levels
    are never applied to the dictConfig below -- confirm this is intended.
    """
    structlog.reset_defaults()

    logger_level_config = logger_level_config or dict()
    logger_level_config.setdefault("filelock", "ERROR")
    logger_level_config.setdefault("", "DEBUG")

    # Shared pre-chain for both structlog events and foreign stdlib records.
    processors = [
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
    ]

    logging.config.dictConfig({
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "plain": {
                "()": structlog.stdlib.ProcessorFormatter,
                "processor": structlog.dev.ConsoleRenderer(colors=False),
                "foreign_pre_chain": processors,
            }
        },
        "handlers": {
            "default": {
                "class": "logging.StreamHandler",
                "level": "DEBUG",
                "formatter": "plain",
            }
        },
        "loggers": {
            "": {
                "handlers": ["default"],
                "propagate": True
            }
        },
    })
    structlog.configure(
        processors=processors +
        [structlog.stdlib.ProcessorFormatter.wrap_for_formatter],
        wrapper_class=structlog.stdlib.BoundLogger,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )
Beispiel #20
0
    async def test_integration(self, capsys):
        """
        End-to-end: configure structlog, emit one entry through the async
        bound logger, and verify the JSON that reaches stdout.
        """

        configure(
            cache_logger_on_first_use=True,
            logger_factory=PrintLogger,
            processors=[add_log_level, JSONRenderer()],
            wrapper_class=AsyncBoundLogger,
        )

        log = get_logger()

        await log.bind(foo="bar").info("baz", x="42")

        emitted = json.loads(capsys.readouterr().out)
        expected = {
            "foo": "bar",
            "x": "42",
            "event": "baz",
            "level": "info",
        }
        assert expected == emitted

        reset_defaults()
Beispiel #21
0
def configure_logging(
    level: int = 10,
    log_file: Optional[str] = None,
    color: bool = True,
):
    """Configures the logging system

    Args:
        level: The log level
        log_file: Optional the log file to log the json log to
        color: If the console log should print in colors or not
    """
    # ensure we start from default config
    structlog.reset_defaults()

    timestamper = structlog.processors.TimeStamper(
        utc=True,
        key="timestamp",
    )
    # shared processors for standard lib and structlog
    shared_processors = [
        structlog.stdlib.add_log_level,
        timestamper,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
    ]

    # processor only for structlog
    processors = [structlog.stdlib.filter_by_level]
    processors.extend(shared_processors)
    processors.append(structlog.stdlib.ProcessorFormatter.wrap_for_formatter)

    handlers = {}
    # configure console logging
    handlers["console"] = {
        "level": "DEBUG",
        "class": "logging.StreamHandler",
        "formatter": "color" if color else "plain",
    }

    # configure file logging
    if log_file is not None:
        handlers["file"] = {
            "level": "DEBUG",
            "class": "logging.handlers.WatchedFileHandler",
            "filename": log_file,
            "formatter": "json",
        }
    # log formatters
    log_formatters = {
        "plain": {
            "()": structlog.stdlib.ProcessorFormatter,
            "processor": structlog.dev.ConsoleRenderer(colors=False),
            "foreign_pre_chain": shared_processors,
        },
        "json": {
            "()": structlog.stdlib.ProcessorFormatter,
            "processor": structlog.processors.JSONRenderer(
                sort_keys=True,
            ),
            "foreign_pre_chain": shared_processors,
        },
    }
    # the "color" formatter only exists when colored output is requested
    if color:
        log_formatters["color"] = {
            "()": structlog.stdlib.ProcessorFormatter,
            "processor": structlog.dev.ConsoleRenderer(colors=True),
            "foreign_pre_chain": shared_processors,
        }
    # configure standard lib logging
    logging.config.dictConfig(
        {
            "version": 1,
            "disable_existing_loggers": False,
            "formatters": log_formatters,
            "handlers": handlers,
            # NOTE: `handlers.keys()` passes a dict view; dictConfig only
            # iterates it, so this works, but a list would be conventional.
            "loggers": {"": {"handlers": handlers.keys(), "propagate": True}},
        }
    )

    # apply structlog config
    structlog.configure(
        processors=processors,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )

    # set the log level
    # `logger` is a module-level logger defined elsewhere in this file.
    logger.setLevel(level)
Beispiel #22
0
 def teardown_method(self, method):
     # Reset structlog to library defaults after each test method.
     structlog.reset_defaults()
Beispiel #23
0
 def teardown_class(cls):
     # Reset structlog to library defaults once the whole class has run.
     reset_defaults()
Beispiel #24
0
def configure_logging(
    logger_level_config: Dict[str, str] = None,
    colorize: bool = True,
    log_json: bool = False,
    log_file: str = None,
    disable_debug_logfile: bool = False,
):
    """Configure stdlib logging + structlog for Raiden.

    Selects a formatter (json / colorized / plain), installs redaction of
    access tokens, optionally adds a rotating debug log file, and rolls the
    debug log over on startup so each session gets its own file.

    Args:
        logger_level_config: Mapping of logger name -> level name; the root
            ('') entry defaults to DEFAULT_LOG_LEVEL.
        colorize: Use colorized console output (ignored when log_json or
            log_file is set).
        log_json: Render log records as JSON.
        log_file: When set, forces the plain formatter.
        disable_debug_logfile: Skip the rotating debug-info file handler.
    """
    structlog.reset_defaults()
    if logger_level_config is None:
        logger_level_config = {'': DEFAULT_LOG_LEVEL}
    # Shared pre-chain for structlog events and foreign stdlib records.
    processors = [
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
    ]

    formatter = 'colorized' if colorize and not log_file else 'plain'
    if log_json:
        formatter = 'json'

    # Redact access tokens so they never reach log output or tracebacks.
    redact = redactor({
        re.compile(r'\b(access_?token=)([a-z0-9_-]+)', re.I):
        r'\1<redacted>',
    })
    _wrap_tracebackexception_format(redact)

    log_handler = _get_log_handler(
        formatter,
        log_file,
    )
    if disable_debug_logfile:
        combined_log_handlers = log_handler
    else:
        debug_log_file_handler = _get_log_file_handler()
        combined_log_handlers = {**log_handler, **debug_log_file_handler}

    logging.config.dictConfig(
        {
            'version': 1,
            'disable_existing_loggers': False,
            'filters': {
                'log_level_filter': {
                    '()': RaidenFilter,
                    'log_level_config': logger_level_config,
                },
                'log_level_debug_filter': {
                    '()': RaidenFilter,
                    'log_level_config': {
                        '': DEFAULT_LOG_LEVEL,
                        'raiden': 'DEBUG'
                    },
                },
            },
            'formatters': {
                'plain': {
                    '()':
                    structlog.stdlib.ProcessorFormatter,
                    'processor':
                    _chain(structlog.dev.ConsoleRenderer(colors=False),
                           redact),
                    'foreign_pre_chain':
                    processors,
                },
                'json': {
                    '()':
                    structlog.stdlib.ProcessorFormatter,
                    'processor':
                    _chain(structlog.processors.JSONRenderer(), redact),
                    'foreign_pre_chain':
                    processors,
                },
                'colorized': {
                    '()':
                    structlog.stdlib.ProcessorFormatter,
                    'processor':
                    _chain(structlog.dev.ConsoleRenderer(colors=True), redact),
                    'foreign_pre_chain':
                    processors,
                },
                'debug': {
                    '()':
                    structlog.stdlib.ProcessorFormatter,
                    'processor':
                    _chain(structlog.dev.ConsoleRenderer(colors=False),
                           redact),
                    'foreign_pre_chain':
                    processors,
                },
            },
            'handlers': combined_log_handlers,
            'loggers': {
                '': {
                    'handlers': list(combined_log_handlers.keys()),
                    'propagate': True,
                },
            },
        }, )
    structlog.configure(
        processors=processors + [
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        wrapper_class=structlog.stdlib.BoundLogger,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )

    # set logging level of the root logger to DEBUG, to be able to intercept
    # all messages, which are then be filtered by the `RaidenFilter`
    structlog.get_logger('').setLevel(
        logger_level_config.get('', DEFAULT_LOG_LEVEL))
    structlog.get_logger('raiden').setLevel('DEBUG')

    # rollover RotatingFileHandler on startup, to split logs also per-session
    root = logging.getLogger()
    for handler in root.handlers:
        if isinstance(handler, logging.handlers.RotatingFileHandler):
            handler.flush()
            if os.stat(handler.baseFilename).st_size > 0:
                handler.doRollover()

    # fix logging of py-evm (it uses a custom Trace logger from logging library)
    # if py-evm is not used this will throw, hence the try-catch block
    # for some reason it didn't work to put this into conftest.py
    try:
        from eth.utils.logging import setup_trace_logging
        setup_trace_logging()
    except ModuleNotFoundError:
        pass
Beispiel #25
0
def configure_logging(
    logger_level_config: Dict[str, str] = None,
    colorize: bool = True,
    log_json: bool = False,
    log_file: str = None,
    disable_debug_logfile: bool = False,
    debug_log_file_name: str = None,
    _first_party_packages: FrozenSet[str] = _FIRST_PARTY_PACKAGES,
    cache_logger_on_first_use: bool = True,
):
    """Configure stdlib logging + structlog for Raiden.

    Selects a formatter (json / colorized / plain), redacts access tokens,
    logs to either a file or stderr, optionally adds a rotating JSON debug
    log, and rolls that log over on startup so each session gets its own
    file.

    Args:
        logger_level_config: Mapping of logger name -> level name; defaults
            set 'filelock' to ERROR and the root ('') to DEFAULT_LOG_LEVEL.
        colorize: Use colorized console output (ignored when log_json or
            log_file is set).
        log_json: Render log records as JSON.
        log_file: When set, log to this file instead of the console.
        disable_debug_logfile: Skip the rotating 'debug-info' handler.
        debug_log_file_name: Name for the debug log; defaults to a
            timestamped 'raiden-debug_<iso-time>.log'.
        _first_party_packages: Package loggers forced to DEBUG.
        cache_logger_on_first_use: Passed through to structlog.configure.
    """
    structlog.reset_defaults()

    logger_level_config = logger_level_config or dict()
    logger_level_config.setdefault('filelock', 'ERROR')
    logger_level_config.setdefault('', DEFAULT_LOG_LEVEL)

    # Shared pre-chain for structlog events and foreign stdlib records.
    processors = [
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
    ]

    if log_json:
        formatter = 'json'
    elif colorize and not log_file:
        formatter = 'colorized'
    else:
        formatter = 'plain'

    # Redact access tokens so they never reach log output or tracebacks.
    redact = redactor({
        re.compile(r'\b(access_?token=)([a-z0-9_-]+)', re.I):
        r'\1<redacted>',
    })
    _wrap_tracebackexception_format(redact)

    enabled_log_handlers = []
    if log_file:
        enabled_log_handlers.append('file')
    else:
        # even though the handler is not enabled, it's configured, and the file
        # must not be None
        # NOTE(review): tempfile.mktemp() is deprecated/race-prone; since the
        # handler is configured (file opened) consider NamedTemporaryFile.
        log_file = tempfile.mktemp()
        enabled_log_handlers.append('default')

    if not disable_debug_logfile:
        enabled_log_handlers.append('debug-info')

    if debug_log_file_name is None:
        time = datetime.datetime.utcnow().isoformat()
        debug_log_file_name = f'raiden-debug_{time}.log'

    logging.config.dictConfig(
        {
            'version': 1,
            'disable_existing_loggers': False,
            'filters': {
                'user_filter': {
                    '()': RaidenFilter,
                    'log_level_config': logger_level_config,
                },
                'raiden_debug_file_filter': {
                    '()': RaidenFilter,
                    'log_level_config': {
                        '': DEFAULT_LOG_LEVEL,
                        'raiden': 'DEBUG',
                    },
                },
            },
            'formatters': {
                'plain': {
                    '()':
                    structlog.stdlib.ProcessorFormatter,
                    'processor':
                    _chain(structlog.dev.ConsoleRenderer(colors=False),
                           redact),
                    'foreign_pre_chain':
                    processors,
                },
                'json': {
                    '()':
                    structlog.stdlib.ProcessorFormatter,
                    'processor':
                    _chain(structlog.processors.JSONRenderer(), redact),
                    'foreign_pre_chain':
                    processors,
                },
                'colorized': {
                    '()':
                    structlog.stdlib.ProcessorFormatter,
                    'processor':
                    _chain(structlog.dev.ConsoleRenderer(colors=True), redact),
                    'foreign_pre_chain':
                    processors,
                },
                'debug': {
                    '()':
                    structlog.stdlib.ProcessorFormatter,
                    'processor':
                    _chain(structlog.processors.JSONRenderer(), redact),
                    'foreign_pre_chain':
                    processors,
                },
            },
            'handlers': {
                'file': {
                    'class': 'logging.handlers.WatchedFileHandler',
                    'filename': log_file,
                    'level': 'DEBUG',
                    'formatter': formatter,
                    'filters': ['user_filter'],
                },
                'default': {
                    'class': 'logging.StreamHandler',
                    'level': 'DEBUG',
                    'formatter': formatter,
                    'filters': ['user_filter'],
                },
                'debug-info': {
                    'class': 'logging.handlers.RotatingFileHandler',
                    'filename': debug_log_file_name,
                    'level': 'DEBUG',
                    'formatter': 'debug',
                    'maxBytes': MAX_LOG_FILE_SIZE,
                    'backupCount': LOG_BACKUP_COUNT,
                    'filters': ['raiden_debug_file_filter'],
                },
            },
            'loggers': {
                '': {
                    'handlers': enabled_log_handlers,
                    'propagate': True,
                },
            },
        }, )
    structlog.configure(
        processors=processors + [
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        wrapper_class=structlog.stdlib.BoundLogger,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=cache_logger_on_first_use,
    )

    # set logging level of the root logger to DEBUG, to be able to intercept
    # all messages, which are then be filtered by the `RaidenFilter`
    structlog.get_logger('').setLevel(
        logger_level_config.get('', DEFAULT_LOG_LEVEL))
    for package in _first_party_packages:
        structlog.get_logger(package).setLevel('DEBUG')

    # rollover RotatingFileHandler on startup, to split logs also per-session
    root = logging.getLogger()
    for handler in root.handlers:
        if isinstance(handler, logging.handlers.RotatingFileHandler):
            handler.flush()
            if os.stat(handler.baseFilename).st_size > 0:
                handler.doRollover()

    # fix logging of py-evm (it uses a custom Trace logger from logging library)
    # if py-evm is not used this will throw, hence the try-catch block
    # for some reason it didn't work to put this into conftest.py
    try:
        from eth.tools.logging import setup_trace_logging
        setup_trace_logging()
    except ImportError:
        pass
Beispiel #26
0
def configure_logging(
    logger_level_config: Dict[str, str] = None,
    colorize: bool = True,
    log_json: bool = False,
    log_file: str = None,
    disable_debug_logfile: bool = False,
    debug_log_file_path: str = None,
    cache_logger_on_first_use: bool = True,
    _first_party_packages: FrozenSet[str] = _FIRST_PARTY_PACKAGES,
    _debug_log_file_additional_level_filters: Dict[str, str] = None,
) -> None:
    """Configure structlog and the stdlib ``logging`` machinery.

    Installs one user-facing handler (stream or file) plus, unless disabled,
    a rotating JSON debug logfile. All handlers filter records through
    ``RaidenFilter`` and redact values matching ``LOG_BLACKLIST``.

    Args:
        logger_level_config: Per-logger level overrides; key ``""`` is the
            root logger. The mapping is copied, never mutated.
        colorize: Colorize console output (ignored for files and JSON).
        log_json: Render the user-facing log as JSON.
        log_file: Write the user-facing log to this path instead of the
            console.
        disable_debug_logfile: Skip the rotating debug logfile entirely.
        debug_log_file_path: Explicit path for the debug logfile.
        cache_logger_on_first_use: Forwarded to ``structlog.configure``.
        _first_party_packages: Logger names forced to DEBUG level.
        _debug_log_file_additional_level_filters: Extra per-logger levels
            merged into the debug logfile filter.
    """
    structlog.reset_defaults()

    # Copy before applying defaults so the caller's mapping is not mutated
    # by the ``setdefault`` calls below.
    logger_level_config = dict(logger_level_config or {})
    logger_level_config.setdefault("filelock", "ERROR")
    logger_level_config.setdefault("", DEFAULT_LOG_LEVEL)

    processors = [
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        add_greenlet_name,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S.%f"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
    ]

    # JSON output takes precedence; colors only make sense on a console.
    if log_json:
        formatter = "json"
    elif colorize and not log_file:
        formatter = "colorized"
    else:
        formatter = "plain"

    redact = redactor(LOG_BLACKLIST)

    handlers: Dict[str, Any] = {}
    if log_file:
        handlers["file"] = {
            "class": "logging.handlers.WatchedFileHandler",
            "filename": log_file,
            "level": "DEBUG",
            "formatter": formatter,
            "filters": ["user_filter"],
        }
    else:
        handlers["default"] = {
            "class": "logging.StreamHandler",
            "level": "DEBUG",
            "formatter": formatter,
            "filters": ["user_filter"],
        }

    if not disable_debug_logfile:
        debug_logfile_path = configure_debug_logfile_path(debug_log_file_path)
        handlers["debug-info"] = {
            "class": "logging.handlers.RotatingFileHandler",
            "filename": debug_logfile_path,
            "level": "DEBUG",
            "formatter": "debug",
            "maxBytes": MAX_LOG_FILE_SIZE,
            "backupCount": LOG_BACKUP_COUNT,
            "filters": ["raiden_debug_file_filter"],
        }

    logging.config.dictConfig({
        "version": 1,
        "disable_existing_loggers": False,
        "filters": {
            "user_filter": {
                "()": RaidenFilter,
                "log_level_config": logger_level_config,
            },
            "raiden_debug_file_filter": {
                "()": RaidenFilter,
                "log_level_config": {
                    "": DEFAULT_LOG_LEVEL,
                    "raiden": "DEBUG",
                    **(_debug_log_file_additional_level_filters or {}),
                },
            },
        },
        "formatters": {
            "plain": {
                "()": structlog.stdlib.ProcessorFormatter,
                "processor": _chain(
                    structlog.dev.ConsoleRenderer(colors=False), redact),
                "foreign_pre_chain": processors,
            },
            "json": {
                "()": structlog.stdlib.ProcessorFormatter,
                "processor": _chain(
                    structlog.processors.JSONRenderer(), redact),
                "foreign_pre_chain": processors,
            },
            "colorized": {
                "()": structlog.stdlib.ProcessorFormatter,
                "processor": _chain(
                    structlog.dev.ConsoleRenderer(colors=True), redact),
                "foreign_pre_chain": processors,
            },
            "debug": {
                "()": structlog.stdlib.ProcessorFormatter,
                "processor": _chain(
                    structlog.processors.JSONRenderer(), redact),
                "foreign_pre_chain": processors,
            },
        },
        "handlers": handlers,
        "loggers": {
            "": {
                # Materialize the keys view into a plain list, as the
                # dictConfig schema expects a list of handler ids.
                "handlers": list(handlers.keys()),
                "propagate": True,
            },
        },
    })
    structlog.configure(
        processors=processors +
        [structlog.stdlib.ProcessorFormatter.wrap_for_formatter],
        wrapper_class=structlog.stdlib.BoundLogger,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=cache_logger_on_first_use,
    )

    # Set the root logger's level so the handlers see every record, which
    # is then filtered per-logger by the `RaidenFilter` instances.
    structlog.get_logger("").setLevel(
        logger_level_config.get("", DEFAULT_LOG_LEVEL))
    for package in _first_party_packages:
        structlog.get_logger(package).setLevel("DEBUG")

    # Rollover the RotatingFileHandler on startup so debug logs are also
    # split per-session, not only by size.
    root = logging.getLogger()
    for handler in root.handlers:
        if isinstance(handler, logging.handlers.RotatingFileHandler):
            handler.flush()
            if os.stat(handler.baseFilename).st_size > 0:
                handler.doRollover()

    # Fix logging of py-evm (it uses a custom Trace logger from the logging
    # library). If py-evm is not installed the import fails, hence the
    # try/except. For some reason it didn't work to put this in conftest.py.
    try:
        from eth.tools.logging import setup_trace_logging

        setup_trace_logging()
    except ImportError:
        pass
from raiden.utils.nursery import Janitor, Nursery
from raiden.utils.typing import Address, Host, Port, TokenAmount

BaseURL = NewType("BaseURL", str)
Amount = NewType("Amount", int)
URL = NewType("URL", str)

# Processor chain applied (as foreign_pre_chain) to records from stdlib
# loggers before the colorized console renderer runs.
processors = [
    structlog.stdlib.add_logger_name,
    structlog.stdlib.add_log_level,
    structlog.stdlib.PositionalArgumentsFormatter(),
    structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S.%f"),
    structlog.processors.StackInfoRenderer(),
    structlog.processors.format_exc_info,
]
# Clear any structlog configuration left over from earlier imports.
structlog.reset_defaults()
logging.config.dictConfig({
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "colorized-formatter": {
            "()": structlog.stdlib.ProcessorFormatter,
            "processor": structlog.dev.ConsoleRenderer(colors=True),
            "foreign_pre_chain": processors,
        }
    },
    "handlers": {
        "colorized-handler": {
            "class": "logging.StreamHandler",
            "level": "DEBUG",
            "formatter": "colorized-formatter",
            # NOTE(review): this dictConfig call is truncated here — the rest
            # of the handler entry, any "loggers" section, and the closing
            # braces are missing from this snippet. Recover them from the
            # original source before using this block.
 def tearDown(self):
     # Reset structlog's global configuration so this test does not leak
     # its logger setup into subsequent tests.
     structlog.reset_defaults()
# Beispiel #29 (score: 0)
def load_logging_config(
    custom_processors: Optional[List[BaseProcessor]] = None,
    custom_handlers: Optional[List[Handler]] = None,
    use_hostname_processor: bool = True,
) -> logging.Logger:
    """Load the different logging config parameters as defined in the config of the application.

    Args:
        custom_processors: List of custom processors for log records
        custom_handlers: List of custom handlers to log records
        use_hostname_processor: Use the built-in HostNameProcessor for log records

    Returns:
        The configured root logger.
    """
    if config.logging.disable_processors:
        custom_processors = []
    else:
        # Copy so the caller's list is not mutated by the append below.
        custom_processors = list(custom_processors or [])
        if use_hostname_processor:
            custom_processors.append(HostNameProcessor())

    # filter_by_level runs first so suppressed records skip the rest.
    pre_processors = [
        structlog.stdlib.filter_by_level,
    ]
    # Shared between structlog-native records and stdlib ("foreign") ones.
    shared_processors = [
        structlog.stdlib.add_log_level,
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.processors.TimeStamper(fmt=config.logging.date_format,
                                         utc=config.logging.use_utc),
        structlog.processors.UnicodeDecoder(),
        structlog.stdlib.add_logger_name,
    ] + custom_processors
    post_processors = [structlog.stdlib.ProcessorFormatter.wrap_for_formatter]

    structlog.reset_defaults()
    structlog.configure(
        processors=pre_processors + shared_processors + post_processors,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
        wrapper_class=structlog.stdlib.BoundLogger,
    )

    use_colors = getattr(config.logging, "colors", False)

    default_level_styles = structlog.dev.ConsoleRenderer.get_default_level_styles(
        colors=use_colors)

    if use_colors:
        # Render DEBUG with no color escape sequence at all.
        default_level_styles["debug"] = ""

    if config.logging.get("json_format"):
        formatter = structlog.stdlib.ProcessorFormatter(
            processor=structlog.processors.JSONRenderer(),
            foreign_pre_chain=shared_processors)
    else:
        formatter = structlog.stdlib.ProcessorFormatter(
            processor=structlog.dev.ConsoleRenderer(
                level_styles=default_level_styles, colors=use_colors),
            foreign_pre_chain=shared_processors,
        )

    stream_handler = logging.StreamHandler(stream=sys.stdout)
    stream_handler.setFormatter(formatter)

    # Replace whatever handlers were previously installed on the root.
    root_logger = logging.getLogger()
    root_logger.handlers = []
    root_logger.addHandler(stream_handler)
    custom_handlers = custom_handlers or []
    for handler in custom_handlers:
        handler.setFormatter(formatter)
        root_logger.addHandler(handler)

    root_logger.setLevel(config.logging.min_level)

    # Add override for other loggers, usually loggers from libraries
    if hasattr(config.logging, "logger_overrides"):
        for logger_name, min_level in config.logging.logger_overrides.items():
            logging.getLogger(logger_name).setLevel(min_level)

    return root_logger
# Beispiel #30 (score: 0)
def configure_logging(
    logger_level_config: Dict[str, str] = None,
    colorize: bool = True,
    log_json: bool = False,
    log_file: str = None,
    disable_debug_logfile: bool = False,
):
    """Wire up structlog and stdlib logging with redaction and level filters.

    Installs the user-facing handler (and the debug file handler unless
    disabled), all records passing through ``RaidenFilter`` instances and an
    access-token redaction step.
    """
    structlog.reset_defaults()
    if logger_level_config is None:
        logger_level_config = {'': DEFAULT_LOG_LEVEL}

    shared_processors = [
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
    ]

    # JSON output wins; colors only when writing to a terminal (no file).
    if log_json:
        formatter = 'json'
    elif colorize and not log_file:
        formatter = 'colorized'
    else:
        formatter = 'plain'

    redact = redactor({
        re.compile(r'\b(access_?token=)([a-z0-9_-]+)', re.I):
        r'\1<redacted>',
    })
    _wrap_tracebackexception_format(redact)

    def _formatter_config(renderer):
        # Every formatter shares the same ProcessorFormatter scaffold and
        # redaction step; only the final renderer differs.
        return {
            '()': structlog.stdlib.ProcessorFormatter,
            'processor': _chain(renderer, redact),
            'foreign_pre_chain': shared_processors,
        }

    combined_handlers = dict(_get_log_handler(formatter, log_file))
    if not disable_debug_logfile:
        combined_handlers.update(_get_log_file_handler())

    logging.config.dictConfig({
        'version': 1,
        'disable_existing_loggers': False,
        'filters': {
            'log_level_filter': {
                '()': RaidenFilter,
                'log_level_config': logger_level_config,
            },
            'log_level_debug_filter': {
                '()': RaidenFilter,
                'log_level_config': {
                    '': DEFAULT_LOG_LEVEL,
                    'raiden': 'DEBUG',
                },
            },
        },
        'formatters': {
            'plain': _formatter_config(
                structlog.dev.ConsoleRenderer(colors=False)),
            'json': _formatter_config(
                structlog.processors.JSONRenderer()),
            'colorized': _formatter_config(
                structlog.dev.ConsoleRenderer(colors=True)),
            'debug': _formatter_config(
                structlog.dev.ConsoleRenderer(colors=False)),
        },
        'handlers': combined_handlers,
        'loggers': {
            '': {
                'handlers': list(combined_handlers.keys()),
                'propagate': True,
            },
        },
    })
    structlog.configure(
        processors=shared_processors + [
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        wrapper_class=structlog.stdlib.BoundLogger,
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )

    # Root logger at DEBUG so every record reaches the handlers, which then
    # filter per-logger through the `RaidenFilter` instances.
    structlog.get_logger('').setLevel('DEBUG')
# Beispiel #31 (score: 0)
 def teardown_method(self, method):
     # Undo any structlog configuration installed by the test so global
     # logging state stays isolated between test methods.
     structlog.reset_defaults()
def __get_meta_logger() -> Any:
    """Build a logger for messages emitted while configuring logging itself.

    Temporarily installs the optimized structlog configuration, grabs a
    named logger under it, then resets the global defaults so the caller's
    own configuration is not affected.
    """
    set_optimized_structlog_config()
    meta_logger = structlog.get_logger("structlog_sentry_logger._config")
    # Leave no global configuration behind; only the bound logger escapes.
    structlog.reset_defaults()
    return meta_logger
def enable_sentry_integration_mode(monkeypatch: MonkeyPatch) -> None:
    """Switch the logger into Sentry-integration mode for the current test."""
    # Drop any structlog configuration left over from a previous test.
    structlog.reset_defaults()
    # The mode is toggled purely by the variable's presence; the value
    # itself is irrelevant.
    integration_flag = "STRUCTLOG_SENTRY_LOGGER_CLOUD_SENTRY_INTEGRATION_MODE_ON"
    monkeypatch.setenv(integration_flag, "ANY_VALUE")
# Beispiel #34 (score: 0)
def set_config(minimal_level=None, json_minimal_level=None,
               json_file=None, override_files=None,
               thread_local_context=False, extra_context_func=None,
               json_only_keys=None, standard_logging_redirect=None,
               override_dict=None, syslog_address=None, syslog_format=None,
               fancy_output=None, auto_dump_locals=True):
    """Set the logging configuration.

    The configuration is cached. So you can call this several times.

    Args:
        standard_logging_redirect: redirect stdlib logging into structlog;
            when ``None``, falls back to the MFLOG_STANDARD_LOGGING_REDIRECT
            environment variable (default: redirect).
        override_dict: per-call override mapping passed to ``Config``;
            defaults to a fresh empty dict.
        thread_local_context: store the structlog context in thread-local
            storage instead of a plain dict.
    """
    global CONFIGURATION_SET
    # A fresh dict per call: the old default of a literal ``{}`` was a
    # mutable default argument shared across every invocation.
    if override_dict is None:
        override_dict = {}
    Config.set_instance(minimal_level=minimal_level,
                        json_minimal_level=json_minimal_level,
                        json_file=json_file,
                        override_files=override_files,
                        thread_local_context=thread_local_context,
                        extra_context_func=extra_context_func,
                        json_only_keys=json_only_keys,
                        override_dict=override_dict,
                        syslog_address=syslog_address,
                        syslog_format=syslog_format,
                        fancy_output=fancy_output,
                        auto_dump_locals=auto_dump_locals)
    if standard_logging_redirect is not None:
        slr = standard_logging_redirect
    else:
        # Fall back to the environment switch; redirect by default.
        if 'MFLOG_STANDARD_LOGGING_REDIRECT' in os.environ:
            slr = (os.environ['MFLOG_STANDARD_LOGGING_REDIRECT'] == '1')
        else:
            slr = True  # default value
    if slr:
        # Configure standard logging redirect to structlog
        d = {
            "version": 1,
            "disable_existing_loggers": False,
            "formatters": {},
            "handlers": {},
            "filters": {},
            "loggers": {
                "": {
                    "level": "NOTSET"
                }
            }
        }
        logging.config.dictConfig(d)
        root_logger = logging.getLogger()
        root_logger.addHandler(StructlogHandler())
        root_logger.setLevel(logging.NOTSET)
    else:
        # Redirect disabled: strip any previously installed StructlogHandler.
        root_logger = logging.getLogger()
        root_logger.handlers = [x for x in root_logger.handlers
                                if not isinstance(x, StructlogHandler)]
    # Configure structlog
    context_class = None
    if thread_local_context:
        context_class = structlog.threadlocal.wrap_dict(dict)
    structlog.reset_defaults()
    structlog.configure(
        processors=[
            fltr,
            add_level,
            add_pid,
            add_extra_context,
            structlog.processors.TimeStamper(fmt="iso", utc=True),
            add_exception_info,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.UnicodeDecoder(),
            # See https://stackoverflow.com/a/51629142
            # we do the formatting in the Logger
            lambda _, __, ed: ed
        ],
        cache_logger_on_first_use=True,
        wrapper_class=MFBoundLogger,
        context_class=context_class,
        logger_factory=MFLogLoggerFactory()
    )
    CONFIGURATION_SET = True