Example No. 1
def init():
    logging.basicConfig(stream=sys.stdout, format='%(message)s')

    logging.getLogger().setLevel(
        LOG_LEVEL_DEBUG if config.DEBUG else LOG_LEVEL_PROD)

    configure(
        processors=[
            filter_by_level, add_log_level, add_app_context, split_pos_args,
            TimeStamper(fmt='iso', utc=True),
            StackInfoRenderer(), format_exc_info,
            JSONRenderer(sort_keys=True)
        ],
        context_class=wrap_dict(dict),
        logger_factory=LoggerFactory(),
        wrapper_class=BoundLogger,
        cache_logger_on_first_use=True,
    )

    for logger_name in [
            'requests', 'statsd', 'amqpstorm', 'datadog.dogstatsd'
    ]:
        logging.getLogger(logger_name).setLevel(logging.WARNING)

    return get()
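A minimal usage sketch, assuming the module-level get() above is structlog's get_logger and that init() runs once at startup; the event name and field below are made up:

log = init()
log.info("user_logged_in", user_id=42)
# With add_log_level, TimeStamper(fmt='iso', utc=True) and JSONRenderer(sort_keys=True)
# in the chain, each call prints one JSON object, roughly:
# {"event": "user_logged_in", "level": "info", "timestamp": "...", "user_id": 42}
# (add_app_context and split_pos_args add further project-specific keys.)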
Example No. 2
    def _set_structured_logger(context, logger_name=None, service_tag=None):
        """
            Return a structured logger that can be passed into the
            LogLambda decorator.

            context - AWS Lambda context object
            logger_name - name of the logger to configure (root logger if omitted)
            service_tag - name of the service this Lambda belongs to

            The log level is taken from LogLambdaBase._log_level.
        """
        if LogLambdaBase._structured_logger:
            return LogLambdaBase._structured_logger

        stage_tag = get_stage(context)

        if logger_name:
            logger = logging.getLogger(str(logger_name))
        else:
            logger = logging.getLogger()

        # The Python logger in AWS Lambda comes with a preset format.
        # To change the format of the logging statements, remove the
        # existing handlers and add a new handler with the required format.
        # Iterate over a copy: removing handlers from logger.handlers while
        # iterating it directly would skip entries.
        for handler in list(logger.handlers):
            logger.removeHandler(handler)

        LogLambdaBase._log_handler = LogLambdaBase._get_handler()

        LogLambdaBase._log_handler.setFormatter(logging.Formatter(FORMAT))
        logger.addHandler(LogLambdaBase._log_handler)
        logger.setLevel(LogLambdaBase._log_level)
        logger.propagate = False

        wlogger = wrap_logger(
            logger,
            processors=[
                LogLambdaBase._filter_pii_info,
                add_logger_name,
                add_log_level,
                TimeStamper(fmt="iso"),
                StackInfoRenderer(),
                format_exc_info,
                JSONRenderer(separators=(',', ':'), sort_keys=True)])

        inferred_lambda_tag = context.function_name
        if stage_tag is not None:
            inferred_lambda_tag = inferred_lambda_tag.replace('{0}_'.format(stage_tag), '', 1)

        LogLambdaBase._structured_logger = wlogger.bind(
            aws_lambda_name=context.function_name,
            aws_lambda_request_id=context.aws_request_id,
            internal_service_tag=service_tag,
            inferred_stage_tag=stage_tag,
            inferred_lambda_tag=inferred_lambda_tag
        )
        return LogLambdaBase._structured_logger
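A hedged sketch of a call site; the handler below and its arguments are hypothetical, and in the original code the call is presumably made by the LogLambda decorator rather than by hand:

def handler(event, context):
    # Hypothetical Lambda entry point, not part of the original class.
    log = LogLambdaBase._set_structured_logger(
        context, logger_name="my_lambda", service_tag="billing")
    log.info("lambda.invoked", event_keys=sorted(event))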
Example No. 3
    def configure_logging(self):
        if self.app.testing:
            structlog.reset_defaults()

        disabled = [
            "docker.utils.config",
            "docker.auth",
            "docker.api.build",
            "docker.api.swarm",
            "docker.api.image",
            "rq.worker",
            "werkzeug",
            "requests",
            "urllib3",
        ]

        for logger in disabled:
            log = logging.getLogger(logger)
            log.setLevel(logging.ERROR)
            log.disabled = True
        self.app.logger.disabled = True

        logging.basicConfig(level=self.log_level,
                            stream=sys.stdout,
                            format="%(message)s")

        chain = [
            filter_by_level,
            add_log_level,
            add_logger_name,
            TimeStamper(fmt="iso"),
            StackInfoRenderer(),
            format_exc_info,
            JSONRenderer(indent=1, sort_keys=True),
        ]

        logger = logging.getLogger(__name__)

        if self.testing:
            chain = []
            logger = structlog.ReturnLogger()

        log = structlog.wrap_logger(
            logger,
            processors=chain,
            context_class=dict,
            wrapper_class=structlog.stdlib.BoundLogger,
            # cache_logger_on_first_use=True,
        )
        self.logger = log
        self.app.logger = self.logger
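structlog.ReturnLogger, used in the testing branch above, does not write anywhere; it simply hands back the arguments it is called with, which keeps test runs quiet. A minimal illustration, independent of the class above:

import structlog

rl = structlog.ReturnLogger()
rl.msg("hello", x=1)  # returns the call's arguments instead of emitting a log line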
Example No. 4
def set_logger_config():
    structlog.configure(
        processors=[
            structlog.stdlib.PositionalArgumentsFormatter(),
            StackInfoRenderer(),
            format_exc_info,
            structlog.processors.UnicodeDecoder(),
            TimeStamper(fmt="iso", utc=True),
            add_pid_thread,
            add_log_level,
            add_caller_info,
            order_keys,
            BetterConsoleRenderer(),
        ],
        context_class=structlog.threadlocal.wrap_dict(dict),
        cache_logger_on_first_use=True,
    )
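add_pid_thread, add_caller_info, order_keys, and BetterConsoleRenderer are project-specific and not shown here. For reference, a structlog processor is any callable taking (logger, method_name, event_dict) and returning the event dict; a hypothetical add_pid_thread could look like this:

import os
import threading

def add_pid_thread(logger, method_name, event_dict):
    # Attach the current process id and thread name to every event.
    event_dict["pid"] = os.getpid()
    event_dict["thread"] = threading.current_thread().name
    return event_dict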
Example No. 5
def config_logger(verbose=0, logfile=None):
    if verbose >= 2:
        level = logging.DEBUG
    elif verbose >= 1:
        level = logging.INFO
    else:
        level = logging.WARNING

    if logfile is not None:
        _LOG_DISPATCHER.add_handler(LogWriter(logfile))

    processors = [
        add_log_level,
        add_timestamp,
        format_exc_info,
        StackInfoRenderer(),
        _LOG_DISPATCHER,
        LogRenderer(level=level),
    ]
    configure(processors=processors)
Example No. 6
def setup(sentry: str, debug: bool = False) -> None:
    processors = [
        filter_by_level,
        add_log_level,
        add_logger_name,
        PositionalArgumentsFormatter(),
        StackInfoRenderer(),
        format_exc_info,
        UnicodeDecoder(),
    ]

    configure(
        logger_factory=LoggerFactory(),
        wrapper_class=BoundLogger,
        cache_logger_on_first_use=True,
    )

    if debug:
        styles = ConsoleRenderer.get_default_level_styles()
        styles["debug"] = DIM
        processors += [
            TimeStamper(fmt="%Y-%m-%d %H:%M:%S"),
            ConsoleRenderer(level_styles=styles),
        ]
    else:
        handler = StreamHandler()
        formatter = CustomJsonFormatter("%(levelname)s %(name)s %(message)s")
        handler.setFormatter(formatter)
        for module in ("tornado", "tortoise", "aiomysql"):
            getLogger(module).addHandler(handler)

        sentry_logging = LoggingIntegration(level=INFO, event_level=ERROR)
        init(sentry, integrations=[sentry_logging])
        processors.append(JSONRenderer())

    handler = StreamHandler()
    configure(processors=processors)
    log = get_logger("api")
    log.addHandler(handler)
    log.propagate = False
    log.setLevel(DEBUG if debug else INFO)
Example No. 7
def sir():
    return StackInfoRenderer()
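A hedged example of dropping the factory above into a processor chain; configure, format_exc_info, and JSONRenderer are assumed to come from the usual structlog imports:

configure(processors=[sir(), format_exc_info, JSONRenderer()])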
Example No. 8

# SPDX-License-Identifier: MIT

# Author: Shane R. Spencer <*****@*****.**>

# Structlog: https://github.com/hynek/structlog
from structlog import get_logger
from structlog.dev import ConsoleRenderer, set_exc_info, _has_colorama
from structlog.processors import TimeStamper, StackInfoRenderer, format_exc_info
from structlog.contextvars import merge_contextvars

_logger = get_logger(processors=[
    merge_contextvars,
    StackInfoRenderer(),
    set_exc_info,
    format_exc_info,
    TimeStamper(fmt="%Y-%m-%d %H:%M.%S", utc=False),
    ConsoleRenderer(colors=_has_colorama),
])

access_log = _logger.bind()
app_log = _logger.bind()
gen_log = _logger.bind()


class Wut(object):
    def __init__(self):
        ...
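Because merge_contextvars runs first in the chain, anything bound through structlog.contextvars shows up on all three loggers. A small hedged usage sketch (the request_id value is made up):

from structlog.contextvars import bind_contextvars, clear_contextvars

clear_contextvars()
bind_contextvars(request_id="abc123")
access_log.info("request.started")  # rendered line includes request_id=abc123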
Example No. 9
def main(
    paths: List[Path],
    input_file,
    output_base: Path,
    zlevel: int,
    clean_inputs: bool,
    block_size: int,
):
    # Structured (json) logging goes to stdout
    structlog.configure(processors=[
        StackInfoRenderer(),
        format_exc_info,
        TimeStamper(utc=False, fmt="iso"),
        JSONRenderer(),
    ])

    if (not output_base) and (not clean_inputs):
        raise click.UsageError(
            "Need to specify either a different output directory (--output-base) "
            "or to clean inputs (--clean-inputs)")

    if input_file:
        paths = chain((Path(p.strip()) for p in input_file), paths)

    with rasterio.Env():
        total = failures = 0
        for path in paths:
            total += 1

            # Input is either a tar.gz file, or a directory containing an MTL (already extracted)
            if path.suffix.lower() == ".gz":
                with tarfile.open(str(path), "r") as in_tar:
                    success = repackage_tar(
                        path,
                        _tar_members(in_tar),
                        _output_tar_path(output_base, path),
                        clean_inputs=clean_inputs,
                        zlevel=zlevel,
                        block_size=(block_size, block_size),
                    )

            elif path.is_dir():
                success = repackage_tar(
                    path,
                    _folder_members(path),
                    _output_tar_path_from_directory(output_base, path),
                    clean_inputs=clean_inputs,
                    zlevel=zlevel,
                    block_size=(block_size, block_size),
                )
            else:
                raise ValueError(
                    f"Expected either tar.gz or a dataset folder. "
                    f"Got: {repr(path)}")

            if not success:
                failures += 1
    if total > 1:
        _LOG.info(
            "node.finish",
            host=socket.getfqdn(),
            total_count=total,
            failure_count=failures,
        )
    sys.exit(failures)
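With the JSON chain configured at the top of main(), the final _LOG.info call writes a single JSON object to stdout, roughly of this shape (values are illustrative):

# {"event": "node.finish", "host": "worker-01.example.net",
#  "total_count": 10, "failure_count": 1, "timestamp": "2021-01-01T00:00:00"}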
Example No. 10
from __future__ import absolute_import, division, print_function

import warnings

from structlog._compat import OrderedDict
from structlog._generic import BoundLogger
from structlog._loggers import PrintLoggerFactory
from structlog.processors import (
    KeyValueRenderer,
    StackInfoRenderer,
    format_exc_info,
)

_BUILTIN_DEFAULT_PROCESSORS = [
    StackInfoRenderer(), format_exc_info,
    KeyValueRenderer()
]
_BUILTIN_DEFAULT_CONTEXT_CLASS = OrderedDict
_BUILTIN_DEFAULT_WRAPPER_CLASS = BoundLogger
_BUILTIN_DEFAULT_LOGGER_FACTORY = PrintLoggerFactory()
_BUILTIN_CACHE_LOGGER_ON_FIRST_USE = False


class _Configuration(object):
    """
    Global defaults.
    """
    is_configured = False
    default_processors = _BUILTIN_DEFAULT_PROCESSORS[:]
    default_context_class = _BUILTIN_DEFAULT_CONTEXT_CLASS