Example No. 1
 def test_wrap_returns_distinct_classes(self):
     D1 = wrap_dict(dict)
     D2 = wrap_dict(dict)
     assert D1 != D2
     assert D1 is not D2
     D1.x = 42
     D2.x = 23
     assert D1.x != D2.x
Example No. 2
 def test_wrap_returns_distinct_classes(self):
     """
     Each call to wrap_dict returns a distinct new class whose context is
     independent from others.
     """
     D1 = wrap_dict(dict)
     D2 = wrap_dict(dict)
     assert D1 != D2
     assert D1 is not D2
     D1.x = 42
     D2.x = 23
     assert D1.x != D2.x
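A minimal sketch of what these tests assert, using only the public wrap_dict API plus the _dict attribute the tests themselves read: each call produces a brand-new class with its own thread-local storage.

from structlog.threadlocal import wrap_dict

D1 = wrap_dict(dict)
D2 = wrap_dict(dict)
d1 = D1({'a': 1})  # stored in D1's own thread-local dict
d2 = D2({'a': 2})  # D2 has separate storage, so the values never collide
assert d1._dict == {'a': 1}
assert d2._dict == {'a': 2}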
Example No. 3
 def test_wrap_returns_distinct_classes(self):
     """
     Each call to wrap_dict returns a distinct new class whose context is
     independent from others.
     """
     D1 = wrap_dict(dict)
     D2 = wrap_dict(dict)
     assert D1 != D2
     assert D1 is not D2
     D1.x = 42
     D2.x = 23
     assert D1.x != D2.x
Example No. 4
def run():
    logging.basicConfig(format=settings.LOGGING_FORMAT,
                        datefmt="%Y-%m-%dT%H:%M:%S",
                        level=settings.LOGGING_LEVEL)

    logging.getLogger('sdc.rabbit').setLevel(logging.INFO)

    # These structlog settings allow bound fields to persist across logger instances within a thread
    structlog.configure(logger_factory=LoggerFactory(), context_class=wrap_dict(dict))
    logger = structlog.getLogger()

    logger.info('Starting SDX Downstream', version=__version__)

    message_processor = MessageProcessor()

    quarantine_publisher = QueuePublisher(
        urls=settings.RABBIT_URLS,
        queue=settings.RABBIT_QUARANTINE_QUEUE
    )

    message_consumer = MessageConsumer(
        durable_queue=True,
        exchange=settings.RABBIT_EXCHANGE,
        exchange_type='topic',
        rabbit_queue=settings.RABBIT_QUEUE,
        rabbit_urls=settings.RABBIT_URLS,
        quarantine_publisher=quarantine_publisher,
        process=message_processor.process
    )

    try:
        message_consumer.run()
    except KeyboardInterrupt:
        message_consumer.stop()
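A hedged sketch of what context_class=wrap_dict(dict) buys in run() above: a field bound on one logger becomes visible to every logger created afterwards in the same thread (the logger names and field are illustrative, not part of the example):

import logging
import structlog
from structlog.stdlib import LoggerFactory
from structlog.threadlocal import wrap_dict

logging.basicConfig(format='%(message)s', level=logging.INFO)
structlog.configure(logger_factory=LoggerFactory(), context_class=wrap_dict(dict))

structlog.get_logger('a').bind(tx_id='abc-123')  # bind once here...
structlog.get_logger('b').info('processing')     # ...and tx_id is rendered here too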
Example No. 5
 def __init__(self,
              log_level="INFO",
              log_network="udp",
              log_address="/dev/log"):
     """Initialize the logger."""
     self.level = {
         "CRITICAL": logging.CRITICAL,
         "ERROR": logging.ERROR,
         "WARNING": logging.WARNING,
         "INFO": logging.INFO,
         "DEBUG": logging.DEBUG,
         "NOTSET": logging.NOTSET,
     }
     self.log_level = log_level
     self.syslog_address = log_address
     self.socktype = socket.SOCK_DGRAM
     if log_network == "tcp":
         self.socktype = socket.SOCK_STREAM
     configure(
         context_class=threadlocal.wrap_dict(dict),
         logger_factory=stdlib.LoggerFactory(),
         wrapper_class=stdlib.BoundLogger,
         processors=[
             stdlib.filter_by_level,
             stdlib.add_logger_name,
             stdlib.add_log_level,
             stdlib.PositionalArgumentsFormatter(),
             processors.TimeStamper(fmt="iso"),
             processors.StackInfoRenderer(),
             processors.format_exc_info,
             processors.UnicodeDecoder(),
             stdlib.render_to_log_kwargs,
         ],
     )
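Once this __init__ has run, any module can pick up a logger that flows through the stdlib pipeline configured above. A hedged usage sketch (logger name and fields are illustrative):

import structlog

log = structlog.get_logger('myapp')
log = log.bind(request_id='r-42')  # persists via the thread-local context
log.info('request received', path='/health')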
Example No. 6
 def test_FallbackEncoder_handles_ThreadLocalDictWrapped_dicts(self):
     """
     Our fallback handling handles properly ThreadLocalDictWrapper values.
     """
     s = json.dumps(wrap_dict(dict)({'a': 42}),
                    default=_json_fallback_handler)
     assert '{"a": 42}' == s
Example No. 7
def get_logger(name):
    Fhandler = logging.FileHandler("analysis.log")
    Fhandler.setFormatter(jsonlogger.JsonFormatter("%(message)s %(name)s"))
    Fhandler.setLevel(level=logging.DEBUG)

    # Chandler = logging.StreamHandler(sys.stdout)
    # Chandler.setLevel(level=logging.INFO)

    # C_filter = logging.Filter()
    # C_filter.filter = lambda record : record.levelno >= logging.WARNING

    # Chandler.addFilter(C_filter)

    root_logger = logging.getLogger(name)
    root_logger.addHandler(Fhandler)
    # root_logger.addHandler(Chandler)
    root_logger.setLevel(level=logging.DEBUG)

    struct_logger = structlog.wrap_logger(
        root_logger,
        context_class=threadlocal.wrap_dict(dict),
        wrapper_class=stdlib.BoundLogger,
        processors=[
            stdlib.filter_by_level,
            stdlib.add_logger_name,
            stdlib.add_log_level,
            stdlib.PositionalArgumentsFormatter(),
            processors.TimeStamper(fmt="iso"),
            processors.StackInfoRenderer(),
            processors.format_exc_info,
            processors.UnicodeDecoder(),
            stdlib.render_to_log_kwargs]
    )
    return struct_logger
Example No. 8
def configure_logging():
    # set up some sane logging, as opposed to what flask does by default
    log_format = "%(message)s"
    handler = logging.StreamHandler()
    logging.basicConfig(level=logging.INFO,
                        format=log_format,
                        handlers=[handler])

    def parse_exception(_, __, event_dict):
        exception = event_dict.get('exception')
        if exception:
            event_dict['exception'] = exception.replace("\"", "'").split("\n")
        return event_dict

    # setup file logging
    renderer_processor = JSONRenderer()
    processors = [
        add_log_level,
        TimeStamper(key='created', fmt='iso'), format_exc_info,
        parse_exception, renderer_processor
    ]
    configure(context_class=wrap_dict(dict),
              logger_factory=LoggerFactory(),
              processors=processors,
              cache_logger_on_first_use=True)
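To make the parse_exception transform concrete, a tiny self-contained check (the traceback string is an assumption for illustration):

event = {'exception': 'Traceback (most recent call last):\nValueError: bad "x"'}
event['exception'] = event['exception'].replace('"', "'").split('\n')
assert event['exception'] == ['Traceback (most recent call last):',
                              "ValueError: bad 'x'"]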
Example No. 9
def _configure_logger(level='INFO', indent=None):
    logging.basicConfig(stream=sys.stdout, level=level, format='%(message)s')

    try:
        indent = int(os.getenv('LOGGING_JSON_INDENT') or indent)
    except (TypeError, ValueError):
        indent = None

    def add_service(_, __, event_dict):
        """
        Add the service name to the event dict.
        """
        event_dict['service'] = os.getenv('NAME', 'sdc-responses-dashboard')
        return event_dict

    renderer_processor = JSONRenderer(indent=indent)
    processors = [
        add_log_level, filter_by_level, add_service, format_exc_info,
        add_logger_name,
        TimeStamper(fmt='%Y-%m-%dT%H:%M:%S', utc=True,
                    key='created_at'), renderer_processor
    ]
    structlog.configure(context_class=wrap_dict(dict),
                        logger_factory=LoggerFactory(),
                        processors=processors,
                        cache_logger_on_first_use=True)
Example No. 10
def init():
    logging.basicConfig(stream=sys.stdout, format='%(message)s')

    logging.getLogger().setLevel(
        LOG_LEVEL_DEBUG if config.DEBUG else LOG_LEVEL_PROD)

    configure(
        processors=[
            filter_by_level, add_log_level, add_app_context, split_pos_args,
            TimeStamper(fmt='iso', utc=True),
            StackInfoRenderer(), format_exc_info,
            JSONRenderer(sort_keys=True)
        ],
        context_class=wrap_dict(dict),
        logger_factory=LoggerFactory(),
        wrapper_class=BoundLogger,
        cache_logger_on_first_use=True,
    )

    for logger_name in [
            'requests', 'statsd', 'amqpstorm', 'datadog.dogstatsd'
    ]:
        logging.getLogger(logger_name).setLevel(logging.WARNING)

    return get()
Example No. 11
 def test_FallbackEncoder_handles_ThreadLocalDictWrapped_dicts(self):
     """
     Our fallback handling handles properly ThreadLocalDictWrapper values.
     """
     s = json.dumps(wrap_dict(dict)({'a': 42}),
                    default=_json_fallback_handler)
     assert '{"a": 42}' == s
Example No. 12
    def setUp(self):
        logging.basicConfig(format=settings.LOGGING_FORMAT,
                            datefmt="%Y-%m-%dT%H:%M:%S",
                            level=settings.LOGGING_LEVEL)

        logging.getLogger('sdc.rabbit').setLevel(logging.INFO)

        structlog.configure(logger_factory=LoggerFactory(), context_class=wrap_dict(dict))
        self.message_processor = MessageProcessor()
Example No. 13
 def test_context_is_global_to_thread(self, D):
     d1 = D({'a': 42})
     d2 = D({'b': 23})
     d3 = D()
     assert {'a': 42, 'b': 23} == d1._dict == d2._dict == d3._dict
     assert d1 == d2 == d3
     D_ = wrap_dict(dict)
     d_ = D_({'a': 42, 'b': 23})
     assert d1 != d_
Example No. 14
    def setUp(self):
        logging.basicConfig(format=settings.LOGGING_FORMAT,
                            datefmt="%Y-%m-%dT%H:%M:%S",
                            level=settings.LOGGING_LEVEL)

        logging.getLogger('sdc.rabbit').setLevel(logging.INFO)

        structlog.configure(logger_factory=LoggerFactory(), context_class=wrap_dict(dict))
        self.message_processor = MessageProcessor()
Example No. 15
def logger_initial_config(service_name=None,  # noqa: C901  pylint: disable=too-complex
                          log_level=None,
                          logger_format=None,
                          logger_date_format=None):
    if not logger_date_format:
        logger_date_format = os.getenv('LOGGING_DATE_FORMAT', "%Y-%m-%dT%H:%M:%S")
    if not log_level:
        log_level = os.getenv('SMS_LOG_LEVEL', 'INFO')
    if not logger_format:
        logger_format = "%(message)s"
    if not service_name:
        service_name = os.getenv('NAME', 'ras-frontstage')
    try:
        indent = int(os.getenv('JSON_INDENT_LOGGING'))
    except (TypeError, ValueError):
        indent = None

    def add_service(logger, method_name, event_dict):  # pylint: disable=unused-argument
        """
        Add the service name to the event dict.
        """
        event_dict['service'] = service_name
        return event_dict

    logging.basicConfig(stream=sys.stdout, level=log_level, format=logger_format)
    oauth_log = logging.getLogger("requests_oauthlib")
    oauth_log.addHandler(logging.NullHandler())
    oauth_log.propagate = False

    def zipkin_ids(logger, method_name, event_dict):
        event_dict['trace'] = ''
        event_dict['span'] = ''
        event_dict['parent'] = ''
        if not flask.has_app_context():
            return event_dict
        if '_zipkin_span' not in g:
            return event_dict
        event_dict['span'] = g._zipkin_span.zipkin_attrs.span_id
        event_dict['trace'] = g._zipkin_span.zipkin_attrs.trace_id
        event_dict['parent'] = g._zipkin_span.zipkin_attrs.parent_span_id
        return event_dict

    def parse_exception(_, __, event_dict):
        exception = event_dict.get('exception')
        if exception:
            event_dict['exception'] = exception.replace("\"", "'").split("\n")
        return event_dict

    # setup file logging
    renderer_processor = JSONRenderer(indent=indent)
    processors = [zipkin_ids, add_log_level, filter_by_level, add_service, format_exc_info,
                  TimeStamper(fmt=logger_date_format, utc=True, key='created_at'), parse_exception, renderer_processor]
    configure(context_class=wrap_dict(dict), logger_factory=LoggerFactory(), processors=processors,
              cache_logger_on_first_use=True)
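For reference, outside a Flask app context the zipkin_ids processor above only adds empty tracing fields; a sketch of the expected result (assuming the nested function were hoisted to module level):

out = zipkin_ids(None, 'info', {})
assert out == {'trace': '', 'span': '', 'parent': ''}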
Example No. 16
def logger_initial_config(log_level="INFO",
                          logger_format="%(message)s",
                          logger_date_format="%Y-%m-%dT%H:%M%s"):
    def add_service(logger, method_name, event_dict):
        """
        Add the service name to the event dict.
        This adds `service: 'ras-frontstage'` to all log lines.
        """
        event_dict["service"] = "ras-frontstage"
        return event_dict

    logging.basicConfig(stream=sys.stdout,
                        level=log_level,
                        format=logger_format)
    auth_log = logging.getLogger(__name__)
    auth_log.addHandler(logging.NullHandler())
    auth_log.propagate = False

    def add_severity_level(logger, method_name, event_dict):
        """
        Add the log level to the event dict.
        """
        if method_name == "warn":
            # The stdlib has an alias
            method_name = "warning"

        event_dict["severity"] = method_name
        return event_dict

    def parse_exception(_, __, event_dict):
        exception = event_dict.get("exception")
        if exception:
            event_dict["exception"] = exception.replace('"', "'").split("\n")
        return event_dict

    # setup file logging
    renderer_processor = JSONRenderer(indent=None)

    processors = [
        add_severity_level,
        add_log_level,
        filter_by_level,
        add_service,
        format_exc_info,
        TimeStamper(fmt=logger_date_format, utc=True, key="created_at"),
        parse_exception,
        renderer_processor,
    ]
    configure(
        context_class=wrap_dict(dict),
        logger_factory=LoggerFactory(),
        processors=processors,
        cache_logger_on_first_use=True,
    )
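A quick check of the warn-to-warning aliasing in add_severity_level (again assuming the nested processor were defined at module level):

assert add_severity_level(None, 'warn', {})['severity'] == 'warning'
assert add_severity_level(None, 'error', {})['severity'] == 'error'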
Example No. 17
    def setUp(self):
        logging.basicConfig(format=settings.LOGGING_FORMAT,
                            datefmt="%Y-%m-%dT%H:%M:%S",
                            level=settings.LOGGING_LEVEL)

        logging.getLogger('sdc.rabbit').setLevel(logging.INFO)

        structlog.configure(logger_factory=LoggerFactory(), context_class=wrap_dict(dict))
        survey = json.loads(cord_survey)
        self.processor = TransformProcessor(survey, ftpconn)
        self.processor.ftp.unzip_and_deliver = MagicMock(return_value=True)
Example No. 18
    def setUp(self):
        logging.basicConfig(format=settings.LOGGING_FORMAT,
                            datefmt="%Y-%m-%dT%H:%M:%S",
                            level=settings.LOGGING_LEVEL)

        logging.getLogger('sdc.rabbit').setLevel(logging.INFO)

        structlog.configure(logger_factory=LoggerFactory(), context_class=wrap_dict(dict))
        survey = json.loads(cora_survey)
        self.processor = CoraProcessor(survey, ftpconn)
        self.processor.ftp.unzip_and_deliver = MagicMock(return_value=True)
Example No. 19
    def test_is_greenlet_local(self, D):
        greenlet = pytest.importorskip("greenlet")
        d = wrap_dict(dict)()
        d['x'] = 42

        def run():
            assert 'x' not in d._dict
            d['x'] = 23

        greenlet.greenlet(run).switch()
        assert 42 == d._dict["x"]
Example No. 20
def configure():
    conf(context_class=threadlocal.wrap_dict(dict),
         logger_factory=stdlib.LoggerFactory(),
         wrapper_class=stdlib.BoundLogger,
         processors=[
             stdlib.PositionalArgumentsFormatter(),
             processors.TimeStamper(fmt="iso"),
             processors.StackInfoRenderer(),
             processors.format_exc_info,
             processors.UnicodeDecoder(),
             stdlib.render_to_log_kwargs,
         ])
Example No. 21
 def test_context_is_global_to_thread(self, D):
     """
     The context is shared between all instances of a wrapped class.
     """
     d1 = D({'a': 42})
     d2 = D({'b': 23})
     d3 = D()
     assert {'a': 42, 'b': 23} == d1._dict == d2._dict == d3._dict
     assert d1 == d2 == d3
     D_ = wrap_dict(dict)
     d_ = D_({'a': 42, 'b': 23})
     assert d1 != d_
Example No. 22
 def test_context_is_global_to_thread(self, D):
     """
     The context is shared between all instances of a wrapped class.
     """
     d1 = D({'a': 42})
     d2 = D({'b': 23})
     d3 = D()
     assert {'a': 42, 'b': 23} == d1._dict == d2._dict == d3._dict
     assert d1 == d2 == d3
     D_ = wrap_dict(dict)
     d_ = D_({'a': 42, 'b': 23})
     assert d1 != d_
Example No. 23
    def test_is_greenlet_local(self, D):
        """
        Context is *not* shared between greenlets.
        """
        d = wrap_dict(dict)()
        d['switch'] = 42

        def run():
            assert 'switch' not in d._dict
            d['switch'] = 23

        greenlet.greenlet(run).switch()
        assert 42 == d._dict["switch"]
Example No. 24
    def test_is_greenlet_local(self, D):
        """
        Context is *not* shared between greenlets.
        """
        d = wrap_dict(dict)()
        d['switch'] = 42

        def run():
            assert 'switch' not in d._dict
            d['switch'] = 23

        greenlet.greenlet(run).switch()
        assert 42 == d._dict["switch"]
Example No. 25
def initialise_logging(
        additional_loggers: dict[str, dict[str, Any]] | None = None) -> None:
    """
    Initialise the StructLog logging setup.

    An example of the additional_loggers format:

    additional_logging = {
        "zeep.transports": {  # set to debug to see XML in loggging
            "level": os.environ.get("ZEEP_TRANSPORT_LOGLEVEL", "INFO").upper(),
            "propagate": True,
            "handlers": ["default"],
        },
        "ims.ims_client": {  # set to debug to see more messages from IMS client
            "level": os.environ.get("IMSCLIENT_LOGLEVEL", "INFO").upper(),
            "propagate": False,
            "handlers": ["default"],
        },
    }

    Args:
        additional_loggers: a dict of extra logger configuration for
            libraries with specific logging requirements.

    """
    if additional_loggers is None:
        additional_loggers = {}

    logging.config.dictConfig(
        dict(
            loggers={
                "": {
                    "handlers": ["default"],
                    "level": f"{LOG_LEVEL}",
                    "propagate": True
                },
                **additional_loggers,
            },
            **logconfig_dict,
        ))

    structlog.configure(
        processors=pre_chain + [
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],  # type: ignore
        context_class=wrap_dict(dict),  # type: ignore
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )
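A hypothetical call mirroring the docstring's format; the logger name and level come from the docstring, and nothing here is required by the function itself:

initialise_logging(additional_loggers={
    'zeep.transports': {
        'level': 'DEBUG',  # set to DEBUG to see the raw XML
        'propagate': True,
        'handlers': ['default'],
    },
})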
Example No. 26
    def test_is_thread_local(self, D):
        class TestThread(threading.Thread):
            def __init__(self, d):
                self._d = d
                threading.Thread.__init__(self)

            def run(self):
                assert 'x' not in self._d._dict
                self._d['x'] = 23
        d = wrap_dict(dict)()
        d['x'] = 42
        t = TestThread(d)
        t.start()
        t.join()
        assert 42 == d._dict['x']
Example No. 27
def configure_logging():
    log_level = logging.INFO
    debug = os.getenv("FLASK_ENV") == "development"
    if debug:
        log_level = logging.DEBUG

    log_handler = logging.StreamHandler(sys.stdout)
    log_handler.setLevel(log_level)
    log_handler.addFilter(lambda record: record.levelno <= logging.WARNING)

    error_log_handler = logging.StreamHandler(sys.stderr)
    error_log_handler.setLevel(logging.ERROR)

    logging.basicConfig(level=log_level,
                        format="%(message)s",
                        handlers=[error_log_handler, log_handler])

    # Set werkzeug logging level
    werkzeug_logger = logging.getLogger("werkzeug")
    werkzeug_logger.setLevel(level=log_level)

    def parse_exception(_, __, event_dict):
        if debug:
            return event_dict
        exception = event_dict.get("exception")
        if exception:
            event_dict["exception"] = exception.replace('"', "'").split("\n")
        return event_dict

    # setup file logging
    renderer_processor = (ConsoleRenderer() if debug
                          else JSONRenderer(serializer=json_dumps))
    processors = [
        add_log_level,
        TimeStamper(key="created", fmt="iso"),
        add_service,
        format_exc_info,
        parse_exception,
        renderer_processor,
    ]

    configure(
        context_class=wrap_dict(dict),
        logger_factory=LoggerFactory(),
        processors=processors,
        cache_logger_on_first_use=True,
    )
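The two stream handlers above split output by severity: the stdout handler filters out records above WARNING, so errors reach stderr only. A hedged sketch of the effect:

import logging

configure_logging()
logging.getLogger(__name__).info('written to stdout')
logging.getLogger(__name__).error('written to stderr')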
Example No. 28
    def test_is_thread_local(self, D):
        """
        The context is *not* shared between threads.
        """
        class TestThread(threading.Thread):
            def __init__(self, d):
                self._d = d
                threading.Thread.__init__(self)

            def run(self):
                assert 'tl' not in self._d._dict
                self._d['tl'] = 23
        d = wrap_dict(dict)()
        d['tl'] = 42
        t = TestThread(d)
        t.start()
        t.join()
        assert 42 == d._dict['tl']
Example No. 29
    def test_is_thread_local(self, D):
        """
        The context is *not* shared between threads.
        """
        class TestThread(threading.Thread):
            def __init__(self, d):
                self._d = d
                threading.Thread.__init__(self)

            def run(self):
                assert 'tl' not in self._d._dict
                self._d['tl'] = 23

        d = wrap_dict(dict)()
        d['tl'] = 42
        t = TestThread(d)
        t.start()
        t.join()
        assert 42 == d._dict['tl']
Example No. 30
def logging_setup(app):
    # Add file handler

    file_handler = handlers.RotatingFileHandler(
        app.config['LOGGING_LOCATION'],
        maxBytes=4 * 1024 * 1024,  # 4 MB
        backupCount=10
    )
    file_handler.setLevel(app.config['LOGGING_LEVEL'])
    file_handler_formatter = Formatter(app.config['LOGGING_FORMAT'])
    file_handler.setFormatter(file_handler_formatter)
    app.logger.addHandler(file_handler)

    # Configure structlog
    configure(
        context_class=wrap_dict(dict),
        logger_factory=lambda: app.logger,
        processors=[processors.JSONRenderer()]
    )
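A hedged usage sketch; the config keys mirror the ones the function reads, and their values are assumptions:

from flask import Flask
import structlog

app = Flask(__name__)
app.config.update(LOGGING_LOCATION='app.log',
                  LOGGING_LEVEL='INFO',
                  LOGGING_FORMAT='%(message)s')
logging_setup(app)
structlog.get_logger().info('started')  # JSON-rendered, emitted via app.logger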
Example No. 31
def configure_logging():
    # set up some sane logging, as opposed to what flask does by default
    log_format = "%(message)s"
    levels = {
        'CRITICAL': logging.CRITICAL,
        'ERROR': logging.ERROR,
        'WARNING': logging.WARNING,
        'INFO': logging.INFO,
        'DEBUG': logging.DEBUG,
    }
    handler = logging.StreamHandler()
    logging.basicConfig(level=levels[EQ_LOG_LEVEL],
                        format=log_format,
                        handlers=[handler])

    # quieten boto logging to ERROR as it logs far too much and it's only
    # used for cloudwatch logging
    logging.getLogger("botocore").setLevel(logging.ERROR)
    if EQ_CLOUDWATCH_LOGGING:
        _setup_cloud_watch_logging()

    # Set werkzeug logging level
    werkzeug_logger = logging.getLogger('werkzeug')
    werkzeug_logger.setLevel(level=levels[EQ_WERKZEUG_LOG_LEVEL])

    # setup file logging
    rotating_log_file = RotatingFileHandler(filename="eq.log",
                                            maxBytes=1048576,
                                            backupCount=10)
    logging.getLogger().addHandler(rotating_log_file)
    renderer_processor = (ConsoleRenderer() if EQ_DEVELOPER_LOGGING
                          else JSONRenderer())
    processors = [
        add_log_level,
        TimeStamper(key='created', fmt='iso'), add_service, format_exc_info,
        renderer_processor
    ]
    configure(context_class=wrap_dict(dict),
              logger_factory=LoggerFactory(),
              processors=processors,
              cache_logger_on_first_use=True)
Example No. 32
def configure_logging(cfg_type):
    # Get the logging config, falling back to the LOCAL profile
    log_cfg = log_cfgs.get(cfg_type.upper(), log_cfgs['LOCAL'])
    logging.config.dictConfig(log_cfg)

    # Configure the logger
    configure(context_class=threadlocal.wrap_dict(dict),
              logger_factory=stdlib.LoggerFactory(),
              wrapper_class=stdlib.BoundLogger,
              processors=[
                  stdlib.filter_by_level, stdlib.add_logger_name,
                  stdlib.add_log_level,
                  stdlib.PositionalArgumentsFormatter(),
                  processors.TimeStamper(fmt="iso"),
                  processors.StackInfoRenderer(), processors.format_exc_info,
                  processors.UnicodeDecoder(), stdlib.render_to_log_kwargs
              ])
Example No. 33
def get_logger() -> Any:
    global LOGGER
    if not LOGGER:
        from structlog import configure, processors, stdlib, threadlocal, get_logger
        from pythonjsonlogger import jsonlogger

        logging.config.dictConfig(dict_config)

        configure(
            context_class=threadlocal.wrap_dict(dict),
            logger_factory=stdlib.LoggerFactory(),
            wrapper_class=stdlib.BoundLogger,
            processors=[
                # Filter only the required log levels into the log output
                stdlib.filter_by_level,
                # Adds logger=module_name (e.g __main__)
                stdlib.add_logger_name,
                # Uppercase structlog's event name which shouldn't be convoluted with AWS events.
                event_uppercase,
                # Censor secure data
                censor_header,
                # Allow for string interpolation
                stdlib.PositionalArgumentsFormatter(),
                # Render timestamps to ISO 8601
                processors.TimeStamper(fmt="iso"),
                # Include the stack dump when stack_info=True
                processors.StackInfoRenderer(),
                # Include the application exception when exc_info=True
                # e.g log.exception() or log.warning(exc_info=True)'s behavior
                processors.format_exc_info,
                # Decodes the unicode values in any kv pairs
                processors.UnicodeDecoder(),
                # Creates the necessary args, kwargs for log()
                stdlib.render_to_log_kwargs,
            ],
            cache_logger_on_first_use=True,
        )
        LOGGER = get_logger()
    return LOGGER
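Because of the module-level LOGGER cache, repeated calls are cheap and return the identical object:

log_a = get_logger()
log_b = get_logger()
assert log_a is log_b  # configured once, then reused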
Example No. 34
def configure_logging():
    # set up some sane logging, as opposed to what flask does by default
    log_format = "%(message)s"
    levels = {
        'CRITICAL': logging.CRITICAL,
        'ERROR': logging.ERROR,
        'WARNING': logging.WARNING,
        'INFO': logging.INFO,
        'DEBUG': logging.DEBUG,
    }
    handler = logging.StreamHandler()
    logging.basicConfig(level=levels[EQ_LOG_LEVEL],
                        format=log_format,
                        handlers=[handler])

    # Set werkzeug logging level
    werkzeug_logger = logging.getLogger('werkzeug')
    werkzeug_logger.setLevel(level=levels[EQ_WERKZEUG_LOG_LEVEL])

    def parse_exception(_, __, event_dict):
        if EQ_DEVELOPER_LOGGING:
            return event_dict
        exception = event_dict.get('exception')
        if exception:
            event_dict['exception'] = exception.replace("\"", "'").split("\n")
        return event_dict

    # setup file logging
    renderer_processor = (ConsoleRenderer() if EQ_DEVELOPER_LOGGING
                          else JSONRenderer())
    processors = [
        add_log_level,
        TimeStamper(key='created', fmt='iso'), add_service, format_exc_info,
        parse_exception, renderer_processor
    ]
    configure(context_class=wrap_dict(dict),
              logger_factory=LoggerFactory(),
              processors=processors,
              cache_logger_on_first_use=True)
Example No. 35
def launch_gunicorn():
    from structlog.stdlib import LoggerFactory
    from structlog.threadlocal import wrap_dict
    structlog.configure(context_class=wrap_dict(dict), logger_factory=LoggerFactory())

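    # NOTE: configure() above has already set the global configuration, so
    # this configure_once() call issues a RuntimeWarning and changes nothing.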
    structlog.configure_once(
        #processors=chain,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )

    try:
        httpd = HTTPServer(('0.0.0.0', 8000), MetricsHandler)
    except (OSError, socket.error):
        return

    thread = PrometheusEndpointServer(httpd)
    thread.daemon = True
    thread.start()
    #log.info('Exporting Prometheus metrics on port 8000')

    app.run(host='0.0.0.0')
Example No. 36
        event_kw['greenlet_id'] = id(gevent.getcurrent())
        return super(BoundLogger, self)._proxy_to_logger(method_name, event,
                                                         **event_kw)

structlog.configure(
    processors=[
        structlog.stdlib.filter_by_level,
        structlog.processors.TimeStamper(fmt='iso', utc=True),
        structlog.processors.StackInfoRenderer(),
        _safe_exc_info_renderer,
        _record_module,
        _record_level,
        _format_string_renderer,
        structlog.processors.JSONRenderer(),
    ],
    context_class=wrap_dict(dict),
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=BoundLogger,
    cache_logger_on_first_use=True,
)
get_logger = structlog.get_logger


def configure_logging(is_prod=False):
    # The is_prod argument is ignored and only retained for compatibility.
    tty_handler = logging.StreamHandler(sys.stdout)
    if sys.stdout.isatty():
        # Use a more human-friendly format.
        formatter = colorlog.ColoredFormatter(
            '%(log_color)s[%(levelname)s]%(reset)s %(message)s',
            reset=True, log_colors={'DEBUG': 'cyan', 'INFO': 'green',
Example No. 37
def log():
    """
    Returns a ReturnLogger with a freshly wrapped OrderedDict.
    """
    return wrap_logger(logger(), context_class=wrap_dict(OrderedDict))
Example No. 38
def D():
    """
    Returns a dict class wrapped in _ThreadLocalDictWrapper.
    """
    return wrap_dict(dict)
Example No. 39
 def test_FallbackEncoder_handles_ThreadLocalDictWrapped_dicts(self):
     s = json.dumps(wrap_dict(dict)({'a': 42}),
                    cls=_JSONFallbackEncoder)
     assert '{"a": 42}' == s
Example No. 40
def D():
    """
    Returns a dict class wrapped in _ThreadLocalDictWrapper.
    """
    return wrap_dict(dict)
Example No. 41
            'class': 'pythonjsonlogger.jsonlogger.JsonFormatter'
        }
    },
    'handlers': {
        'json': {
            'class': 'logging.StreamHandler',
            'formatter': 'json'
        }
    },
    'loggers': {
        '': {
            'handlers': ['json'],
            'level': logging.DEBUG
        }
    }
})

configure(context_class=threadlocal.wrap_dict(dict),
          logger_factory=stdlib.LoggerFactory(),
          wrapper_class=stdlib.BoundLogger,
          processors=[
              stdlib.filter_by_level, stdlib.add_logger_name,
              stdlib.add_log_level,
              stdlib.PositionalArgumentsFormatter(),
              processors.TimeStamper(fmt="iso"),
              processors.StackInfoRenderer(), processors.format_exc_info,
              processors.UnicodeDecoder(), stdlib.render_to_log_kwargs
          ])

log = structlog.getLogger()
Example No. 42
    MONGO_DB = 'eschernode'
    FILEBEAT_LOGFILE = os.path.join(HOME_DIR, 'results', 'filebeat.log')
    ALLOWED_HOSTS = ['172.30.0.251']

# Structlog config
structlog.configure(
    processors=[
        structlog.stdlib.add_logger_name, structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.processors.UnicodeDecoder(),
        structlog.processors.JSONRenderer()
    ],
    context_class=wrap_dict(dict),
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)

logger = logging.getLogger('train_logs')
logger.setLevel(logging.INFO)

handler = WatchedFileHandler(FILEBEAT_LOGFILE)
logger.addHandler(handler)

mongoClient = MongoClient(MONGO_HOST,
                          MONGO_PORT,
                          maxPoolSize=200,
                          connect=False)
Example No. 43
def log():
    """
    Returns a ReturnLogger with a freshly wrapped OrderedDict.
    """
    return wrap_logger(logger(), context_class=wrap_dict(OrderedDict))
Example No. 44
def log(logger):
    """
    Returns a ReturnLogger with a freshly wrapped dict.
    """
    return wrap_logger(logger, context_class=wrap_dict(dict))
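A hedged usage sketch for this fixture, assuming structlog's ReturnLogger as the logger under test:

from structlog import ReturnLogger

bound = log(ReturnLogger()).bind(user='alice')
bound.msg('hi')  # the bound context travels with the event via the wrapped dict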
Example No. 45
        'botocore': {
            'level': 'WARNING',
            'propagate': True
        },
        'boto3': {
            'level': 'WARNING',
            'propagate': True
        },
        'cssutils': {
            'level': 'CRITICAL',
            'propagate': True
        }
    },
}

WrappedDictClass = wrap_dict(dict)

structlog.configure(
    processors=[
        structlog.stdlib.filter_by_level, structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.StackInfoRenderer(), structlog.processors.format_exc_info,
        structlog.processors.JSONRenderer()
    ],
    context_class=WrappedDictClass,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)
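Because WrappedDictClass is created once at import time, every structlog logger in this process shares a single thread-local context. A hedged sketch of what that implies:

import structlog

structlog.get_logger().bind(job_id='j-1')
# The same thread-local storage is visible through the class itself:
assert WrappedDictClass()._dict.get('job_id') == 'j-1'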
Example No. 46
def log():
    return wrap_logger(logger(), context_class=wrap_dict(OrderedDict))