Example #1
def configure_logging(pre_chain):
    """
    Configure logging to use ProcessorFormatter.
    """
    return logging.config.dictConfig({
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "plain": {
                "()": ProcessorFormatter,
                "processor": ConsoleRenderer(colors=False),
                "foreign_pre_chain": pre_chain,
                "format": "%(message)s [in %(funcName)s]"
            }
        },
        "handlers": {
            "default": {
                "level": "DEBUG",
                "class": "logging.StreamHandler",
                "formatter": "plain",
            },
        },
        "loggers": {
            "": {
                "handlers": ["default"],
                "level": "DEBUG",
                "propagate": True,
            },
        }
    })
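This closely mirrors the stdlib-integration recipe from structlog's documentation: ProcessorFormatter renders both structlog events and records coming from plain logging. A minimal sketch of how the function above might be wired up follows; the pre_chain contents and the "demo" logger name are illustrative assumptions, not part of the example:

import structlog
from structlog.processors import TimeStamper
from structlog.stdlib import BoundLogger, LoggerFactory, ProcessorFormatter, add_log_level

timestamper = TimeStamper(fmt="%Y-%m-%d %H:%M:%S")

# Processors applied to records that do NOT originate from structlog.
pre_chain = [add_log_level, timestamper]
configure_logging(pre_chain)

structlog.configure(
    processors=[
        add_log_level,
        timestamper,
        # Hand the event dict to ProcessorFormatter for final rendering.
        ProcessorFormatter.wrap_for_formatter,
    ],
    logger_factory=LoggerFactory(),
    wrapper_class=BoundLogger,
)

structlog.get_logger("demo").info("hello", user="alice")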
Example #2
    def get_logger_handler(self):
        logger_handler = logging.StreamHandler(self.stdout)
        formatter = ProcessorFormatter(
            ConsoleRenderer()  # colorize=True
        )  # TODO: some kind of logging.getFormatter (using key, passed to dictConfig)?
        logger_handler.setFormatter(formatter)
        return logger_handler
Example #3
def alternate_dev_formatter():
    debugging = ConsoleRenderer()

    def with_memoized_loggers(logger, call_name, event_dict):
        # info_formatter is assumed to be defined elsewhere in the source module.
        if logger.getEffectiveLevel() > logging.DEBUG:
            return info_formatter(logger, call_name, event_dict)
        return debugging(logger, call_name, event_dict)

    return with_memoized_loggers
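The inner closure follows the structlog processor signature (logger, method_name, event_dict), so it can sit at the end of a processor chain. A hedged wiring sketch, assuming info_formatter is defined in the same module; the stdlib LoggerFactory is needed because the closure calls logger.getEffectiveLevel(), which only stdlib loggers provide:

import structlog
from structlog.stdlib import LoggerFactory, add_log_level

structlog.configure(
    processors=[
        add_log_level,
        alternate_dev_formatter(),  # last processor renders the event dict to a string
    ],
    logger_factory=LoggerFactory(),  # stdlib loggers provide getEffectiveLevel()
)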
Example #4
def setup(sentry: str, debug: bool = False) -> None:
    processors = [
        filter_by_level,
        add_log_level,
        add_logger_name,
        PositionalArgumentsFormatter(),
        StackInfoRenderer(),
        format_exc_info,
        UnicodeDecoder(),
    ]

    configure(
        logger_factory=LoggerFactory(),
        wrapper_class=BoundLogger,
        cache_logger_on_first_use=True,
    )

    if debug:
        styles = ConsoleRenderer.get_default_level_styles()
        styles["debug"] = DIM
        processors += [
            TimeStamper(fmt="%Y-%m-%d %H:%M:%S"),
            ConsoleRenderer(level_styles=styles),
        ]
    else:
        handler = StreamHandler()
        formatter = CustomJsonFormatter("%(levelname)s %(name)s %(message)s")
        handler.setFormatter(formatter)
        for module in ("tornado", "tortoise", "aiomysql"):
            getLogger(module).addHandler(handler)

        sentry_logging = LoggingIntegration(level=INFO, event_level=ERROR)
        init(sentry, integrations=[sentry_logging])
        processors.append(JSONRenderer())

    handler = StreamHandler()
    configure(processors=processors)
    log = get_logger("api")
    log.addHandler(handler)
    log.propagate = False
    log.setLevel(DEBUG if debug else INFO)
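CustomJsonFormatter is defined elsewhere in that project and its exact behavior is not shown here. Purely as an illustration, a minimal stand-in following the common python-json-logger pattern might look like this (the added field is an arbitrary choice):

from pythonjsonlogger import jsonlogger

class CustomJsonFormatter(jsonlogger.JsonFormatter):
    def add_fields(self, log_record, record, message_dict):
        super().add_fields(log_record, record, message_dict)
        # Example tweak only: expose a lowercase "level" key alongside the stdlib fields.
        log_record.setdefault("level", record.levelname.lower())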
Example #5
def configure_logging():
    log_level = logging.INFO
    debug = os.getenv("FLASK_ENV") == "development"
    if debug:
        log_level = logging.DEBUG

    log_handler = logging.StreamHandler(sys.stdout)
    log_handler.setLevel(log_level)
    log_handler.addFilter(lambda record: record.levelno <= logging.WARNING)

    error_log_handler = logging.StreamHandler(sys.stderr)
    error_log_handler.setLevel(logging.ERROR)

    logging.basicConfig(level=log_level,
                        format="%(message)s",
                        handlers=[error_log_handler, log_handler])

    # Set werkzeug logging level
    werkzeug_logger = logging.getLogger("werkzeug")
    werkzeug_logger.setLevel(level=log_level)

    def parse_exception(_, __, event_dict):
        if debug:
            return event_dict
        exception = event_dict.get("exception")
        if exception:
            event_dict["exception"] = exception.replace('"', "'").split("\n")
        return event_dict

    # setup file logging
    renderer_processor = (ConsoleRenderer() if debug
                          else JSONRenderer(serializer=json_dumps))
    processors = [
        add_log_level,
        TimeStamper(key="created", fmt="iso"),
        add_service,
        format_exc_info,
        parse_exception,
        renderer_processor,
    ]

    configure(
        context_class=wrap_dict(dict),
        logger_factory=LoggerFactory(),
        processors=processors,
        cache_logger_on_first_use=True,
    )
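add_service and json_dumps are application helpers imported elsewhere; json_dumps is presumably a json.dumps wrapper, and a hypothetical add_service processor (the service name here is made up) only needs the standard processor shape:

def add_service(_, __, event_dict):
    # Hypothetical: tag every event with the emitting service's name.
    event_dict["service"] = "my-flask-app"
    return event_dict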
Example #6
    def tail(self, pretty):
        '''
        Tail the logs of a topic
        '''
        master = self.ensure_master()
        if not self.state['default_topic']:
            err_msg = 'No default topic'
            prRed(err_msg)
            sys.exit(1)
        else:
            topic_name = self.state['default_topic']['topic_name']

            tail_logs_url = self.TAIL_LOGS_URL.format(host=master.host,
                                                      port=master.port,
                                                      topic_name=topic_name)

            resp = None
            try:
                session = requests.session()
                resp = session.get(tail_logs_url, stream=True)
                c = ConsoleRenderer()
                for line in resp.iter_lines():
                    try:
                        result = json.loads(line.decode('utf-8')).get('result')
                        if result:
                            log = json.loads(result)
                        else:
                            continue
                    except ValueError:
                        print('Could not parse log line: {}'.format(line))
                        continue
                    if pretty:
                        # ConsoleRenderer instances are callables with the
                        # processor signature (logger, method_name, event_dict).
                        print(c(None, None, log))
                    else:
                        print(log)
            except requests.exceptions.ConnectionError:
                err_msg = 'Cannot request master'
                prRed(err_msg)
                sys.exit(1)
            except Exception:
                if resp:
                    resp.close()
                raise
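The pretty branch works because a ConsoleRenderer instance is an ordinary callable with the processor signature, so it can render any dict, not just events flowing through structlog. For instance (output is approximate, colors disabled for clarity):

from structlog.dev import ConsoleRenderer

c = ConsoleRenderer(colors=False)
print(c(None, None, {"event": "worker started", "level": "info", "topic": "orders"}))
# Prints something like: [info     ] worker started    topic=orders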
Example #7
def configure_logging():
    # set up some sane logging, as opposed to what flask does by default
    log_format = "%(message)s"
    levels = {
        'CRITICAL': logging.CRITICAL,
        'ERROR': logging.ERROR,
        'WARNING': logging.WARNING,
        'INFO': logging.INFO,
        'DEBUG': logging.DEBUG,
    }
    handler = logging.StreamHandler()
    logging.basicConfig(level=levels[EQ_LOG_LEVEL],
                        format=log_format,
                        handlers=[handler])

    # turn botocore logging down to ERROR as it logs far too much and it's only
    # used for cloudwatch logging
    logging.getLogger("botocore").setLevel(logging.ERROR)
    if EQ_CLOUDWATCH_LOGGING:
        _setup_cloud_watch_logging()

    # Set werkzeug logging level
    werkzeug_logger = logging.getLogger('werkzeug')
    werkzeug_logger.setLevel(level=levels[EQ_WERKZEUG_LOG_LEVEL])

    # setup file logging
    rotating_log_file = RotatingFileHandler(filename="eq.log",
                                            maxBytes=1048576,
                                            backupCount=10)
    logging.getLogger().addHandler(rotating_log_file)
    renderer_processor = ConsoleRenderer() if EQ_DEVELOPER_LOGGING else JSONRenderer()
    processors = [
        add_log_level,
        TimeStamper(key='created', fmt='iso'),
        add_service,
        format_exc_info,
        renderer_processor,
    ]
    configure(context_class=wrap_dict(dict),
              logger_factory=LoggerFactory(),
              processors=processors,
              cache_logger_on_first_use=True)
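This example and the next rely on names defined elsewhere in the project (EQ_LOG_LEVEL, EQ_DEVELOPER_LOGGING, add_service, and so on). A plausible import block for the logging and structlog names they use, offered as an assumption rather than the project's actual imports:

import logging
from logging.handlers import RotatingFileHandler

from structlog import configure
from structlog.dev import ConsoleRenderer
from structlog.processors import JSONRenderer, TimeStamper, format_exc_info
from structlog.stdlib import LoggerFactory, add_log_level
from structlog.threadlocal import wrap_dict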
Example #8
def configure_logging():
    # set up some sane logging, as opposed to what flask does by default
    log_format = "%(message)s"
    levels = {
        'CRITICAL': logging.CRITICAL,
        'ERROR': logging.ERROR,
        'WARNING': logging.WARNING,
        'INFO': logging.INFO,
        'DEBUG': logging.DEBUG,
    }
    handler = logging.StreamHandler()
    logging.basicConfig(level=levels[EQ_LOG_LEVEL],
                        format=log_format,
                        handlers=[handler])

    # Set werkzeug logging level
    werkzeug_logger = logging.getLogger('werkzeug')
    werkzeug_logger.setLevel(level=levels[EQ_WERKZEUG_LOG_LEVEL])

    def parse_exception(_, __, event_dict):
        if EQ_DEVELOPER_LOGGING:
            return event_dict
        exception = event_dict.get('exception')
        if exception:
            event_dict['exception'] = exception.replace("\"", "'").split("\n")
        return event_dict

    # setup file logging
    renderer_processor = ConsoleRenderer() if EQ_DEVELOPER_LOGGING else JSONRenderer()
    processors = [
        add_log_level,
        TimeStamper(key='created', fmt='iso'),
        add_service,
        format_exc_info,
        parse_exception,
        renderer_processor,
    ]
    configure(context_class=wrap_dict(dict),
              logger_factory=LoggerFactory(),
              processors=processors,
              cache_logger_on_first_use=True)
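The parse_exception processor reshapes the multi-line traceback string produced by format_exc_info so that JSON log collectors receive a list of lines with no embedded double quotes. Roughly:

exc = 'Traceback (most recent call last):\n  File "app.py", line 1\nValueError: "boom"'
exc.replace('"', "'").split("\n")
# -> ['Traceback (most recent call last):', "  File 'app.py', line 1", "ValueError: 'boom'"]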
Example #9
# Author: Shane R. Spencer <*****@*****.**>

# Structlog: https://github.com/hynek/structlog
from structlog import get_logger
from structlog.dev import ConsoleRenderer, set_exc_info, _has_colorama
from structlog.processors import TimeStamper, StackInfoRenderer, format_exc_info
from structlog.contextvars import merge_contextvars

_logger = get_logger(processors=[
    merge_contextvars,
    StackInfoRenderer(),
    set_exc_info,
    format_exc_info,
    TimeStamper(fmt="%Y-%m-%d %H:%M.%S", utc=False),
    ConsoleRenderer(colors=_has_colorama),
])

access_log = _logger.bind()
app_log = _logger.bind()
gen_log = _logger.bind()


class Wut(object):
    def __init__(self):
        ...


def stuff() -> None:
    return None
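All three module-level loggers share the same pre-configured proxy and can be used directly. Note that the chain contains no add_log_level processor, so ConsoleRenderer prints no level column; an illustrative call:

app_log.info("starting up", port=8888)
# Prints roughly: 2024-01-01 12:00.00 starting up    port=8888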
Example #10
def main():
    c = ConsoleRenderer()
    for line in sys.stdin:
        d = json.loads(line)
        print(c(None, None, d))
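Given a stream of JSON-formatted log lines on stdin, this works as a small pretty-printing filter. The imports and entry point the snippet assumes would look like:

import json
import sys

from structlog.dev import ConsoleRenderer

if __name__ == "__main__":
    main()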
Example #11
    def __init__(self):
        self._worse_console_renderer = ConsoleRenderer(
            level_styles=_level_styles())
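_level_styles() lives elsewhere in that code base; a hypothetical version built on ConsoleRenderer.get_default_level_styles() (the specific tweak is an arbitrary example) could be:

from structlog.dev import ConsoleRenderer

def _level_styles():
    # Start from structlog's defaults and strip the styling from debug output.
    styles = ConsoleRenderer.get_default_level_styles()
    styles["debug"] = ""
    return styles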
Example #12
import os.path
import glob
import re
import sys
import subprocess
import shlex
from pathlib import Path
from functools import cmp_to_key

from structlog import get_logger, wrap_logger
from structlog.dev import ConsoleRenderer
import yaml
import semver
import pkgconfig

glog = wrap_logger(get_logger(), processors=[ConsoleRenderer(pad_event=42, colors=True)])

cc = os.getenv('CC', 'gcc')
cxx = os.getenv('CXX', 'g++')

syslibs = []
libpaths = re.findall(r'SEARCH_DIR\("(.+?)"\);', subprocess.check_output(shlex.split('bash -c "ld --verbose | grep SEARCH_DIR"')).decode('utf-8')) + os.getenv('LD_LIBRARY_PATH', '').split(':')

def find_lib(lib, pth):
  glog.info('find_lib()', lib=lib)
  for lp in pth:
    if os.path.isfile(lp + '/' + lib):
      return lp + '/' + lib
  glog.failure('unable to find a library', lib=lib)
  sys.exit(1)
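A typical call resolves a library file name against the search paths gathered above; the library name here is only an example:

libz_path = find_lib('libz.so', libpaths)
glog.info('resolved', lib='libz.so', path=libz_path)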