Beispiel #1
0
def setup_logfile(filename, level=DEBUG, maxlevel=None):
    """Attach a UTF-8 file handler for *filename* to the root logger.

    Idempotent per filename: a file that already has a handler is skipped.

    :param filename: path of the log file to append to.
    :param level: minimum level the handler processes (default DEBUG).
    :param maxlevel: if given, records above this level are filtered out.
    :return: the new handler, or None if *filename* was already registered.
    """
    if filename in logfiles:
        return
    logfiles.add(filename)

    logger = logging.getLogger()

    handler = FileHandler(filename, encoding='utf-8')
    add_common_filters(handler)
    # Bug fix: honour the requested level instead of hard-coding DEBUG,
    # so a caller asking for e.g. INFO does not get debug records on disk.
    handler.setLevel(level)
    fmt = LogFormatter(
        fmt="%(asctime)s [%(name)s] %(levelname)s: %(message)s",
        output_markers=(START_MARKER, END_MARKER))
    handler.setFormatter(fmt)

    if maxlevel:
        filt = MaxFilter(maxlevel)
        handler.addFilter(filt)

    logger.addHandler(handler)
    # Lower (never raise) the root level so the new handler sees records.
    logger.setLevel(min(logger.level, level))

    if cache_handler is not None:
        # Replay buffered early records into the freshly added handler.
        cache_handler.replay(handler)

    return handler
Beispiel #2
0
def addHandler(handler=None, stream=None, filename=None, filemode='a',
               format=None, datefmt=None, level=None, max_level=None,
               filters=(), logger=None):
    """stream, filename, filemode, format, datefmt: as per logging.basicConfig

       handler: use a precreated handler instead of creating a new one
       logger: logger to add the handler to (uses root logger if none specified)
       filters: an iterable of filters to add to the handler
       level: only messages of this level and above will be processed
       max_level: only messages of this level and below will be processed
    """
    # Create the handler if one hasn't been passed in
    if handler is None:
        if filename is not None:
            handler = FileHandler(filename, filemode)
        else:
            # StreamHandler(None) falls back to sys.stderr
            handler = StreamHandler(stream)
    # Set up the formatting of the log messages (%-style, as basicConfig)
    formatter = Formatter(format, datefmt)
    handler.setFormatter(formatter)
    # Set up filtering of which messages to handle
    if level is not None:
        handler.setLevel(level)
    if max_level is not None:
        handler.addFilter(LowPassFilter(max_level))
    # `filt` instead of `filter`, to avoid shadowing the builtin
    for filt in filters:
        handler.addFilter(filt)
    # Add the fully configured handler to the specified logger
    if logger is None:
        logger = getLogger()
    logger.addHandler(handler)
    return handler
Beispiel #3
0
        def setHandler(logger, lvl, path, _format):
            """
            Install a per-level file handler on *logger*.

            The handler only emits records whose levelname equals *lvl*
            (enforced by the nested _Filter), and replaces any handler
            previously registered for the same level.

            :param Logger logger: logger to which the handler is added.
            :param str lvl: logging level name (e.g. 'INFO').
            :param str path: log file path.
            :param str _format: logging message format string.
            """

            class _Filter(Filter):
                """Pass only records whose levelname matches lvl exactly."""

                def filter(self, record):
                    return record.levelname == lvl

            # Build the file handler with its level, filter and formatter.
            handler = FileHandler(path)
            handler.addFilter(_Filter())
            handler.setLevel(lvl)
            formatter = Formatter(_format)
            handler.setFormatter(formatter)

            # The previous handler for this level (if any) was stashed on
            # the logger as an attribute named after the level; remove it.
            if hasattr(logger, lvl):
                old_handler = getattr(logger, lvl)
                logger.removeHandler(old_handler)

            logger.addHandler(handler)
            # Remember the new handler under the level name for later swaps.
            setattr(logger, lvl, handler)
Beispiel #4
0
def init_app(app, name=''):
    """
    Configures the provided app's logger.

    :param app: the application object to configure the logger
    :param name: the name of the logger to create and configure
    """

    # Flask registers its own debug handler when app.debug is True; drop
    # all pre-existing handlers because logging is fully configured here.
    del app.logger.handlers[:]

    log_path = app.config['LOG_PATH']
    log_level = app.config['LOG_LEVEL'] or ''
    log_filter = app.config['LOG_FILTER']
    log_ignore = app.config['LOG_IGNORE']

    if log_path:
        handler = FileHandler(log_path)
    else:
        handler = StreamHandler()

    # Empty LOG_LEVEL falls back on a debug-dependent default.
    fallback_level = 'DEBUG' if app.debug else 'WARNING'
    handler.setLevel(log_level.upper() or fallback_level)  # noqa
    handler.addFilter(MultiNameFilter(log_filter, log_ignore))

    # Short timestamps while debugging, full date + offset in production.
    date_format = '%H:%M:%S' if app.debug else '%Y-%m-%d %H:%M:%S%z'
    handler.setFormatter(Formatter(
        '%(asctime)s %(process)s %(thread)-15s %(name)-10s %(levelname)-8s %(message)s',  # noqa
        date_format))

    logger = getLogger(name)
    logger.setLevel(handler.level)
    logger.addHandler(handler)
Beispiel #5
0
def setup_logfile(filename, level=DEBUG, maxlevel=None):
    """Register a UTF-8 file handler on the root logger, once per filename.

    :param filename: log file to append to; skipped if already registered.
    :param level: minimum level for the handler (and root-level ceiling).
    :param maxlevel: optional upper bound enforced via a MaxFilter.
    :return: the created handler, or None when the file was seen before.
    """
    if filename in logfiles:
        return
    logfiles.add(filename)

    root = logging.getLogger()

    file_handler = FileHandler(filename, encoding='utf-8')
    add_common_filters(file_handler)
    file_handler.setLevel(level)
    formatter = LogFormatter(
        fmt="%(asctime)s [%(name)s] %(levelname)s: %(message)s",
        output_markers=(START_MARKER, END_MARKER))
    file_handler.setFormatter(formatter)

    if maxlevel:
        file_handler.addFilter(MaxFilter(maxlevel))

    root.addHandler(file_handler)
    # Only ever lower the root level so the new handler receives records.
    root.setLevel(min(root.level, level))

    if cache_handler is not None:
        # Flush any buffered early records through the new handler.
        cache_handler.replay(file_handler)

    return file_handler
Beispiel #6
0
 def add_chat_handler(self):
     """Attach a file handler for chat records to this logger.

     Ensures logs/chat.log exists, then adds a FileHandler restricted
     by ChatFilter (presumably to chat-only records — confirm).
     """
     # TODO better log file names. ^^ see above ^^ Also apply filter
     file_name = f"logs/chat.log"
     # create if doesnt exist
     # NOTE(review): the file is created under dir_path/.. but the handler
     # below opens file_name relative to the CWD — confirm these resolve
     # to the same file.
     open(os.path.join(dir_path, "..", f'{file_name}'), 'a').close()
     handler = FileHandler(filename=file_name)
     # apply filter
     handler.addFilter(ChatFilter())
     self.addHandler(handler)
Beispiel #7
0
def addHandler(*,
               handler=None,
               stream=None,
               filename=None,
               filemode='a',
               format=None,
               datefmt=None,
               style='{',
               level=None,
               max_level=None,
               filters=(),
               logger=None):
    """stream, filename, filemode, format, datefmt: as per logging.basicConfig

       handler: use a precreated handler instead of creating a new one
       logger: logger to add the handler to (uses root logger if none specified)
       filters: an iterable of filters to add to the handler
       level: only messages of this level and above will be processed
       max_level: only messages of this level and below will be processed
       style: as per logging.basicConfig, but defaults to '{' (i.e. str.format)
    """
    # Create the handler if one hasn't been passed in
    if handler is None:
        if filename is not None:
            handler = FileHandler(filename, filemode)
        else:
            # StreamHandler(None) falls back to sys.stderr
            handler = StreamHandler(stream)
    # Set up the formatting of the log messages
    # New API, so it can default to str.format instead of %-formatting
    formatter = Formatter(format, datefmt, style)
    handler.setFormatter(formatter)
    # Set up filtering of which messages to handle
    if level is not None:
        handler.setLevel(level)
    if max_level is not None:

        def level_ok(record):
            # Plain callables are accepted by Handler.addFilter (3.2+).
            return record.levelno <= max_level

        handler.addFilter(level_ok)
    # `filt` instead of `filter`, to avoid shadowing the builtin
    for filt in filters:
        handler.addFilter(filt)
    # Add the fully configured handler to the specified logger
    if logger is None:
        logger = getLogger()
    logger.addHandler(handler)
    return handler
    def test_json_formater_with_keys_fmt(self):
        """JsonFormatter(keys_fmt=...) should emit only the mapped keys."""
        with self.patch_open as open_mock:
            # The FileHandler opens the (mocked) file inside the patch context.
            file_handler = FileHandler('/test/fake_file.log', encoding='UTF-8')
            file_handler.addFilter(TrackingFilter())
            file_handler.setFormatter(JsonFormatter(keys_fmt=[('lvl', 'levelname'), ('msg', 'message')]))
            logging.basicConfig()
            test_logger = logging.getLogger("test")
            test_logger.addHandler(file_handler)
            test_logger.setLevel(logging.DEBUG)

        # NOTE(review): the log call runs after the patch context exits, but
        # writes are still recorded on the captured open_mock — confirm intended.
        test_logger.info("Msg")
        if six.PY3:
            # PY3: encoding passed as keyword; message and newline are two writes.
            open_mock.assert_called_once_with('/test/fake_file.log', 'a', encoding='UTF-8')
            open_mock.return_value.write.assert_has_calls([call(RegexpMatch(
                '{"lvl":"INFO","msg":"Msg"}')), call('\n')])
        else:
            # PY2: encoding passed positionally; single write including newline.
            open_mock.assert_called_once_with('/test/fake_file.log', 'a', 'UTF-8')
            open_mock.return_value.write.assert_called_once_with(RegexpMatch(
                '{"lvl":"INFO","msg":"Msg"}\n'))
 def test_json_formater_with_transaction(self):
     """Transaction context (trans/corr/op) must appear in every JSON record."""
     with self.patch_open as open_mock:
         file_handler = FileHandler('/test/fake_file.log', encoding='UTF-8')
         file_handler.addFilter(TrackingFilter())
         file_handler.setFormatter(JsonFormatter())
         logging.basicConfig()
         test_logger = logging.getLogger("test")
         test_logger.addHandler(file_handler)
         test_logger.setLevel(logging.DEBUG)
         # Transaction identifiers picked up by the formatter via local_context.
         local_context.trans = "trans"
         local_context.corr = "corr"
         local_context.op = "op"
         test_logger.info("Msg1")
         test_logger.debug("Msg2")
         test_logger.error("Msg3")
         if six.PY3:
             # PY3: encoding keyword; each record is a write plus a '\n' write.
             open_mock.assert_called_once_with('/test/fake_file.log', 'a', encoding='UTF-8')
             open_mock.return_value.write.assert_has_calls([
                 call(RegexpMatch('{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
                                  '"lvl":"INFO","corr":"corr","trans":"trans","op":"op",'
                                  '"comp":"tests_basic_config","msg":"Msg1"}')), call('\n'),
                 call(RegexpMatch('{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
                                  '"lvl":"DEBUG","corr":"corr","trans":"trans","op":"op",'
                                  '"comp":"tests_basic_config","msg":"Msg2"}')), call('\n'),
                 call(RegexpMatch('{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
                                  '"lvl":"ERROR","corr":"corr","trans":"trans","op":"op",'
                                  '"comp":"tests_basic_config","msg":"Msg3"}')),  call('\n')
             ])
         else:
             # PY2: positional encoding; newline is part of each single write.
             open_mock.assert_called_once_with('/test/fake_file.log', 'a', 'UTF-8')
             open_mock.return_value.write.assert_has_calls([
                 call(RegexpMatch('{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
                                  '"lvl":"INFO","corr":"corr","trans":"trans","op":"op",'
                                  '"comp":"tests_basic_config","msg":"Msg1"}\n')),
                 call(RegexpMatch('{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
                                  '"lvl":"DEBUG","corr":"corr","trans":"trans","op":"op",'
                                  '"comp":"tests_basic_config","msg":"Msg2"}\n')),
                 call(RegexpMatch('{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
                                  '"lvl":"ERROR","corr":"corr","trans":"trans","op":"op",'
                                  '"comp":"tests_basic_config","msg":"Msg3"}\n'))
             ])
    def test_json_formater_removing_empty_keys(self):
        """With remove_blanks=True, unset keys (corr/trans/op) are omitted."""
        with self.patch_open as open_mock:
            file_handler = FileHandler('/test/fake_file.log', encoding='UTF-8')
            file_handler.addFilter(TrackingFilter())
            file_handler.setFormatter(JsonFormatter(remove_blanks=True))
            logging.basicConfig()
            test_logger = logging.getLogger("test")
            test_logger.addHandler(file_handler)
            test_logger.setLevel(logging.DEBUG)

        # No transaction context set, so only non-blank keys should be emitted.
        test_logger.info("Msg")
        if six.PY3:
            open_mock.assert_called_once_with('/test/fake_file.log', 'a', encoding='UTF-8')
            open_mock.return_value.write.assert_has_calls([call(RegexpMatch(
                '{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
                '"lvl":"INFO","comp":"tests_basic_config","msg":"Msg"}')), call('\n')])
        else:
            open_mock.assert_called_once_with('/test/fake_file.log', 'a', 'UTF-8')
            open_mock.return_value.write.assert_called_once_with(RegexpMatch(
                '{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
                '"lvl":"INFO","comp":"tests_basic_config","msg":"Msg"}\n'))
    def test_json_formater_with_extra(self):
        """Keys under extra['additional'] are flattened into the JSON record."""
        with self.patch_open as open_mock:
            file_handler = FileHandler('/test/fake_file.log', encoding='UTF-8')
            file_handler.addFilter(TrackingFilter())
            file_handler.setFormatter(JsonFormatter())
            logging.basicConfig()
            test_logger = logging.getLogger("test")
            test_logger.addHandler(file_handler)
            test_logger.setLevel(logging.DEBUG)

        # 'key' from the 'additional' dict should appear as a top-level field;
        # unset transaction keys are serialized as null.
        test_logger.info("Msg", extra={'additional': {'key': 'extra'}})
        if six.PY3:
            open_mock.assert_called_once_with('/test/fake_file.log', 'a', encoding='UTF-8')
            open_mock.return_value.write.assert_has_calls([call(RegexpMatch(
                '{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
                '"lvl":"INFO","corr":null,"trans":null,"op":null,'
                '"comp":"tests_basic_config","msg":"Msg","key":"extra"}')), call('\n')])
        else:
            open_mock.assert_called_once_with('/test/fake_file.log', 'a', 'UTF-8')
            open_mock.return_value.write.assert_called_once_with(RegexpMatch(
                '{"time":"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3}Z",'
                '"lvl":"INFO","corr":null,"trans":null,"op":null,'
                '"comp":"tests_basic_config","msg":"Msg","key":"extra"}\n'))
Beispiel #12
0
def addHandler(handler=None, stream=None, filename=None, filemode='a',
               fmt=None, datefmt=None,
               level=None, max_level=None, filters=(), logger=None):
    """stream, filename, filemode, fmt, datefmt: as per logging.basicConfig

       handler: use a precreated handler instead of creating a new one
       logger: logger to add the handler to (uses root logger if none specified)
       filters: an iterable of filters to add to the handler
       level: only messages of this level and above will be processed
       max_level: only messages of this level and below will be processed
    """
    # Create the handler if one hasn't been passed in
    if handler is None:
        if filename is not None:
            handler = FileHandler(filename, filemode)
        else:
            # StreamHandler(None) falls back to sys.stderr
            handler = StreamHandler(stream)
    # Set up the formatting of the log messages (%-style)
    formatter = Formatter(fmt, datefmt)
    handler.setFormatter(formatter)
    # Set up filtering of which messages to handle
    if level is not None:
        handler.setLevel(level)
    if max_level is not None:
        class _MaxLevelFilter:
            """Pass only records at or below max_level."""
            def filter(self, record):
                return record.levelno <= max_level
        handler.addFilter(_MaxLevelFilter())
    # `filt` instead of `filter`, to avoid shadowing the builtin
    for filt in filters:
        handler.addFilter(filt)
    # Add the fully configured handler to the specified logger
    if logger:
        logger.addHandler(handler)
    else:
        logging.getLogger().addHandler(handler)
    return handler
Beispiel #13
0
# Start each run with fresh log files.
# NOTE(review): os.remove raises FileNotFoundError if a file is missing —
# confirm the files are guaranteed to exist at this point.
os.remove(exception_file)
os.remove(info_file)
# Rotation parameters for the (currently commented-out) RotatingFileHandler setup.
MAX_BYTES = 50000
BACKUP_COUNT = 5
# formatters
formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')

main_logger = logging.getLogger(__name__)  # sets main logger
main_logger.setLevel(logging.INFO)  # sets main logger
# info logger: INFO-and-above records, further restricted by LogFilter(INFO)
# info_logging = RotatingFileHandler(info_file, maxBytes=MAX_BYTES, backupCount=BACKUP_COUNT)
info_logging = FileHandler(info_file)
info_logging.setLevel(logging.INFO)
info_logging.setFormatter(formatter)
info_logging.addFilter(LogFilter(logging.INFO))
main_logger.addHandler(info_logging)

# warning logger: ERROR-and-above records, restricted by LogFilter(ERROR)
# exception_logging = RotatingFileHandler(exception_file, maxBytes=MAX_BYTES, backupCount=BACKUP_COUNT)
exception_logging = FileHandler(exception_file)
exception_logging.setLevel(logging.ERROR)
exception_logging.setFormatter(formatter)
exception_logging.addFilter(LogFilter(logging.ERROR))
main_logger.addHandler(exception_logging)

try:
    import json_functions as jf  # import samples queries
    import aiohttp  # import requests library - used in requesting webpage content
    from aiohttp import ClientSession
    import json  # to parse and create json requests
Beispiel #14
0
def init_logger(logger_name=settings.LOGGER_NAME,
                logging_level=settings.LOG_LEVEL,
                log_in_console=settings.LOG_IN_CONSOLE,
                log_in_file=settings.LOG_IN_FILE,
                logfile_name=settings.LOGGER_NAME,
                log_path=settings.LOG_PATH,
                split_logfile_by_level=settings.SPLIT_LOGFILE_BY_LEVEL):
    """Create and configure the application logger.

    :param logger_name: name of the logger to configure.
    :param logging_level: level name (e.g. 'info'); upper-cased before use.
    :param log_in_console: also attach a DEBUG-level console handler.
    :param log_in_file: attach file handler(s) under *log_path*.
    :param logfile_name: base name for the log file(s).
    :param log_path: directory for log files; created if missing.
    :param split_logfile_by_level: one file per level (debug/info/warning/
        error), using the SplitLogger logger class.
    :return: the configured logger.
    """
    formatter = logging.Formatter(
        '[%(asctime)s] [%(process)d] [%(levelname)s] [%(request_id)s] %(message)s'
    )  # noqa

    # Bug fix: `logger` used to be unbound (NameError on return) when both
    # log_in_file and log_in_console were falsy.
    logger = None

    if log_in_file:
        if not os.path.exists(log_path):
            os.makedirs(log_path)
        if split_logfile_by_level:
            # setLoggerClass must run BEFORE getLogger so the named logger
            # is created as a SplitLogger — do not hoist getLogger above it.
            logging.setLoggerClass(SplitLogger)
            logger = logging.getLogger(logger_name)
            level = logging.getLevelName(logging_level.upper())
            logger.setLevel(level)

            log_files = {
                logging.DEBUG:
                os.path.join(log_path, logfile_name + '.debug.log'),
                logging.INFO:
                os.path.join(log_path, logfile_name + '.info.log'),
                logging.WARNING:
                os.path.join(log_path, logfile_name + '.warning.log'),
                logging.ERROR:
                os.path.join(log_path, logfile_name + '.error.log'),
            }

            # One handler per level; each file gets records at or above its
            # level (handler-level threshold, not exclusive per level).
            for log_level, log_file in log_files.items():
                file_handler = FileHandler(log_file)
                file_handler.setLevel(log_level)
                file_handler.setFormatter(formatter)
                file_handler.addFilter(RequestIDLogFilter())
                logger.addHandler(file_handler)
        else:
            logger = logging.getLogger(logger_name)
            level = logging.getLevelName(logging_level.upper())
            logger.setLevel(level)
            log_file = os.path.join(log_path, logfile_name + '.log')
            file_handler = FileHandler(log_file)
            file_handler.setLevel(logging.DEBUG)
            file_handler.setFormatter(formatter)
            file_handler.addFilter(RequestIDLogFilter())
            logger.addHandler(file_handler)

    if log_in_console:
        logger = logging.getLogger(logger_name)
        level = logging.getLevelName(logging_level.upper())
        logger.setLevel(level)
        console_handler = logging.StreamHandler()
        console_handler.name = "console"
        console_handler.setLevel(logging.DEBUG)
        console_handler.setFormatter(formatter)
        console_handler.addFilter(RequestIDLogFilter())
        logger.addHandler(console_handler)

    if logger is None:
        # No sinks requested: still return a usable (handler-less) logger
        # instead of crashing with NameError.
        logger = logging.getLogger(logger_name)
        logger.setLevel(logging.getLevelName(logging_level.upper()))

    return logger
Beispiel #15
0
 def __init__(self, handler: logging.FileHandler):
     """Gate *handler* behind a boolean switch, initially off.

     :param handler: file handler whose records are suppressed while
         ``self.value`` is False.
     """
     # Start disabled; flipping self.value later toggles the handler
     # because the filter lambda below closes over self.
     self.value = False
     handler.addFilter(lambda record: self.value)