def test_override_dict():
    """Check that add_override() raises the minimal level of matching loggers.

    "foo2.*" is overridden to CRITICAL, so the WARNING emitted on the
    "foo2.bar" logger must be dropped while the one on "foo.bar" still
    reaches stderr and the json output.
    """
    reset_unittests()
    add_override("foo2.*", "CRITICAL")
    x = get_logger("foo.bar")
    y = get_logger("foo2.bar")
    x.warning("foo")
    y.warning("foo2")
    assert UNIT_TESTS_STDOUT == []
    _test_stdxxx(UNIT_TESTS_STDERR, "WARNING", "foo")
    _test_json("WARNING", "foo")
    # BUG FIX: the cleanup used to clear "foo.*" (never installed), leaving
    # the "foo2.*" CRITICAL override active for every later test.  Clear the
    # override that was actually added above.
    add_override("foo2.*", None)
def test_basic_log():
    """A WARNING sent through .log() must reach stderr and json, not stdout."""
    reset_unittests()
    log = get_logger()
    log.log(logging.WARNING, "foo")
    assert UNIT_TESTS_STDOUT == []
    _test_stdxxx(UNIT_TESTS_STDERR, "WARNING", "foo")
    _test_json("WARNING", "foo")
def test_basic_debug():
    """A DEBUG message must be filtered out everywhere by default."""
    reset_unittests()
    log = get_logger()
    log.debug("foo")
    assert UNIT_TESTS_STDERR == []
    assert UNIT_TESTS_JSON == []
    assert UNIT_TESTS_STDOUT == []
def test_empty_call2():
    """Logging None as the message must render the string "None" on stdout."""
    reset_unittests()
    log = get_logger()
    log.info(None)
    assert UNIT_TESTS_STDERR == []
    assert UNIT_TESTS_JSON == []
    _test_stdxxx(UNIT_TESTS_STDOUT, "INFO", "None")
def test_basic_info():
    """An INFO message must go to stdout only (no stderr, no json)."""
    reset_unittests()
    log = get_logger()
    log.info("foo")
    assert UNIT_TESTS_STDERR == []
    assert UNIT_TESTS_JSON == []
    _test_stdxxx(UNIT_TESTS_STDOUT, "INFO", "foo")
def test_template_info2():
    """%-style dict interpolation in the message must be applied lazily."""
    reset_unittests()
    log = get_logger()
    log.info("foo%(u)s", {"u": "bar"})
    assert UNIT_TESTS_STDERR == []
    assert UNIT_TESTS_JSON == []
    _test_stdxxx(UNIT_TESTS_STDOUT, "INFO", "foobar")
def test_basic_error():
    """An ERROR message must reach stderr and json, never stdout."""
    reset_unittests()
    log = get_logger()
    log.error("foo")
    assert UNIT_TESTS_STDOUT == []
    _test_stdxxx(UNIT_TESTS_STDERR, "ERROR", "foo")
    _test_json("ERROR", "foo")
def test_basic_critical():
    """A CRITICAL message must reach stderr and json, never stdout."""
    reset_unittests()
    log = get_logger()
    log.critical("foo")
    assert UNIT_TESTS_STDOUT == []
    _test_stdxxx(UNIT_TESTS_STDERR, "CRITICAL", "foo")
    _test_json("CRITICAL", "foo")
def main():
    """CLI entry point: log MESSAGE at LEVEL with the metwork logging system.

    Command line:
        --application-name/-a: logger name (default: "default")
        LEVEL: one of ERROR/CRITICAL/WARNING/INFO/DEBUG (enforced by argparse)
        MESSAGE: the message to log
    """
    parser = argparse.ArgumentParser("log a message with standard metwork "
                                     "logging system")
    parser.add_argument('--application-name', '-a', action="store",
                        default="default", help="application name")
    parser.add_argument(
        'LEVEL', action='store', help="Log level",
        choices=['ERROR', 'CRITICAL', 'WARNING', 'INFO', 'DEBUG'])
    parser.add_argument('MESSAGE', action='store', help="message to log")
    options = parser.parse_args()
    logger = get_logger(options.application_name)
    if options.LEVEL == 'DEBUG':
        logger.debug(options.MESSAGE)
    elif options.LEVEL == 'INFO':
        logger.info(options.MESSAGE)
    elif options.LEVEL == 'WARNING':
        logger.warning(options.MESSAGE)
    elif options.LEVEL == 'CRITICAL':
        logger.critical(options.MESSAGE)
    elif options.LEVEL == 'ERROR':
        logger.error(options.MESSAGE)
    else:
        # Defensive: unreachable because argparse restricts LEVEL via choices.
        # BUG FIX: Exception() does not lazily interpolate "%s" args the way
        # logging calls do; use explicit %-formatting so the bad level
        # actually appears in the exception message.
        raise Exception("Bad message level: %s" % options.LEVEL)
async def mflog_middleware(request, handler):
    """aiohttp middleware: attach an mflog logger to the request.

    The logger is stored under request['mflog_logger'] and bound to the
    X-Request-Id header value (if present).  HTTP exceptions from the
    handler propagate unchanged; any other exception is logged and turned
    into a plain HTTP/500 response.
    """
    # choose the logger name depending on the PLUGIN module-level setting
    if PLUGIN:
        logger_name = "%s.aiohttp" % PLUGIN
    else:
        logger_name = "aiohttp"
    log = get_logger(logger_name)
    request_id = request.headers.get("X-Request-Id", None)
    if request_id:
        log = log.bind(request_id=request_id)
    request['mflog_logger'] = log
    try:
        return await handler(request)
    except web.HTTPException:
        # aiohttp HTTP exceptions are part of normal control flow
        raise
    except Exception:
        log.exception("exception catched")
        return web.Response(text="HTTP/500", status=500)
def _uncompress(method, strict, xaf, logger=None):
    """Uncompress the given xaf file into a temporary file.

    method must be "gzip" or "bzip2" (anything else raises).  On success,
    tags are copied onto the new file, the original is deleted and the new
    xaf is returned.  On failure: with strict=True a warning is logged and
    None is returned; with strict=False the original xaf is returned as-is.
    """
    logr = logger if logger is not None else get_logger("acquisition._uncompress")
    if method == "gzip":
        cmodule = gzip
    elif method == "bzip2":
        cmodule = Py2Py3Bzip2Wrapper
    else:
        raise Exception("unknown compression method: %s" % method)
    tmp_filepath = _get_tmp_filepath("uncompress_decorator", method)
    try:
        with cmodule.open(xaf.filepath, 'rb') as f_in:
            with open(tmp_filepath, 'wb') as f_out:
                shutil.copyfileobj(f_in, f_out)
    except Exception:
        if not strict:
            # best-effort mode: hand back the original, untouched file
            return xaf
        logr.warning("can't uncompress (%s) in (%s) with %s method",
                     xaf.filepath, tmp_filepath, method)
        return None
    new_xaf = xaf.copy_tags_on(tmp_filepath)
    xaf.delete()
    return new_xaf
def test_logger_name():
    """The logger name must appear in the rendered stdout line."""
    reset_unittests()
    log = get_logger("foo.bar")
    log.info("test")
    assert UNIT_TESTS_STDERR == []
    assert UNIT_TESTS_JSON == []
    _test_stdxxx(UNIT_TESTS_STDOUT, "INFO", "test")
    assert "foo.bar" in UNIT_TESTS_STDOUT[0]
def __init__(self, name: str, slot_number: int, cmd: Cmd):
    """Initialize the service: store its configuration, create a logger
    bound to the service name, and start in the STOPPED state."""
    self.name: str = name
    self.slot_number: int = slot_number
    self.cmd: Cmd = cmd
    # process slots, indexed by slot number (filled later)
    self.slots: Dict[int, ProcessSlot] = {}
    self.logger = mflog.get_logger("alwaysup.service").bind(id=self.name)
    StateMixin.__init__(self, logger=self.logger)
    self.set_state(ServiceState.STOPPED)
def test_bytes():
    """Bytes messages and bytes context values must be decoded for output."""
    reset_unittests()
    log = get_logger()
    log.warning(b"foo", k1=1, k2=b"bar")
    assert UNIT_TESTS_STDOUT == []
    _test_stdxxx(UNIT_TESTS_STDERR, "WARNING", "foo", "{k1=1 k2=bar}")
    tmp = _test_json("WARNING", "foo")
    assert tmp['k1'] == 1
    assert tmp['k2'] == 'bar'
def test_utf8():
    """Non-ASCII (UTF-8) messages and context values must pass through intact."""
    reset_unittests()
    log = get_logger()
    log.warning(u"fooééé", k1=1, k2=u"barààà")
    assert UNIT_TESTS_STDOUT == []
    _test_stdxxx(UNIT_TESTS_STDERR, "WARNING", u"fooééé", u"{k1=1 k2=barààà}")
    tmp = _test_json("WARNING", u"fooééé")
    assert tmp['k1'] == 1
    assert tmp['k2'] == u'barààà'
def test_json_only_keys1():
    """Exercise set_config(json_only_keys=...) with bound and call-time context."""
    reset_unittests()
    set_config(json_only_keys=["extra_context_key1", "extra_context_key2"])
    log = get_logger("foo.bar").bind(k1=1, k2="bar")
    log.info("foo", k1=2, k3=2)
    assert UNIT_TESTS_STDERR == []
    assert UNIT_TESTS_JSON == []
    # call-time k1=2 overrides the bound k1=1 in the rendered context
    _test_stdxxx(UNIT_TESTS_STDOUT, "INFO", "foo", "{k1=2 k2=bar k3=2}")
def test_json_only_keys2():
    """Same as test_json_only_keys1 but configured via the environment."""
    reset_unittests()
    os.environ["MFLOG_JSON_ONLY_KEYS"] = \
        "extra_context_key1,extra_context_key2"
    log = get_logger("foo.bar").bind(k1=1, k2="bar")
    log.info("foo", k1=2, k3=2)
    assert UNIT_TESTS_STDERR == []
    assert UNIT_TESTS_JSON == []
    _test_stdxxx(UNIT_TESTS_STDOUT, "INFO", "foo", "{k1=2 k2=bar k3=2}")
def test_bind():
    """Chained bind() calls must accumulate context onto the logger."""
    reset_unittests()
    log = get_logger("foo.bar")
    # two separate binds on purpose: chaining is what is under test here
    log = log.bind(k1=1)
    log = log.bind(k2='bar')
    log.warning("foo")
    assert UNIT_TESTS_STDOUT == []
    _test_stdxxx(UNIT_TESTS_STDERR, "WARNING", "foo", "{k1=1 k2=bar}")
    tmp = _test_json("WARNING", "foo")
    assert tmp['k1'] == 1
    assert tmp['k2'] == 'bar'
def test_extra_context():
    """extra_context_func set via set_config() must inject extra keys."""
    reset_unittests()
    set_config(extra_context_func=extra_context)
    log = get_logger("foo.bar").bind(k1=1, k2="bar")
    log.info("foo", k1=2, k3=2)
    assert UNIT_TESTS_STDERR == []
    assert UNIT_TESTS_JSON == []
    _test_stdxxx(
        UNIT_TESTS_STDOUT, "INFO", "foo",
        "{extra_context_key1=extra_context_value1 "
        "extra_context_key2=extra_context_value2 k1=2 k2=bar k3=2}")
def test_extra_context2():
    """Same as test_extra_context but configured via the environment."""
    reset_unittests()
    os.environ["MFLOG_EXTRA_CONTEXT_FUNC"] = "mflog.unittests.extra_context"
    log = get_logger("foo.bar").bind(k1=1, k2="bar")
    log.info("foo", k1=2, k3=2)
    assert UNIT_TESTS_STDERR == []
    assert UNIT_TESTS_JSON == []
    _test_stdxxx(
        UNIT_TESTS_STDOUT, "INFO", "foo",
        "{extra_context_key1=extra_context_value1 "
        "extra_context_key2=extra_context_value2 k1=2 k2=bar k3=2}")
def __init__(self, name_prefix, slot_number, cmd: Cmd):
    """Initialize a process slot named "<name_prefix>.<slot_number>".

    The slot gets its own copy of the command (with the slot number added
    to the command context under the "SLOT" key), a logger bound to the
    slot name, starts in the STOPPED state and spawns its background
    management task.
    """
    self.name_prefix = name_prefix
    self.slot_number: int = slot_number
    # slot name: "<name_prefix>.<slot_number>"
    self.name = self.name_prefix + "." + str(self.slot_number)
    # per-slot copy of the command with SLOT injected into its context
    self.cmd: Cmd = Cmd.copy_and_add_to_context(cmd, {"SLOT": self.slot_number})
    self.logger = mflog.get_logger("alwaysup.process_slot").bind(
        id=self.name)
    StateMixin.__init__(self, logger=self.logger)
    self.managed_process: Optional[ManagedProcess] = None
    self.set_state(ProcessSlotState.STOPPED)
    # background task driving this slot; log_exceptions wraps the coroutine
    # so failures inside _manage() are not silently lost
    self._manage_task = asyncio.create_task(log_exceptions(self._manage()))
    self._waiting_for_restart_task = None
def test_basic_exception():
    """logger.exception() must emit ERROR plus exception metadata in json."""
    reset_unittests()
    log = get_logger()
    try:
        1 / 0
    except Exception:
        log.exception("foo")
    assert UNIT_TESTS_STDOUT == []
    _test_stdxxx(UNIT_TESTS_STDERR, "ERROR", "foo")
    tmp = _test_json("ERROR", "foo")
    # the json payload must carry a traceback and exception metadata
    assert len(tmp['exception']) > 10
    assert tmp['exception_type'] == 'ZeroDivisionError'
    assert tmp['exception_file'] == __file__
def _remove_first_line(xaf, logger=None):
    """Copy the xaf file into a temporary file without its first line.

    On success, tags are copied onto the new file and the new xaf object is
    returned (the original file is left in place).  On any error a warning
    is logged and False is returned.
    """
    logr = logger
    if logger is None:
        logr = get_logger("acquisition._remove_first_line")
    tmp_filepath = _get_tmp_filepath("remove_first_line_decorator", "main")
    try:
        with open(xaf.filepath, "rb") as f_in:
            # consume the first line, then stream the remainder verbatim
            f_in.readline()
            with open(tmp_filepath, "wb") as f_out:
                shutil.copyfileobj(f_in, f_out)
    except Exception:
        logr.warning("can't remove first line from (%s) to (%s)",
                     xaf.filepath, tmp_filepath)
        return False
    new_xaf = xaf.copy_tags_on(tmp_filepath)
    return new_xaf
def __init__(
    self,
    name_prefix: str,
    cmd: Cmd,
):
    """Initialize a managed process wrapper around the given command.

    A short unique id is generated and used to build the instance name
    ("<name_prefix>.managed_process.<id>"); the logger is bound to that
    name and the instance starts in the READY state with no OS process
    attached yet.
    """
    self.cmd: Cmd = cmd
    # first 10 hex chars of a unique identifier => short but unique name
    self.id: str = get_unique_hexa_identifier()[0:10]
    self.name: str = f"{name_prefix}.managed_process.{self.id}"
    self.logger = mflog.get_logger("alwaysup.managed_process").bind(
        id=self.name)
    StateMixin.__init__(self, logger=self.logger)
    # populated once the underlying OS process is actually started/finished
    self.process: Optional[Process] = None
    self.pid: Optional[int] = None
    self.returncode: Optional[int] = None
    self.set_state(ManagedProcessState.READY)
    self._wait_for_process_end_task: Optional[asyncio.Task] = None
    self.cmd_line: Optional[str] = None
def test_issue3():
    """Passing the exception object itself to .exception() must work (issue #3)."""
    reset_unittests()
    log = get_logger()
    try:
        1 / 0
    except Exception as exc:
        log.exception(exc)
    assert UNIT_TESTS_STDOUT == []
    # ZeroDivisionError wording differs between Python 2 and Python 3
    if six.PY2:
        message = "integer division or modulo by zero"
    else:
        message = "division by zero"
    _test_stdxxx(UNIT_TESTS_STDERR, "ERROR", message)
    tmp = _test_json("ERROR", message)
    assert len(tmp['exception']) > 10
    assert tmp['exception_type'] == 'ZeroDivisionError'
    assert tmp['exception_file'] == __file__
def __init__(
    self,
    services_to_add: Optional[List[Service]] = None,
    port: int = 0,
    bind_host: str = "127.0.0.1",
    log_configure_logger: bool = True,
    log_minimal_level: str = "INFO",
    log_fancy_output: Optional[bool] = None,
):
    """Initialize the daemon.

    Args:
        services_to_add: services to register (default: none).
        port: TCP port to use (semantics of 0 defined by the listener —
            TODO confirm whether it means "auto/disabled").
        bind_host: host/interface to bind.
        log_configure_logger: if True, configure mflog with the two
            log_* settings below.
        log_minimal_level: minimal mflog level (when configuring).
        log_fancy_output: mflog fancy output flag (when configuring).
    """
    self.manager: Manager = Manager()
    self.__wait_task = None
    # BUG FIX: the original signature used a mutable default ([]) for
    # services_to_add; that single list object is created once and shared
    # by every instance built with the default, so mutations leak across
    # instances.  Use a None sentinel and build a fresh list instead
    # (backward-compatible: callers passing a list are unaffected).
    self.services_to_add = services_to_add if services_to_add is not None else []
    self.__shutdown_task = None
    self.port = port
    self.bind_host = bind_host
    self.log_minimal_level = log_minimal_level
    self.log_fancy_output = log_fancy_output
    self.log_configure_logger = log_configure_logger
    if self.log_configure_logger:
        mflog.set_config(fancy_output=self.log_fancy_output,
                         minimal_level=self.log_minimal_level)
    self.logger = mflog.get_logger("alwaysup.daemon")
#!/usr/bin/env python3 import os import sys from mflog import get_logger from mfutil import BashWrapper, get_tmp_filepath from mfplugin.compat import get_installed_plugins MFMODULE_HOME = os.environ.get("MFMODULE_HOME", None) MFMODULE = os.environ.get("MFMODULE", None) LOGGER = get_logger("_make_crontab.py") if not os.path.isfile(f"{MFMODULE_HOME}/config/crontab"): sys.exit(0) # FIXME: deprecated => remove for 0.11 release os.environ["RUNTIME_SUFFIX"] = "" x = BashWrapper(f"cat {MFMODULE_HOME}/config/crontab " "|envtpl --reduce-multi-blank-lines") if not x: LOGGER.critical("can't build module level crontab, details: %s" % x) sys.exit(1) print(x.stdout) plugins = [] try: plugins = get_installed_plugins() except Exception: pass for plugin in plugins:
import mflog # Get a logger logger = mflog.get_logger("foobar") # Bind two context variables to this logger logger = logger.bind(user_id=1234, is_logged=True) # Log something logger.info("This is an info message", special_value="foo") logger.critical("This is a very interesting critical message") # Let's play with exception try: # Just set a variable to get a demo of locals variable dump var = {"key1": [1, 2, 3], "key2": "foobar"} 1/0 except Exception: logger.exception("exception raised (a variables dump should follow)")
# jsonsyslog2elasticsearch: module preamble, globals and small helpers.
import datetime
import time
import ciso8601
import signal
import socketserver
import queue
import pytz
import sys
import threading
import logging
from mflog import get_logger, set_config

DESCRIPTION = "syslog daemon which accept only UTF-8 JSON messages to send " \
    "them to an elasticsearch instance with an optional transformation"
LOG = get_logger("jsonsyslog2elasticsearch")
# main-loop flag, flipped by the signal handler
RUNNING = True
# buffered messages waiting to be sent
TO_SEND = []
CHUNK_SIZE = 20000
SYSLOG_THREAD = None
LOG_QUEUE_SIZE_EVERY = 5
DISCARDED = 0


def silent_elasticsearch_logger():
    """Silence the elasticsearch library logger (only CRITICAL passes)."""
    logging.getLogger("elasticsearch").setLevel(logging.CRITICAL)


def signal_handler(signum, frame):
    """Log the received signal and request a shutdown.

    NOTE(review): the body appears to continue beyond this excerpt
    (RUNNING is declared global but not assigned here).
    """
    global RUNNING
    LOG.info("Signal: %i handled => let's stop", signum)
# switch/rules: module preamble and sys.path helper.
import fnmatch
import re
import os
import importlib
import sys
import time
from mflog import get_logger
from opinionated_configparser import OpinionatedConfigParser

LOGGER = get_logger("switch/rules")


class add_sys_path():
    """Context manager that temporarily prepends a path to sys.path.

    A None or empty path is a no-op; on exit the path is removed again
    (silently ignoring the case where something else already removed it).
    """

    def __init__(self, path):
        self.path = path

    def __enter__(self):
        if self.path is not None and self.path != "":
            sys.path.insert(0, self.path)

    def __exit__(self, exc_type, exc_value, traceback):
        try:
            if self.path is not None and self.path != "":
                sys.path.remove(self.path)
        except ValueError:
            # path already gone: nothing to clean up
            pass


# NOTE(review): class body continues beyond this excerpt
class BadSyntax(Exception):