Example #1
def main():
    """Setup logger and test logging."""
    # validate that Kafka configuration is available
    assert all(key in os.environ for key in REQUIRED_ENV_VARS)

    logger = logging.getLogger("test.logger")
    logger.propagate = False
    log_level = logging.DEBUG

    log_format = logging.Formatter(
        "%(asctime)s %(name)-12s %(levelname)-8s %(message)s",
        "%Y-%m-%dT%H:%M:%S")

    # create handler to write logs to stdout
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setLevel(log_level)
    stdout_handler.setFormatter(log_format)
    logger.addHandler(stdout_handler)

    # create Kafka logging handler
    kafka_handler = KafkaLoggingHandler(
        os.environ["KAFKA_SERVER"],
        os.environ["KAFKA_TOPIC"],
        security_protocol="SSL",
        ssl_cafile=os.environ["KAFKA_CERT"],
        # you can configure how often the logger sends logs to Kafka
        # flush_buffer_size=3,  # uncomment to see that it works slower
        # flush_interval=3.0,  # interval in seconds
        unhandled_exception_logger=logger,
        kafka_producer_args={
            "api_version_auto_timeout_ms": 1000000,
            "request_timeout_ms": 1000000,
        },
        # you can attach arbitrary fields to all produced logs
        additional_fields={"service": "test_service"},
    )
    kafka_handler.setFormatter(log_format)
    logger.addHandler(kafka_handler)

    logger.setLevel(log_level)

    # test logging
    logger.debug("Test debug level logs")
    for idx in range(3):
        logger.info("Test log #%d", idx)
        time.sleep(0.5)

    # test that an unhandled top-level exception gets logged
    raise Exception("No try/except block here")
Example #2
def create_kafka_logging_handler(config):
    from nicos.core import ConfigurationError

    try:
        from kafka_logger.handlers import KafkaLoggingHandler

        if hasattr(config, 'kafka_logger'):
            url = urllib.parse.urlparse(config.kafka_logger)
            if not url.netloc or not url.path[1:]:
                raise ConfigurationError('kafka_logger: invalid url')
            kafka_handler = KafkaLoggingHandler(
                url.netloc,
                url.path[1:],
                security_protocol='PLAINTEXT',
            )
            kafka_handler.setLevel(logging.WARNING)
            return kafka_handler

    except ImportError:
        # the kafka_logger package is optional; skip the handler if missing
        return None
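
The function returns None both when config has no kafka_logger entry and when the kafka_logger package is not installed, so callers should guard the addHandler call. A hypothetical call site (the config object is an assumption):

handler = create_kafka_logging_handler(config)
if handler is not None:
    logging.getLogger().addHandler(handler)  # attach only when available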
Example #3
def configure_logging(
    default_level,
    log_path=None,
    kafka_server=None,
    kafka_topic=None,
    kafka_cafile=None,
):
    logging.setLogRecordFactory(ContextLogRecord)

    formatter = logging.Formatter(
        fmt="{asctime} [{levelname}] [{log_context}] {name}:{lineno} {message}",
        style="{",
    )

    formatter_callback = logging.Formatter(
        fmt="{asctime} [{levelname}] {name}:{lineno} {message}", style="{")

    root_logger = logging.getLogger()

    if log_path is not None:
        file_log = logging.FileHandler(log_path)
        file_log.setFormatter(formatter)
        root_logger.addHandler(file_log)

    console_log = logging.StreamHandler()
    console_log.setFormatter(formatter)
    root_logger.addHandler(console_log)

    callback_id_log = file_callback_log.FileCallbackHandler()
    callback_id_log.setFormatter(formatter_callback)
    root_logger.addHandler(callback_id_log)

    # Cleanup of old log files
    asyncio.get_event_loop().create_task(
        file_callback_log.setup_clean_old_logfiles())

    root_logger.setLevel(getattr(logging, default_level))

    if kafka_server and kafka_topic and kafka_cafile:
        logger.info("Setting up Kafka logging handler")
        # we only care if you fail, kafka
        logger_kafka = logging.getLogger("kafka")
        logger_kafka.setLevel(logging.ERROR)

        kafka_handler_obj = KafkaLoggingHandler(
            kafka_server,
            kafka_topic,
            log_preprocess=[adjust_kafka_timestamp],
            ssl_cafile=kafka_cafile,
        )
        root_logger.addHandler(kafka_handler_obj)
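
adjust_kafka_timestamp is not shown on this page. Based on the preprocessor contract visible in Example #6 (a callable that receives the log record as a dict and returns it), a hypothetical sketch might look like:

def adjust_kafka_timestamp(log):
    # assumption: normalize the timestamp field before shipping to Kafka;
    # the field names here are illustrative, not the project's actual ones
    log["@timestamp"] = log.pop("timestamp", None)
    return log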
Example #4
def main():
    """Setup logger and test logging."""
    global LOGGER

    # validate that Kafka configuration is available
    assert all(key in os.environ for key in REQUIRED_ENV_VARS)

    LOGGER = logging.getLogger("test.logger")
    LOGGER.propagate = False
    log_level = logging.DEBUG

    log_format = logging.Formatter(
        '%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
        '%Y-%m-%dT%H:%M:%S')

    # create handler to write logs to stdout
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setLevel(log_level)
    stdout_handler.setFormatter(log_format)
    LOGGER.addHandler(stdout_handler)

    # create Kafka logging handler
    kafka_handler = KafkaLoggingHandler(
        os.environ['KAFKA_SERVER'],
        os.environ['KAFKA_TOPIC'],
        security_protocol='SSL',
        ssl_cafile=os.environ['KAFKA_CERT'],
        unhandled_exception_logger=LOGGER,
        additional_fields={"service": "test_service"})
    kafka_handler.setFormatter(log_format)
    LOGGER.addHandler(kafka_handler)

    LOGGER.setLevel(log_level)

    LOGGER.info("Hi there, I'm the main process! %s", get_process_thread())

    # test child processes
    child_processes = []
    for idx in range(CHILD_PROCESSES):
        child = Process(target=child_process,
                        name="Child process #{}".format(idx),
                        args=(idx, ))
        child_processes.append(child)
        child.start()

    import time
    time.sleep(1)  # give the children time to exit (main process only)
    alive = [proc.is_alive() for proc in child_processes]
    assert not any(alive)  # all child processes should have finished

    LOGGER.info('Multiprocessing logging.shutdown() works')

    threads = []
    for idx in range(CHILD_THREADS):
        thread = threading.Thread(
            target=thread_function,
            name="Thread of the main process #{}".format(idx),
            args=(idx, ))
        threads.append(thread)
        thread.start()
    # wait for threads to finish
    for thread in threads:
        thread.join()

    LOGGER.info('Multithreading logging.shutdown() works')
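
The constants and worker targets referenced above are defined elsewhere in the test module. Hypothetical stand-ins that make the example self-contained (the names match the calls above, the bodies are assumptions):

import multiprocessing

CHILD_PROCESSES = 2
CHILD_THREADS = 2

def get_process_thread():
    """Return 'process-name / thread-name' for the caller."""
    return "{} / {}".format(multiprocessing.current_process().name,
                            threading.current_thread().name)

def child_process(idx):
    LOGGER.info("Hello from child process #%d! %s", idx, get_process_thread())

def thread_function(idx):
    LOGGER.info("Hello from thread #%d! %s", idx, get_process_thread())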
Example #5
def main():
    """Setup logger and test logging."""
    # validate that Kafka configuration is available
    assert all(key in os.environ for key in REQUIRED_ENV_VARS)

    logger = logging.getLogger("test.logger")
    logger.propagate = False
    log_level = logging.DEBUG

    log_format = logging.Formatter(
        "%(asctime)s %(name)-12s %(levelname)-8s %(message)s",
        "%Y-%m-%dT%H:%M:%S")

    # create handler to write logs to stdout
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setLevel(log_level)
    stdout_handler.setFormatter(log_format)
    logger.addHandler(stdout_handler)

    # create Kafka logging handler
    kafka_handler = KafkaLoggingHandler(
        os.environ["KAFKA_SERVER"],
        os.environ["KAFKA_TOPIC"],
        security_protocol="SSL",
        ssl_cafile=os.environ["KAFKA_CERT"],
        unhandled_exception_logger=logger,
    )
    kafka_handler.setFormatter(log_format)
    logger.addHandler(kafka_handler)

    logger.setLevel(log_level)

    logger.info("Test log with int parameter: %d", 42)
    logger.info("Test log with multiple parameters: %d %f", 42, 43.2)

    logger.info("Test log with str parameter: %s", "test1")
    logger.info("Test log with multiple str parameters: %s %s", "test1",
                "test2")

    custom_object = CustomClassConvertable("test")
    # %s formatting uses __str__, falling back to __repr__ when it is absent
    logger.info("Test logging of custom obj: %s", custom_object)
    # the log record will contain values along these lines:
    # args: <__main__.CustomClassConvertable object at 0x7f3147041c88>
    # message: Test logging of custom obj: CustomClass: test

    # extra values have to be JSON serializable
    try:
        json.dumps(custom_object)
        # raises TypeError: Object of type 'CustomClassConvertable' is not JSON serializable
    except TypeError:
        logger.exception("Attempt to log non JSON serializable data")
    # convert extra values to JSON-serializable types before logging
    logger.info(
        "Test custom objects in extra argument",
        extra={
            "custom_field_number": 42,
            "custom_field_json": {
                "a": "test",
                "b": "test"
            },
        },
    )

    # logging single object without formatting
    custom_object_with_str = CustomClassConvertable("test w/ str method")
    logger.info(custom_object_with_str)  # object has __str__ method
    custom_object_wo_str = CustomClass("test w/o str method")
    logger.info(custom_object_wo_str)  # str() will use repr()
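
CustomClass and CustomClassConvertable are defined elsewhere in the test module. Minimal stand-ins consistent with the comments above (the 'CustomClass: test' message implies a __str__ like this):

class CustomClass:
    """No __str__ defined, so str() falls back to the default repr."""
    def __init__(self, value):
        self.value = value

class CustomClassConvertable(CustomClass):
    """Defines __str__, which %s formatting picks up."""
    def __str__(self):
        return "CustomClass: {}".format(self.value)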
Example #6
def main():
    """Setup logger and test logging."""
    # validate that Kafka configuration is available
    assert all(key in os.environ for key in REQUIRED_ENV_VARS)

    logger = logging.getLogger("test.logger")
    logger.propagate = False
    log_level = logging.DEBUG

    log_format = logging.Formatter(
        "%(asctime)s %(name)-12s %(levelname)-8s %(message)s",
        "%Y-%m-%dT%H:%M:%S")

    # create handler to write logs to stdout
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setLevel(log_level)
    stdout_handler.setFormatter(log_format)
    logger.addHandler(stdout_handler)

    def remove_lineno(log):
        """Example preprocessor, includes lineno to message field."""
        log["message"] += " (at line {})".format(log["lineno"])
        del log["lineno"]
        log["custom_field"] = 42
        return log

    def hide_passwords(log):
        """Example preprocessor, hides passwords."""
        for password in PASSWORDS:
            hidden_password = (password[0] + "*" * (len(password) - 2)
                               + password[-1])
            log["message"] = log["message"].replace(password, hidden_password)
        return log

    # create Kafka logging handler
    kafka_handler = KafkaLoggingHandler(
        os.environ["KAFKA_SERVER"],
        os.environ["KAFKA_TOPIC"],
        security_protocol="SSL",
        ssl_cafile=os.environ["KAFKA_CERT"],
        # you can configure how often the logger sends logs to Kafka
        # flush_buffer_size=3,  # uncomment to see that it works slower
        # flush_interval=3.0,  # interval in seconds
        unhandled_exception_logger=logger,
        kafka_producer_args={
            "api_version_auto_timeout_ms": 1000000,
            "request_timeout_ms": 1000000,
        },
        # you can attach arbitrary fields to all produced logs
        additional_fields={"service": "test_service"},
        log_preprocess=[remove_lineno, hide_passwords],
    )
    kafka_handler.setFormatter(log_format)
    logger.addHandler(kafka_handler)

    logger.setLevel(log_level)

    # test logging
    logger.debug("Test debug level logs")
    for idx in range(3):
        logger.info("Test log #%d: %s", idx, random.choice(PASSWORDS))
        time.sleep(0.5)
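
PASSWORDS is a module-level list of secrets defined elsewhere. With an illustrative value, and assuming hide_passwords were defined at module level, the masking (first and last characters kept, the middle starred out) could be checked like this:

PASSWORDS = ["hunter2"]  # illustrative secret, not from the original module

log = {"message": "Test log #0: hunter2"}
assert hide_passwords(log)["message"] == "Test log #0: h*****2"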
Example #7
def main():
    """Setup logger and test logging."""
    global LOGGER

    # validate that Kafka configuration is available
    assert all(key in os.environ for key in REQUIRED_ENV_VARS)

    LOGGER = logging.getLogger("test.logger")
    LOGGER.propagate = False
    log_level = logging.DEBUG

    log_format = logging.Formatter(
        "%(asctime)s %(name)-12s %(levelname)-8s %(message)s", "%Y-%m-%dT%H:%M:%S"
    )

    # create handler to write logs to stdout
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setLevel(log_level)
    stdout_handler.setFormatter(log_format)
    LOGGER.addHandler(stdout_handler)

    # create Kafka logging handler
    kafka_handler = KafkaLoggingHandler(
        os.environ["KAFKA_SERVER"],
        os.environ["KAFKA_TOPIC"],
        security_protocol="SSL",
        ssl_cafile=os.environ["KAFKA_CERT"],
        unhandled_exception_logger=LOGGER,
        additional_fields={"service": "test_service"},
    )
    kafka_handler.setFormatter(log_format)
    LOGGER.addHandler(kafka_handler)

    LOGGER.setLevel(log_level)

    LOGGER.info("Hi there, I'm the main process! %s", get_process_thread())

    # test child processes
    child_processes = []
    for idx in range(CHILD_PROCESSES):
        child = Process(
            target=child_process, name="Child process #{}".format(idx), args=(idx,)
        )
        child_processes.append(child)
        child.start()

    # testing threads in the main process
    threads = []
    for idx in range(MAIN_PROCESS_THREADS):
        thread = threading.Thread(
            target=main_process_thread,
            name="Thread of the main process #{}".format(idx),
            args=(idx,),
        )
        threads.append(thread)
        thread.start()
    # wait for threads to finish
    for thread in threads:
        thread.join()

    # there is a chance of logs loss
    # if the main process terminates without joining child processes
    for child in child_processes:
        child.join()

    # test if a child of a child process logs correctly
    child_with_subprocess = Process(
        target=child_process_with_grandchild,
        name="Child process that spawns another child",
    )
    child_with_subprocess.start()
    child_with_subprocess.join()

    # test threads in a child process
    child_with_threads = Process(
        target=child_process_with_threads, name="Child process that has a thread pool"
    )
    child_with_threads.start()
    child_with_threads.join()

    # test unhandled exception in a child process
    child_exception = Process(
        target=child_process_with_exception, name="Child process with an exception"
    )
    child_exception.start()
    child_exception.join()

    # the unhandled top-level exception hook works only in the main process
    raise Exception("Testing top-level exception!")
Example #8
def configure_logging(
    default_level,
    log_path=None,
    kafka_server=None,
    kafka_topic=None,
    kafka_cafile=None,
    kafka_sasl_username=None,
    kafka_sasl_password=None,
):
    logging.setLogRecordFactory(ContextLogRecord)

    # stdout
    # 'process' is the custom logger name used when printing live output from a CLI process started by Repour
    formatter = log_util.CustomFormatter(
        "{asctime} [{levelname}] [{log_context}] {name}:{lineno} {message}",
        "process",
        "{asctime} {message}",
    )
    json_formatter = json_custom_formatter.JsonCustomFormatter(
        "%(timestamp)s %(level)s %(name)s %(message)s %(hostName)s %(mdc)s"
    )

    # for callback to send full logs to caller, + to kafka logging
    # 'process' is the custom logger name used when printing live output from a CLI process started by Repour
    formatter_callback = log_util.CustomFormatter(
        "{asctime} [{levelname}] {name}:{lineno} {message}",
        "process",
        "{asctime} {message}",
    )

    root_logger = logging.getLogger()
    if log_path is not None:
        file_log = logging.FileHandler(log_path)
        file_log.setFormatter(formatter)
        root_logger.addHandler(file_log)

    console_log = logging.StreamHandler()
    console_log.setFormatter(json_formatter)
    root_logger.addHandler(console_log)

    callback_id_log = file_callback_log.FileCallbackHandler()
    callback_id_log.setFormatter(formatter_callback)
    root_logger.addHandler(callback_id_log)

    print("Callback handler setup")

    # Cleanup of old log files
    asyncio.get_event_loop().create_task(file_callback_log.setup_clean_old_logfiles())

    root_logger.setLevel(getattr(logging, default_level))

    if (
        kafka_server
        and kafka_topic
        and (kafka_cafile or (kafka_sasl_username and kafka_sasl_password))
    ):
        logger.info("Setting up Kafka logging handler")
        # we only care if you fail, kafka
        logger_kafka = logging.getLogger("kafka")
        logger_kafka.setLevel(logging.ERROR)

        kwargs = {}

        security_protocol = None
        if kafka_sasl_username and kafka_sasl_password:
            logger.info("Configuring Kafka logging with SASL authentication")
            kwargs["sasl_mechanism"] = "PLAIN"
            kwargs["sasl_plain_username"] = kafka_sasl_username
            kwargs["sasl_plain_password"] = kafka_sasl_password
            security_protocol = "SASL_SSL"
        else:
            security_protocol = "SSL"

        try:
            kafka_handler_obj = KafkaLoggingHandler(
                kafka_server,
                kafka_topic,
                log_preprocess=[adjust_kafka_metadata],
                ssl_cafile=kafka_cafile,
                security_protocol=security_protocol,
                kafka_producer_args=kwargs,
            )
            kafka_handler_obj.setFormatter(formatter_callback)
            root_logger.addHandler(kafka_handler_obj)
        except Exception:
            logger.exception("Kafka logging could not be setup")
Example #9
import logging
import logging.config
from kafka_logger.handlers import KafkaLoggingHandler

logging.config.fileConfig('logging.conf')
logger = logging.getLogger('ktig')

KAFKA_BOOTSTRAP_SERVER = 'kafka:9092'
TOPIC = 'ktig'

kafka_handler_obj = KafkaLoggingHandler(KAFKA_BOOTSTRAP_SERVER, TOPIC,
                                        security_protocol='PLAINTEXT',
                                        unhandled_exception_logger=logger)

logger.addHandler(kafka_handler_obj)

logger.setLevel(logging.DEBUG)


logger.info({'measure_id': 1, 'interval': 10, 'timestamp': '',
             'color': 'red', 'sender_tx_counter': 50,
             'sender_rx_counter': 50, 'reflector_tx_counter': 48,
             'reflector_rx_counter': 48})
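
To confirm the records arrive, a consumer on the same topic can print them back; a sketch using kafka-python (an assumption, though the kafka_producer_args seen in earlier examples suggest the handler wraps kafka-python's producer):

from kafka import KafkaConsumer

consumer = KafkaConsumer(TOPIC,
                         bootstrap_servers=KAFKA_BOOTSTRAP_SERVER,
                         auto_offset_reset='earliest')
for record in consumer:
    print(record.value)  # raw bytes of the JSON log event produced by the handler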