Example #1
def minimal_logger(namespace, extra_fields=None, debug=False):
    """Make and return a minimal console logger.

    NOTE: this apparently does *not* work with logbook as I first thought;
    log handlers will *not* take care of output. If something is to be
    logged to a file in a module, the logger has to be implemented for
    that particular purpose.

    The current function is copied from cement.core.backend.

    :param namespace: namespace of the logger
    """
    config = cf.load_config()
    log = logbook.Logger(namespace, level=logbook.INFO)
    s_h = logbook.StreamHandler(sys.stdout, level=logbook.INFO, bubble=True)
    log.handlers.append(s_h)
    try:
        host = config.get('log', 'redis_host')
        port = config.getint('log', 'redis_port')
        key = config.get('log', 'redis_key')
        password = config.get('log', 'redis_password')
        if not extra_fields:
            extra_fields = {"program": "pm", "command": namespace}
        r_h = RedisHandler(host=host,
                           port=port,
                           key=key,
                           password=password,
                           extra_fields=extra_fields,
                           level=logbook.INFO,
                           bubble=True)
        log.handlers.append(r_h)
    except Exception:
        # Config values missing or RedisHandler unavailable; fall back to
        # console-only logging.
        log.debug('Not loading RedisHandler')

    # FIXME: really don't want to hard-check sys.argv like this, but I can't
    # figure out a better way to get logging started (only for debug) before
    # the app logging is set up. Besides, this will fail for tests since
    # sys.argv will consist of the test call arguments.
    if '--debug' in sys.argv or debug:
        try:
            # If there was any problem loading the RedisHandler, the variable
            # r_h will not exist at this point.
            r_h.level = logbook.DEBUG
        except UnboundLocalError:
            pass
        s_h.level = logbook.DEBUG
        log.level = logbook.DEBUG

    return log
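
For reference, here is a self-contained sketch of the same console-plus-Redis pattern with the imports the snippet relies on spelled out. The connection settings are placeholders (the original reads them from a project config via cf.load_config()), so treat this as an assumption-laden variant rather than the project's code.

import sys

import logbook
from logbook.queues import RedisHandler


def simple_console_and_redis_logger(namespace, debug=False):
    # Hypothetical variant of minimal_logger: console handler always,
    # Redis handler only if the (placeholder) connection settings work out.
    level = logbook.DEBUG if debug else logbook.INFO
    log = logbook.Logger(namespace, level=level)
    log.handlers.append(logbook.StreamHandler(sys.stdout, level=level, bubble=True))
    try:
        log.handlers.append(RedisHandler(host='127.0.0.1', port=6379, key='logs',
                                         extra_fields={'command': namespace},
                                         level=level, bubble=True))
    except Exception:
        # e.g. the redis package is not installed
        log.debug('Not loading RedisHandler')
    return log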
Example #2
def test_redis_handler_rpush():
    """
    Test if rpush stores messages in the right order
    old items should be first on list
    """
    import time

    import logbook
    import redis
    from logbook.queues import RedisHandler
    null_handler = logbook.NullHandler()

    redis_handler = RedisHandler(key='rpushed',
                                 push_method='rpush',
                                 level=logbook.INFO,
                                 bubble=True)

    with null_handler.applicationbound():
        with redis_handler:
            logbook.info("old item")
            logbook.info("new item")

    time.sleep(1.5)

    r = redis.Redis(decode_responses=True)
    logs = r.lrange('rpushed', 0, -1)
    assert logs
    assert "old item" in logs[0]
    r.delete('rpushed')
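
The time.sleep(1.5) above is needed because RedisHandler buffers records and writes them from a background thread (after flush_threshold records or on its flush_time timer), so entries are not visible in Redis immediately. For contrast, a hedged sketch of the lpush variant, which prepends records so the newest item ends up at the head of the list; the key name is arbitrary.

def test_redis_handler_lpush_sketch():
    """Sketch only: lpush should leave the newest item first on the list."""
    import time

    import logbook
    import redis
    from logbook.queues import RedisHandler

    redis_handler = RedisHandler(key='lpushed',
                                 push_method='lpush',
                                 level=logbook.INFO,
                                 bubble=True)

    with logbook.NullHandler().applicationbound():
        with redis_handler:
            logbook.info("old item")
            logbook.info("new item")

    time.sleep(1.5)  # give the background flush thread time to run

    r = redis.Redis(decode_responses=True)
    logs = r.lrange('lpushed', 0, -1)
    assert logs
    assert "new item" in logs[0]
    r.delete('lpushed')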
Example #3
def minimal_logger(namespace, extra_fields=None, debug=False):
    """Make and return a minimal console logger.

    NOTE: this apparently does *not* work with logbook as I first thought;
    log handlers will *not* take care of output. If something is to be
    logged to a file in a module, the logger has to be implemented for
    that particular purpose.

    The current function is copied from cement.core.backend.

    :param namespace: namespace of the logger
    """
    config = cf.load_config()
    log = logbook.Logger(namespace, level=logbook.INFO)
    s_h = logbook.StreamHandler(sys.stdout, level=logbook.INFO, bubble=True)
    log.handlers.append(s_h)
    try:
        host = config.get('log', 'redis_host')
        port = config.getint('log', 'redis_port')
        key = config.get('log', 'redis_key')
        password = config.get('log', 'redis_password')
        if not extra_fields:
            extra_fields = {"program": "pm",
                            "command": namespace}
        r_h = RedisHandler(host=host, port=port, key=key, password=password,
                           extra_fields=extra_fields, level=logbook.INFO,
                           bubble=True)
        log.handlers.append(r_h)
    except Exception:
        # Config values missing or RedisHandler unavailable; fall back to
        # console-only logging.
        log.debug('Not loading RedisHandler')

    # FIXME: really don't want to hard-check sys.argv like this, but I can't
    # figure out a better way to get logging started (only for debug) before
    # the app logging is set up. Besides, this will fail for tests since
    # sys.argv will consist of the test call arguments.
    if '--debug' in sys.argv or debug:
        try:
            # If there was any problem loading the RedisHandler, the variable
            # r_h will not exist at this point.
            r_h.level = logbook.DEBUG
        except UnboundLocalError:
            pass
        s_h.level = logbook.DEBUG
        log.level = logbook.DEBUG

    return log
Example #4
def create_redis_handler(host="127.0.0.1",
                         port=6379,
                         key="ultros",
                         extra_fields=None,
                         flush_threshold=128,
                         flush_time=1,
                         level=0,
                         filter=None,
                         password=False,
                         bubble=True,
                         context=None,
                         push_method="rpush"):
    if not extra_fields:
        extra_fields = {}

    return ThreadedWrapperHandler(
        RedisHandler(host, port, key, extra_fields, flush_threshold,
                     flush_time, level, filter, password, bubble, context,
                     push_method))
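
A hypothetical usage sketch for this factory (it assumes the module already has from logbook.queues import RedisHandler, ThreadedWrapperHandler for the definition above; the key and message are illustrative). Wrapping the RedisHandler in a ThreadedWrapperHandler keeps serialization and the Redis round-trip off the calling thread, so a slow or unreachable server does not block the code doing the logging.

import logbook

handler = create_redis_handler(key='ultros', level=logbook.INFO)  # 'ultros' is also the default key
with handler.applicationbound():
    logbook.Logger('demo').info('plugin loaded')
handler.close()  # stop the wrapper's background worker thread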
Example #5
def test_redis_handler():
    import redis
    from logbook.queues import RedisHandler

    KEY = 'redis'
    FIELDS = ['message', 'host']
    r = redis.Redis(decode_responses=True)
    redis_handler = RedisHandler(level=logbook.INFO, bubble=True)
    # We don't want output for the tests, so we can wrap everything in a NullHandler
    null_handler = logbook.NullHandler()

    # Check default values
    with null_handler.applicationbound():
        with redis_handler:
            logbook.info(LETTERS)

    key, message = r.blpop(KEY)
    # Are all the fields in the record?
    for field in FIELDS:
        assert field in message
    assert key == KEY
    assert LETTERS in message

    # Change the key of the handler and check on redis
    KEY = 'test_another_key'
    redis_handler.key = KEY

    with null_handler.applicationbound():
        with redis_handler:
            logbook.info(LETTERS)

    key, message = r.blpop(KEY)
    assert key == KEY

    # Check that extra fields are added if specified when creating the handler
    FIELDS.append('type')
    extra_fields = {'type': 'test'}
    del redis_handler
    redis_handler = RedisHandler(key=KEY, level=logbook.INFO,
                                 extra_fields=extra_fields, bubble=True)

    with null_handler.applicationbound():
        with redis_handler:
            logbook.info(LETTERS)

    key, message = r.blpop(KEY)
    for field in FIELDS:
        assert field in message
    assert 'test' in message

    # And finally, check that fields are correctly added if appended to the
    # log message
    FIELDS.append('more_info')
    with null_handler.applicationbound():
        with redis_handler:
            logbook.info(LETTERS, more_info='This works')

    key, message = r.blpop(KEY)
    for field in FIELDS:
        assert field in message
    assert 'This works' in message
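
The field checks above work because RedisHandler serializes each record to a JSON string containing the message, the host, any extra_fields, and any keyword arguments passed to the log call. A hedged sketch that decodes one payload instead of searching substrings; the exact key layout is inferred from the checks in this test, and the key name is illustrative.

def check_json_payload_sketch():
    """Sketch only: decode one RedisHandler payload and inspect its fields."""
    import json
    import time

    import logbook
    import redis
    from logbook.queues import RedisHandler

    r = redis.Redis(decode_responses=True)
    handler = RedisHandler(key='json_check', extra_fields={'type': 'test'},
                           level=logbook.INFO, bubble=True)
    with logbook.NullHandler().applicationbound():
        with handler:
            logbook.info('hello', more_info='This works')

    time.sleep(1.5)  # wait for the background flush
    record = json.loads(r.lrange('json_check', 0, -1)[0])
    print(sorted(record))                       # field names present in the payload
    assert 'hello' in record.get('message', '')
    r.delete('json_check')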
Example #6
def test_redis_handler():
    import redis
    from logbook.queues import RedisHandler

    KEY = 'redis'
    FIELDS = ['message', 'host']
    r = redis.Redis(decode_responses=True)
    redis_handler = RedisHandler(level=logbook.INFO, bubble=True)
    # We don't want output for the tests, so we can wrap everything in a NullHandler
    null_handler = logbook.NullHandler()

    # Check default values
    with null_handler.applicationbound():
        with redis_handler:
            logbook.info(LETTERS)

    key, message = r.blpop(KEY)
    # Are all the fields in the record?
    for field in FIELDS:
        assert field in message
    assert key == KEY
    assert LETTERS in message

    # Change the key of the handler and check on redis
    KEY = 'test_another_key'
    redis_handler.key = KEY

    with null_handler.applicationbound():
        with redis_handler:
            logbook.info(LETTERS)

    key, message = r.blpop(KEY)
    assert key == KEY

    # Check that extra fields are added if specified when creating the handler
    FIELDS.append('type')
    extra_fields = {'type': 'test'}
    del redis_handler
    redis_handler = RedisHandler(key=KEY,
                                 level=logbook.INFO,
                                 extra_fields=extra_fields,
                                 bubble=True)

    with null_handler.applicationbound():
        with redis_handler:
            logbook.info(LETTERS)

    key, message = r.blpop(KEY)
    for field in FIELDS:
        assert field in message
    assert 'test' in message

    # And finally, check that fields are correctly added if appended to the
    # log message
    FIELDS.append('more_info')
    with null_handler.applicationbound():
        with redis_handler:
            logbook.info(LETTERS, more_info='This works')

    key, message = r.blpop(KEY)
    for field in FIELDS:
        assert field in message
    assert 'This works' in message
Example #7

from time import time

import logbook

from logbook.queues import RedisHandler

from datetime import datetime
logbook.set_datetime_format("local")

import gevent

logger = logbook.Logger('logger')

log = logbook.FileHandler('test_debug.log', level='DEBUG')

log.push_application()

log2 = RedisHandler('127.0.0.1', port=6379, key='logQ')
#logbook.FileHandler('test_info.log')

log2.push_application()

while True:
    
    t = time()
    logger.debug("debug,timestamp:[%s]" % (t))
    logger.info("timestamp:[%s]" % (t))
    print(t)
    gevent.sleep(3)
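
The script above only produces log entries; a separate process can drain the same Redis list. A minimal consumer sketch follows (the 'logQ' key matches the producer; the JSON decode assumes RedisHandler's JSON payloads and falls back to printing the raw string otherwise).

import json

import redis

r = redis.Redis(host='127.0.0.1', port=6379, decode_responses=True)

while True:
    # blpop blocks until an entry is available on the 'logQ' list
    _, raw = r.blpop('logQ')
    try:
        record = json.loads(raw)
    except ValueError:
        print(raw)          # not JSON; print as-is
    else:
        print(record.get('message'), record)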
Example #8
File: __init__.py  Project: senthil10/bcbb
def create_log_handler(config, batch_records=False):
    log_dir = config.get("log_dir", None)
    email = config.get("email", None)
    rabbitmq = config.get("rabbitmq_logging", None)
    redis = config.get("redis_handler", None)
    handlers = []

    if log_dir:
        utils.safe_makedir(log_dir)
        handlers.append(
            logbook.FileHandler(os.path.join(log_dir, "%s.log" % LOG_NAME)))
    else:
        handlers.append(logbook.StreamHandler(sys.stdout))

    if email:
        smtp_host = config.get("smtp_host", None)
        smtp_port = config.get("smtp_port", 25)
        if smtp_host is not None:
            smtp_host = [smtp_host, smtp_port]

        email = email.split(",")
        handlers.append(
            logbook.MailHandler(
                email[0],
                email,
                server_addr=smtp_host,
                format_string=
                u'''Subject: [BCBB pipeline] {record.extra[run]} \n\n {record.message}''',
                level='INFO',
                bubble=True))
    if rabbitmq:
        from logbook.queues import RabbitMQHandler
        handlers.append(
            RabbitMQHandler(rabbitmq["url"],
                            queue=rabbitmq["log_queue"],
                            bubble=True))

    if redis:
        try:
            redis_host = config.get('redis_handler').get('host')
            redis_port = int(config.get('redis_handler').get('port'))
            redis_key = config.get('redis_handler').get('key')
            redis_password = config.get('redis_handler').get('password')
            redis_handler = RedisHandler(host=redis_host,
                                         port=redis_port,
                                         key=redis_key,
                                         password=redis_password)
            handlers.append(redis_handler)
        except Exception:
            logger2.warn("Failed loading Redis handler, please check your "
                         "configuration and the connectivity to your Redis database")

    if config.get("debug", False):
        for handler in handlers:
            handler.level = logbook.DEBUG
        logger2.level = logbook.DEBUG

    else:
        for handler in handlers:
            handler.level = logbook.INFO

    return logbook.NestedSetup(handlers)
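
A hypothetical configuration and call for this factory, using the keys the function actually reads (log_dir, email, smtp_host, smtp_port, rabbitmq_logging, redis_handler, debug); every value is a placeholder, and logger2 is the module-level logbook Logger the snippet already refers to.

config = {
    "log_dir": "/tmp/bcbb_logs",        # placeholder path
    "email": "ops@example.com",          # comma-separated recipient list
    "smtp_host": "localhost",
    "smtp_port": 25,
    "redis_handler": {
        "host": "127.0.0.1",
        "port": 6379,
        "key": "bcbb_logs",
        "password": "",
    },
    "debug": True,
}

with create_log_handler(config).applicationbound():
    logger2.info("pipeline started")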
Example #9

from time import time

import logbook

from logbook.queues import RedisHandler

from datetime import datetime
logbook.set_datetime_format("local")

import gevent

logger = logbook.Logger('logger')

log = logbook.FileHandler('test_debug.log', level='DEBUG')

log.push_application()

log2 = RedisHandler('127.0.0.1', port=6379, key='logQ')
#logbook.FileHandler('test_info.log')

log2.push_application()

while True:
    
    t = time()
    logger.debug("debug,timestamp:[%s]" % (t))
    logger.info("timestamp:[%s]" % (t))
    print(t)
    gevent.sleep(3)
Example #10
    # Construct the results dict
    for buffer_size in buffer_sizes:
        results['buffering'][str(buffer_size)] = OrderedDict()
        for rush in rushes:
            results['no_buffering'][str(rush)] = {}
            results['buffering'][str(buffer_size)][str(rush)] = {}

    #########################
    #                       #
    #  Tests with buffering #
    #                       #
    #########################

    for buffer_size in buffer_sizes:
        h = RedisHandler(flush_threshold=buffer_size)
        for rush in rushes:
            for execution in range(EXECUTIONS):
                with h:
                    t_start = time.time()
                    for i in range(rush):
                        l.info(MESSAGE.format(num=str(i)))
                    t_end = time.time()

                    results['buffering'][str(buffer_size)][str(rush)][str(execution)] = \
                        str(t_end - t_start)

                # Clean up the Redis list between executions
                while r.keys():
                    r.blpop(KEY)
                    r.blpop(KEY)
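
The snippet above starts mid-script, so the names it uses (results, rushes, buffer_sizes, EXECUTIONS, KEY, MESSAGE, r, l, h) come from an earlier, elided setup. A hypothetical preamble that is consistent with how those names are used; every value here is an assumption for illustration only.

import time
from collections import OrderedDict

import logbook
import redis
from logbook.queues import RedisHandler

KEY = 'redis'                        # RedisHandler's default key, used by the cleanup blpop (assumed)
MESSAGE = 'benchmark message {num}'  # template filled with the loop counter (assumed)
EXECUTIONS = 3                       # repetitions per (buffer_size, rush) pair (assumed)
buffer_sizes = [1, 16, 128, 1024]    # flush_threshold values to compare (assumed)
rushes = [10, 100, 1000]             # number of log calls per burst (assumed)

r = redis.Redis()
l = logbook.Logger('benchmark')
results = {'buffering': OrderedDict(), 'no_buffering': OrderedDict()}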