def test_bad_encoding_error_fallback():
    # An unknown error-handler name must make StreamEmitter raise
    # LookupError at construction time, not fail later on first write.
    saw_lookup_error = False
    try:
        StreamEmitter('stderr', errors='badvalue')
    except LookupError:
        saw_lookup_error = True
    assert saw_lookup_error
def test_bad_encoding():
    # An unknown codec name must make StreamEmitter raise LookupError
    # at construction time.
    saw_lookup_error = False
    try:
        StreamEmitter('stderr', encoding='nope')
    except LookupError:
        saw_lookup_error = True
    assert saw_lookup_error
def _test_exception():
    # Drive a failing action through a sink whose formatter renders
    # exception details (type, message, traceback list). With
    # reraise=False the ValueError is swallowed by the context manager.
    _tmpl = ('{iso_end} - {exc_type}: {exc_message}'
             ' - {func_name}:{line_number} - {exc_tb_list}')
    sink = SensibleSink(SF(_tmpl),
                        StreamEmitter('stderr'),
                        filters=[SensibleFilter(exception=0)])
    logger = Logger('excelsilog', [sink])
    with logger.info('A for Effort', reraise=False) as tr:
        # fix: parenthesized print is identical on Python 2 and valid
        # on Python 3 (original bare `print tr` is py2-only)
        print(tr)
        raise ValueError('E for Exception')
    return
def test_stale_stream(tmpdir):
    """StreamEmitter must recover from a stale file handle (ESTALE)
    mid-run: every entry still gets written and the emitter swaps in a
    fresh stream object."""
    class StalewardFile(file):
        # Mock file whose write() raises IOError(ESTALE) and closes
        # itself after 100 successful writes, mimicking e.g. an NFS
        # handle going stale.
        def __init__(self, *a, **kw):
            file.__init__(self, *a, **kw)
            self._write_count = 0

        def write(self, *a, **kw):
            self._write_count += 1
            if self._write_count > 100:
                exc = IOError('stale file handle')
                exc.errno = errno.ESTALE
                self.close()
                raise exc
            return file.write(self, *a, **kw)

    file_path = '%s/not_always_fresh.log' % (tmpdir, )
    stale_file_obj = StalewardFile(file_path, 'wb')

    emitter = StreamEmitter(stale_file_obj)
    sink = SensibleSink(SF('{status_char} - {iso_end}'),
                        emitter,
                        filters=[SensibleFilter(success=True)])
    logger = Logger('excelsilog', [sink])
    assert emitter.stream is stale_file_obj

    for i in range(200):
        logger.info('yay').success()

    # fix: close the verification handle instead of leaking it
    with open(file_path) as f:
        lines = f.read().splitlines()

    # all 200 entries written, uniformly formatted, despite going stale
    assert len(lines) == 200
    assert len(lines[0]) == len(lines[-1])
    # the stale object was abandoned and a new stream swapped in
    assert stale_file_obj.closed
    assert emitter.stream is not stale_file_obj
# -*- coding: utf-8 -*-
from lithoxyl import (SensibleSink,
                      SensibleFilter,
                      SensibleFormatter as SF,
                      SensibleMessageFormatter as SMF)
from lithoxyl.emitters import StreamEmitter, AggregateEmitter
from lithoxyl.logger import Logger

# Shared fixtures: one sink writes to stderr for visibility; the other
# aggregates entries in memory so tests can inspect what was emitted.
fmtr = SF('{status_char}{begin_timestamp}')
strm_emtr = StreamEmitter('stderr')
fltr = SensibleFilter('debug')
aggr_emtr = AggregateEmitter()
strm_sink = SensibleSink(formatter=fmtr, emitter=strm_emtr)
fake_sink = SensibleSink(filters=[fltr], formatter=fmtr, emitter=aggr_emtr)


def test_sensible_basic():
    log = Logger('test_ss', [strm_sink, fake_sink])

    # fix: parenthesized print is identical on Python 2 and valid on
    # Python 3 (original bare `print expr` is py2-only)
    print(log.debug('greet').success('hey'))
    assert aggr_emtr.get_entry(-1).startswith('s')

    with log.debug('greet') as t:
        t.success('hello')
        t.warn("everything ok?")
    # NOTE(review): the entry's leading status char flips case between
    # the two flows ('s' vs 'S') — presumably distinguishing the
    # completion styles; verify against the formatter's docs.
    assert aggr_emtr.get_entry(-1).startswith('S')
pass url_log = SupportLogger('url') worker_log = SupportLogger('worker') support_log = SupportLogger('support') extra_fields = [SensibleField('current_gthread_id' , 'd', get_current_gthreadid, quote=False)] # TODO: create/attach this in context stderr_fmt = SensibleFormatter('{iso_end_local_notz} {module_path}' ' ({current_gthread_id}) - {message}', extra_fields=extra_fields) stderr_emt = StreamEmitter('stderr') stderr_sink = SensibleSink(formatter=stderr_fmt, emitter=stderr_emt, on='end') url_log.add_sink(stderr_sink) worker_log.add_sink(stderr_sink) support_log.add_sink(stderr_sink) #url_log.critical('serve_http_request').success('{method} {url}', method='GET', url='/') class LoggingContext(object): logger_type = SupportLogger def __init__(self, level=None, enable_stderr=True):