class DebugDumper(BCRelay):
    """Relay that buffers log records in memory and dumps them on demand.

    The dump can be triggered either by an OS signal (records are re-issued
    at FATAL level) or by an IRC-style command dispatched to this relay.
    """

    def __init__(self, *argz, **kwz):
        super(DebugDumper, self).__init__(*argz, **kwz)
        # Simple buffered handler that never triggers flush: the deque is
        # bounded at conf.capacity while the handler's own capacity is bumped
        # one past it, so shouldFlush's count check can never fire.
        self.handler = BufferingHandler(capacity=self.conf.capacity)
        self.buffer = self.handler.buffer = deque(maxlen=self.handler.capacity)
        self.handler.capacity += 1
        self.handler.setLevel(self.conf.level)
        self.handler.setFormatter(
            logging.Formatter(self.conf.format, self.conf.datefmt))
        logging.root.addHandler(self.handler)
        # Signal log-dump interface: conf.signal may be given as a name
        # ("USR1" or "SIGUSR1"); resolve it to a signal number.
        if self.conf.signal and isinstance(self.conf.signal, str):
            signum = getattr(signal, self.conf.signal, None)
            if not signum:
                signum = getattr(signal, 'SIG{}'.format(self.conf.signal), None)
            self.conf.signal = signum
        if self.conf.signal:
            def signal_handler(sig, frm):
                # Suppress buffering of re-issued messages: temporarily swap
                # emit for a no-op so the FATAL re-logs below don't loop back
                # into this handler's buffer.
                self.handler._emit, self.handler.emit = \
                    self.handler.emit, lambda *a, **k: None
                for msg in list(self.buffer):
                    log.fatal(self.handler.format(msg))
                self.handler.emit = self.handler._emit
            signal.signal(self.conf.signal, signal_handler)

    def dispatch(self, msg, source=None):
        """Dump the buffered records when *msg* matches the configured command.

        The formatted backlog is sent through the interface on the next
        reactor iteration; unknown commands are logged and ignored.
        """
        if msg != self.conf.command:
            log.noise('Ignoring unknown command: {!r} (source: {})'.format(msg, source))
            return
        if not self.conf.direct:
            source = None  # reply to whatever destination channel
        msg = '\n'.join(it.imap(self.handler.format, list(self.buffer)))
        reactor.callLater(
            0, self.interface.dispatch, msg,
            source=self, user=source, direct=True)
def __init__(self):
    """Create the handler with zero capacity.

    BufferingHandler takes a "capacity" argument so as to know when to
    flush. As we're overriding shouldFlush anyway, we can set a capacity
    of zero. You can call flush() manually to clear out the buffer.
    """
    BufferingHandler.__init__(self, 0)
def __init__(self, config, level=None):
    """Buffering log handler driven by a configuration object.

    Args:
        config: object exposing logging_format, logging_datefmt,
            logging_level and logging_filter attributes.
        level: explicit numeric level; when given it overrides
            config.logging_level.

    Raises:
        ConfigError: if config.logging_level is not a valid level name.
    """
    BufferingHandler.__init__(self, 1000)  # keep up to 1000 records in memory
    self.config = config
    self.old_handlers = []
    # set my formatter (fall back to a minimal default format)
    fmt = datefmt = None
    if config.logging_format:
        fmt = config.logging_format
    else:
        fmt = '%(levelname)s:%(name)s:%(message)s'
    if config.logging_datefmt:
        datefmt = config.logging_datefmt
    fmt = logging.Formatter(fmt, datefmt)
    self.setFormatter(fmt)
    # figure the level we're logging at: explicit arg wins, then the
    # configured level name (resolved against the logging module), else NOTSET
    if level is not None:
        self.level = level
    elif config.logging_level:
        self.level = getattr(logging, config.logging_level.upper(), None)
        if self.level is None:
            raise ConfigError('Invalid log level: "%s"' % config.logging_level)
    else:
        self.level = logging.NOTSET
    # construct my filter
    if config.logging_filter:
        self.addFilter(RecordFilter(config.logging_filter))
def __init__(self, config, level=None):
    """Buffering log handler configured from *config*.

    Args:
        config: object exposing logging_format, logging_datefmt,
            logging_level and logging_filter attributes.
        level: explicit numeric level; overrides config.logging_level.

    Raises:
        ConfigError: when config.logging_level names no known level.
    """
    BufferingHandler.__init__(self, 1000)
    self.config = config
    self.old_handlers = []
    # Formatter: user-supplied pattern, falling back to a minimal default.
    pattern = config.logging_format or "%(levelname)s:%(name)s:%(message)s"
    datefmt = config.logging_datefmt or None
    self.setFormatter(logging.Formatter(pattern, datefmt))
    # Level: explicit argument wins, then the configured name, else NOTSET.
    if level is not None:
        self.level = level
    elif config.logging_level:
        self.level = getattr(logging, config.logging_level.upper(), None)
        if self.level is None:
            raise ConfigError('Invalid log level: "%s"' % config.logging_level)
    else:
        self.level = logging.NOTSET
    # Optional record filter.
    if config.logging_filter:
        self.addFilter(RecordFilter(config.logging_filter))
def __init__(self, config, level=None):
    """Buffering log handler configured from *config*.

    Args:
        config: object exposing logging_format, logging_datefmt,
            logging_level and logging_filter attributes.
        level: explicit level; overrides config.logging_level.
    """
    BufferingHandler.__init__(self, 1000)  # keep up to 1000 records
    self.config = config
    self.old_handlers = []
    self.old_level = None
    # set my formatter (fall back to a minimal default format)
    log_format = datefmt = None
    if config.logging_format:
        log_format = config.logging_format
    else:
        log_format = '%(levelname)s:%(name)s:%(message)s'
    if config.logging_datefmt:
        datefmt = config.logging_datefmt
    formatter = logging.Formatter(log_format, datefmt)
    self.setFormatter(formatter)
    # figure the level we're logging at
    if level is not None:
        self.level = level
    elif config.logging_level:
        # NOTE(review): logging_level is used as-is here (no name->int
        # resolution) — presumably it is already a numeric level; confirm.
        self.level = config.logging_level
    else:
        self.level = logging.NOTSET
    # construct my filter
    if config.logging_filter:
        self.addFilter(RecordFilter(config.logging_filter))
def __init__(self, indexed_keys={'level', 'short_message'}, debugging_fields=True,
             extra_fields=True, localname=None, measurement=None, level_names=False,
             capacity=64, flush_interval=5, backpop=True, **client_kwargs):
    """Buffering InfluxDB log handler with an optional background flusher.

    Args:
        indexed_keys: record keys to index (NOTE(review): the default set is
            a shared mutable default argument; callers must not mutate it).
        debugging_fields / extra_fields / level_names / backpop: passed
            through to InfluxHandler.
        localname, measurement: passed through to InfluxHandler.
        capacity: buffered record count before flush.
        flush_interval: seconds between background flushes; None disables
            the flusher thread entirely.
        **client_kwargs: forwarded to InfluxDBClient.
    """
    self.debugging_fields = debugging_fields
    self.extra_fields = extra_fields
    self.localname = localname
    self.measurement = measurement
    self.level_names = level_names
    self.indexed_keys = indexed_keys
    self.client = InfluxDBClient(**client_kwargs)
    self.flush_interval = flush_interval
    # Background flusher is optional: no thread when flush_interval is None.
    self._thread = None if flush_interval is None else threading.Thread(
        target=self._flush_thread, name="BufferingInfluxHandler", daemon=True)
    InfluxHandler.__init__(
        self, indexed_keys=None, debugging_fields=debugging_fields,
        extra_fields=extra_fields, localname=localname,
        measurement=measurement, level_names=level_names,
        backpop=backpop, **client_kwargs)
    BufferingHandler.__init__(self, capacity)
    # Bug fix: only start the thread when one was created; the original
    # unconditionally called self._thread.start(), which raised
    # AttributeError whenever flush_interval=None.
    if self._thread is not None:
        self._thread.start()
def __init__(self, config, level=None):
    """Buffering log handler configured from *config*.

    Args:
        config: object exposing logging_format, logging_datefmt,
            logging_level and logging_filter attributes.
        level: explicit level; overrides config.logging_level.
    """
    BufferingHandler.__init__(self, 1000)  # keep up to 1000 records
    self.config = config
    self.old_handlers = []
    self.old_level = None
    # set my formatter (fall back to a minimal default format)
    fmt = datefmt = None
    if config.logging_format:
        fmt = config.logging_format
    else:
        fmt = '%(levelname)s:%(name)s:%(message)s'
    if config.logging_datefmt:
        datefmt = config.logging_datefmt
    fmt = logging.Formatter(fmt, datefmt)
    self.setFormatter(fmt)
    # figure the level we're logging at
    if level is not None:
        self.level = level
    elif config.logging_level:
        # NOTE(review): logging_level is used verbatim here (no name->int
        # resolution as in sibling variants) — presumably already numeric.
        self.level = config.logging_level
    else:
        self.level = logging.NOTSET
    # construct my filter
    if config.logging_filter:
        self.addFilter(RecordFilter(config.logging_filter))
def __init__(self, target, thresholdLevel, retainLevel):
    """Buffer records destined for *target*.

    Args:
        target: downstream handler receiving flushed records.
        thresholdLevel: level that triggers forwarding.
        retainLevel: minimum level to buffer; defaults to thresholdLevel
            when None.
    """
    BufferingHandler.__init__(self, capacity=0)
    self.target = target
    self.thresholdLevel = thresholdLevel
    self.setLevel(thresholdLevel if retainLevel is None else retainLevel)
def __init__(self, testcase):
    """Create a logging handler for the given test case.

    Args:
        testcase (unittest.TestCase): Owner of this logging handler.
    """
    BufferingHandler.__init__(self, capacity=0)
    self.testcase = testcase
    # Capture everything; filtering is left to the test's own assertions.
    self.setLevel(logging.DEBUG)
def __init__(self, user, password, email_to, email_subject,
             email_from=None, capacity=10000, host='smtp.gmail.com', port=465):
    """Buffering handler that e-mails its records via SMTP.

    Args:
        user / password: SMTP login credentials.
        email_to: recipient address.
        email_subject: subject line for the outgoing mail.
        email_from: sender address; falls back to the login user.
        capacity: number of records to buffer.
        host / port: SMTP server endpoint (defaults to Gmail over SSL).
    """
    BufferingHandler.__init__(self, capacity)
    # SMTP endpoint.
    self.host = host
    self.port = port
    # Credentials.
    self.user = user
    self.password = password
    # Envelope: sender defaults to the authenticated user.
    self.email_from = email_from if email_from is not None else user
    self.email_to = email_to
    self.email_subject = email_subject
def close(self):
    """
    Flush, set the target to None and lose the buffer.

    The flush happens first (outside the lock) so pending records are not
    dropped; the handler lock is then held only around the base-class close.
    """
    try:
        self.flush()
    finally:
        self.acquire()
        try:
            BufferingHandler.close(self)
        finally:
            self.release()
def create_session_logger(format="CPC"):
    """Return the root logger with a session buffer handler attached.

    Reuses the pre-registered "<format>-buffer" handler from the global
    HANDLER map when present; otherwise builds a fresh 10000-record
    BufferingHandler using *format* as the formatter pattern.
    """
    logger = logging.getLogger("")
    try:
        logger.addHandler(HANDLER["%s-buffer" % format])
    except KeyError:
        buffered = BufferingHandler(10000)
        buffered.setFormatter(logging.Formatter(format))
        logger.addHandler(buffered)
    logger.setLevel(logging.INFO)
    return logger
def __init__(self, level, capacity, host, port, fromaddr, toaddrs,
             user=None, passwd=None, subject='logs', encoding='utf-8'):
    """Buffering handler that mails its records.

    Args:
        level: minimum record level to buffer.
        capacity: number of records to keep before flushing.
        host / port: SMTP server endpoint.
        fromaddr: sender address.
        toaddrs: one recipient or a list/tuple of them (normalised to list).
        user / passwd: optional SMTP credentials.
        subject: mail subject (wrapped in an email Header).
        encoding: charset for the outgoing message.
    """
    BufferingHandler.__init__(self, capacity)
    self.setLevel(level)
    # SMTP endpoint and credentials.
    self.host = host
    self.port = port
    self.user = user
    self.passwd = passwd
    # Addressing: recipients are always stored as a list.
    self.fromaddr = fromaddr
    if isinstance(toaddrs, (list, tuple)):
        self.toaddrs = toaddrs
    else:
        self.toaddrs = [toaddrs]
    self.subject = Header(subject)
    self.encoding = encoding
def __init__(self, capacity, fd_target):
    """
    :param int capacity: Amount of records to store in memory
        https://github.com/python/cpython/blob/3.3/Lib/logging/handlers.py#L1161-L1176
    :param object fd_target: File descriptor to write output to (e.g. `sys.stdout`)
    """
    # The original guarded this call with
    # `issubclass(BufferingTargetHandler, object)`, which is always true
    # (every class is a subclass of object), so the cooperative super()
    # branch was taken unconditionally — call it directly.
    super(BufferingTargetHandler, self).__init__(capacity)
    # Save target for later
    self._fd_target = fd_target
def create_session_logger(log_format="CPC"):
    """Return the root logger with the session buffer handler attached.

    Reuses the pre-registered "<log_format>-buffer" handler from the global
    HANDLER map when present; otherwise builds a fresh 10000-record
    BufferingHandler using *log_format* as the formatter pattern.
    """
    global HANDLER
    logger = logging.getLogger("")
    try:
        logger.addHandler(HANDLER["%s-buffer" % log_format])
    except KeyError:
        buffered = BufferingHandler(10000)
        buffered.setFormatter(logging.Formatter(log_format))
        logger.addHandler(buffered)
    logger.setLevel(logging.INFO)
    return logger
def test_barcode_del(self):
    """barcode_del must release the lock (LOCK_STATE_FREE) only when both
    backlog removals succeed or report not-found; any other error must keep
    the batch locked (LOCK_STATE_BATCH)."""
    # (err from remove_backlogs, err from remove_backlogs_from_shipment,
    #  expected resulting lock state)
    for err1, err2, expect_state in [
        (_ERR_404, "", LOCK_STATE_FREE),
        ("", _ERR_404, LOCK_STATE_FREE),
        (_ERR_404, _ERR_404, LOCK_STATE_FREE),
        (_ERR_404, "Something", LOCK_STATE_BATCH),
        ("", "Something", LOCK_STATE_FREE),
        ("Something", "Something", LOCK_STATE_BATCH),
    ]:
        mocker = MockDBPC()
        reestr = {"db_reestr_id": 1, "db_locked": LOCK_STATE_BATCH}
        letter = {"db_letter_id": 1, "db_locked": LOCK_STATE_BATCH,
                  "id": 10, "db_reestr_id": 1}
        # Attach a buffering handler so log output stays quiet during the test.
        log = logging.getLogger("postall")
        hndl = BufferingHandler(10)
        log.addHandler(hndl)
        # Canned return values for the mocked DB layer.
        mocker.add_retval("get_reestr_info", 0, (reestr, ""))
        mocker.add_retval("remove_backlogs", 1, [(10, err1)])
        mocker.add_retval("remove_backlogs_from_shipment", 1, [(10, err2)])
        mocker.add_retval("modify_letter", 1, (True, ""))
        barcode_del(mocker, mocker, reestr, letter)
        # Inspect the recorded modify_letter calls for the final lock state.
        for func, args in mocker.logs:
            if func[0] == "modify_letter":
                if args[0]["db_locked"] != expect_state:
                    self.fail(u"test_barcode_del: unexpected (%s, %s, %d) <> (%d)" %
                              (err1, err2, expect_state, args[0]["db_locked"]))
def setUp(self):
    """Wire a ServiceOutputParser to a fake transport and capture its logs.

    The BufferingHandler (capacity 2) is installed via the LogHandler
    fixture so tests can assert on emitted records.
    """
    super(ServiceOutputParserTest, self).setUp()
    self.transport = StringTransport()
    self.handler = BufferingHandler(2)
    self.useFixture(LogHandler(self.handler))
    self.parser = ServiceOutputParser()
    self.parser.setServiceName("my-app")
    self.parser.makeConnection(self.transport)
def process_request(self, request):
    """Begin capturing SQL/timing data for requests carrying ?query.

    Only active when DEBUG is on or the user is a superuser.
    NOTE(review): request.REQUEST and dict.has_key are legacy Django /
    Python 2 APIs — confirm the project still targets them.
    """
    if (settings.DEBUG or request.user.is_superuser) and request.REQUEST.has_key('query'):
        self.time_started = time.time()
        # Remember how many queries existed before this request ran.
        self.sql_offset_start = len(connection.queries)
        if not hasattr(self, 'loghandler'):
            self.loghandler = BufferingHandler(1000)  # Create and add a handler
            logging.getLogger('').addHandler(self.loghandler)
        else:
            self.loghandler.flush()  # Empty it of all messages
class QueryMiddleware:
    """Django middleware that replaces the response of ?query requests with
    a report of the SQL statements executed, their total time and the
    most-executed queries.

    NOTE(review): uses Python 2 idioms (dict.has_key, sorting the list
    returned by dict.items) and legacy request.REQUEST — Python 2 / old
    Django only.
    """

    def process_request(self, request):
        # Begin capture for DEBUG/superuser requests carrying ?query.
        if (settings.DEBUG or request.user.is_superuser) and request.REQUEST.has_key('query'):
            self.time_started = time.time()
            self.sql_offset_start = len(connection.queries)
            if not hasattr(self, 'loghandler'):
                self.loghandler = BufferingHandler(1000)  # Create and add a handler
                logging.getLogger('').addHandler(self.loghandler)
            else:
                self.loghandler.flush()  # Empty it of all messages

    def process_response(self, request, response):
        # Replace the response body with the rendered SQL report.
        if (settings.DEBUG or request.user.is_superuser) and request.REQUEST.has_key('query'):
            sql_queries = connection.queries[self.sql_offset_start:]
            # Reformat sql queries a bit and accumulate the total time.
            sql_total = 0.0
            for query in sql_queries:
                query['sql'] = reformat_sql(query['sql'])
                sql_total += float(query['time'])
            # Count the most-executed queries (grouped by normalised SQL).
            most_executed = {}
            for query in sql_queries:
                reformatted = reformat_sql(query['sql_no_params'])
                most_executed.setdefault(reformatted, []).append(query)
            most_executed = most_executed.items()
            most_executed.sort(key=lambda v: len(v[1]), reverse=True)
            most_executed = most_executed[:10]
            template_context = Context({
                'sql': sql_queries,
                'sql_total': sql_total,
                'bad_sql_count': len([s for s in sql_queries if s['bad']]),
                'most_executed': most_executed,
                'server_time': time.time() - self.time_started,
            })
            response.content = Template(TEMPLATE).render(template_context)
        return response
def close(self):
    """Upon `close`, flush our internal info to the target"""
    # Flush our buffers to the target
    # https://github.com/python/cpython/blob/3.3/Lib/logging/handlers.py#L1185
    # https://github.com/python/cpython/blob/3.3/Lib/logging/handlers.py#L1241-L1256
    self.acquire()
    try:
        for record in self.buffer:
            # Honour the handler's level when replaying buffered records.
            if record.levelno < self.level:
                continue
            msg = self.format(record)
            print(msg, file=self._fd_target)
    finally:
        self.release()
    # Then, run our normal close actions.  The original guarded this with
    # `issubclass(BufferingTargetHandler, object)`, which is always true,
    # so the super() branch always ran — call it directly.
    super(BufferingTargetHandler, self).close()
def create_logger(filename, base_dir):
    """
    Creates a logger with a given filename.

    :param filename: File name for the log
    :param base_dir: Directory prefix joined (by concatenation) with filename
    :return: A logger class.
    """
    logger = logging.getLogger("")
    logfile_name = base_dir + filename
    handler = logging.FileHandler(logfile_name)
    base_formatter = logging.Formatter(
        "%(asctime)s %(name)s:%(levelname)s %(message)s")
    cpc = ('%(asctime)s %(name)s:%(levelname)s '
           '[%(client)s,%(path)s,%(cid)s] %(message)s')
    handler.setFormatter(base_formatter)
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    _formatter = logging.Formatter(cpc)
    # NOTE(review): the two handlers below are created but neither attached
    # to the logger nor returned, so they look like dead code (the
    # FileHandler still opens the log file as a side effect) — confirm
    # intent before removing.
    fil_handler = logging.FileHandler(logfile_name)
    fil_handler.setFormatter(_formatter)
    buf_handler = BufferingHandler(10000)
    buf_handler.setFormatter(_formatter)
    return logger
def __init__(self, capacity, wait_on_flush=False, ch_conn='http://localhost:8123',
             ch_table=None, logging_build_in_columns_to_ch=None):
    """
    Initialize the handler with the buffer size, the level at which
    flushing should occur and an optional target.

    Note that without a target being set either here or via setTarget(),
    a MemoryHandler is no use to anyone!

    :param capacity: number of records to buffer before flushing.
    :param wait_on_flush: whether flushes block until ClickHouse confirms.
    :param ch_conn: ClickHouse HTTP endpoint.
    :param ch_table: destination table name (required).
    :param logging_build_in_columns_to_ch: override for the default set of
        built-in record fields shipped to ClickHouse.

    :raises ValueError: when ch_table is not provided.
    """
    BufferingHandler.__init__(self, capacity)
    self.wait_on_flush = wait_on_flush
    if not ch_table:
        raise ValueError('ch_table must be provided')
    self.ch_table = ch_table
    self.ch_conn = ch_conn
    # Bug fix: the default list contained 'lineno,' (stray comma inside the
    # string literal), which could never match the real 'lineno' record field.
    self.build_in_keys_to_ch = logging_build_in_columns_to_ch or [
        'message', 'levelname', 'filename', 'module', 'lineno', 'exc_info',
        'created', 'msecs', 'relativeCreated', 'asctime']
    # Full list of LogRecord built-in attribute names.
    self.build_in_log_keys = [
        'name', 'msg', 'args', 'levelname', 'levelno', 'pathname', 'filename',
        'module', 'exc_info', 'exc_text', 'stack_info', 'lineno', 'funcName',
        'created', 'msecs', 'relativeCreated', 'thread', 'threadName',
        'processName', 'process']
class TemplateMiddleware:
    """Django middleware that replaces the response of ?template requests
    with a report of which templates were rendered.

    NOTE(review): uses legacy request.REQUEST / dict.has_key — Python 2 /
    old Django only.
    """

    def process_request(self, request):
        # Begin capture for DEBUG/superuser requests carrying ?template.
        if (settings.DEBUG or request.user.is_superuser) and request.REQUEST.has_key('template'):
            self.time_started = time.time()
            self.templates_used = []
            self.contexts_used = []
            if not hasattr(self, 'loghandler'):
                self.loghandler = BufferingHandler(1000)  # Create and add a handler
                logging.getLogger('').addHandler(self.loghandler)
            else:
                self.loghandler.flush()  # Empty it of all messages
            # Record every template rendered during this request.
            template_rendered.connect(self._storeRenderedTemplates)

    def process_response(self, request, response):
        # Replace the response body with the rendered template report.
        if (settings.DEBUG or request.user.is_superuser) and request.REQUEST.has_key('template'):
            templates = [(t.name, t.origin and t.origin.name or 'No origin')
                         for t in self.templates_used]
            template_context = Context({
                'server_time': time.time() - self.time_started,
                'templates': templates,
                'template_dirs': settings.TEMPLATE_DIRS,
            })
            response.content = Template(TEMPLATE).render(template_context)
        return response

    def _storeRenderedTemplates(self, signal, sender, template, context, **kwargs):
        # Signal receiver: remember each rendered template and its context.
        self.templates_used.append(template)
        self.contexts_used.append(context)
def setUp(self): self.corba_object = Mock(spec=['method']) # Set the default result self.corba_object.method.return_value = sentinel.result self.corba_client = CorbaClient(self.corba_object, SentinelRecoder(), InternalServerError) # Mock logging self.log_handler = BufferingHandler(10) logger = logging.getLogger('pyfco.client') patcher = patch.object(logger, 'handlers', [self.log_handler]) self.addCleanup(patcher.stop) patcher.start() self.addCleanup(logger.setLevel, logger.level) logger.setLevel(logging.DEBUG)
def process_request(self, request):
    """Begin capturing rendered templates for requests carrying ?template.

    Only active when DEBUG is on or the user is a superuser.
    NOTE(review): request.REQUEST / dict.has_key are legacy Django /
    Python 2 APIs — confirm the project still targets them.
    """
    if (settings.DEBUG or request.user.is_superuser) and request.REQUEST.has_key('template'):
        self.time_started = time.time()
        self.templates_used = []
        self.contexts_used = []
        if not hasattr(self, 'loghandler'):
            self.loghandler = BufferingHandler(1000)  # Create and add a handler
            logging.getLogger('').addHandler(self.loghandler)
        else:
            self.loghandler.flush()  # Empty it of all messages
        # Record every template rendered during this request.
        template_rendered.connect(self._storeRenderedTemplates)
def create_logger(filename):
    """
    Creates a logger with a given filename.

    :param filename: File name for the log
    :return: A logger class.
    """
    logger = logging.getLogger("")
    LOGFILE_NAME = filename
    hdlr = logging.FileHandler(LOGFILE_NAME)
    base_formatter = logging.Formatter(
        "%(asctime)s %(name)s:%(levelname)s %(message)s")
    CPC = ('%(asctime)s %(name)s:%(levelname)s '
           '[%(client)s,%(path)s,%(cid)s] %(message)s')
    cpc_formatter = logging.Formatter(CPC)
    hdlr.setFormatter(base_formatter)
    logger.addHandler(hdlr)
    logger.setLevel(logging.DEBUG)
    _formatter = logging.Formatter(CPC)
    # NOTE(review): the two handlers below are created but neither attached
    # to the logger nor returned — apparent dead code (the FileHandler still
    # opens the file as a side effect); confirm intent before removing.
    fil_handl = logging.FileHandler(LOGFILE_NAME)
    fil_handl.setFormatter(_formatter)
    buf_handl = BufferingHandler(10000)
    buf_handl.setFormatter(_formatter)
    return logger
def logger_factory(logtype='syslog', logfile=None, level='WARNING',
                   logid='PXE', format=None):
    """Create and configure a logger for the requested output type.

    :param logtype: one of file/winlog/eventlog/nteventlog/syslog/unix/
        stderr/stdout; anything else gets a no-op BufferingHandler.
    :param logfile: path for logtype 'file'.
    :param level: level name (case-insensitive); unknown names -> WARNING.
    :param logid: logger name / NT event source.
    :param format: optional format string; a default is built otherwise.
    """
    # this code has been copied from Trac (MIT modified license)
    logger = getLogger(logid)
    logtype = logtype.lower()
    if logtype == 'file':
        hdlr = FileHandler(logfile)
    elif logtype in ('winlog', 'eventlog', 'nteventlog'):
        # Requires win32 extensions
        hdlr = NTEventLogHandler(logid, logtype='Application')
    elif logtype in ('syslog', 'unix'):
        hdlr = SysLogHandler('/dev/log')
    elif logtype == 'stderr':
        # Bug fix: the original tested `logtype in ('stderr')`, i.e.
        # substring membership in the string 'stderr' (so 'err' matched);
        # use equality instead.  Same fix for 'stdout' below.
        hdlr = StreamHandler(stderr)
    elif logtype == 'stdout':
        hdlr = StreamHandler(stdout)
    else:
        hdlr = BufferingHandler(0)
    if not format:
        format = 'PXE[%(module)s] %(levelname)s: %(message)s'
        if logtype in ('file', 'stderr'):
            format = '%(asctime)s ' + format
    datefmt = ''
    if logtype == 'stderr':
        datefmt = '%X'
    level = level.upper()
    if level in ('DEBUG', 'ALL'):
        logger.setLevel(DEBUG)
    elif level == 'INFO':
        logger.setLevel(INFO)
    elif level == 'ERROR':
        logger.setLevel(ERROR)
    elif level == 'CRITICAL':
        logger.setLevel(CRITICAL)
    else:
        logger.setLevel(WARNING)
    formatter = Formatter(format, datefmt)
    hdlr.setFormatter(formatter)
    logger.addHandler(hdlr)
    return logger
def __init__(self, *, config=None, read_only=True, debug=True, echo=False):
    """Application context: configuration, logging, database and caches.

    Args:
        config: optional configuration file path passed to _create_config.
        read_only: when True, marks the context read-only (for testing).
        debug: console handler level toggle (DEBUG vs INFO).
        echo: echo SQL statements from the SQLAlchemy engine.
    """
    # Read-only for testing
    self.__read_only = read_only
    # Configuration
    self.__config = self._create_config([config])
    # Logging
    logger = self.get_property(self._SECTION, 'log_path', 'logs/cryptowelder.log')
    formatter = Formatter(
        '[%(asctime)-15s][%(levelname)-5s][%(name)s] %(message)s')
    self.__stream_handler = StreamHandler()
    self.__stream_handler.setFormatter(formatter)
    self.__stream_handler.setLevel(DEBUG if debug else INFO)
    # Rotate to a file only when the log directory exists; otherwise fall
    # back to a small in-memory BufferingHandler so logging still works.
    self.__rotate_handler = TimedRotatingFileHandler(
        logger,
        when=self.get_property(self._SECTION, 'log_roll', 'D'),
        backupCount=int(self.get_property(
            self._SECTION, 'log_bkup', 7))) if path.exists(
        path.dirname(logger)) else BufferingHandler(64)
    self.__rotate_handler.setFormatter(formatter)
    self.__rotate_handler.setLevel(DEBUG)
    self.__logger = self.get_logger(self)
    self.__logger.info('Logger : %s', logger)
    self.__logger.info('Config : %s', config)
    # Database (defaults to an in-memory SQLite engine)
    database = self.get_property(self._SECTION, 'database', 'sqlite:///:memory:')
    self.__engine = create_engine(database, echo=echo)
    self.__session = scoped_session(sessionmaker(bind=self.__engine))
    self.__logger.info('Database : %s (read_only=%s)', database, read_only)
    # Cache: per-key locks and last-nonce timestamps
    self.__nonce_lock = defaultdict(lambda: Lock())
    self.__nonce_time = {}
def __init__(self, *argz, **kwz): super(DebugDumper, self).__init__(*argz, **kwz) # Simple buffered handler that never triggers flush self.handler = BufferingHandler(capacity=self.conf.capacity) self.buffer = self.handler.buffer = deque(maxlen=self.handler.capacity) self.handler.capacity += 1 self.handler.setLevel(self.conf.level) self.handler.setFormatter( logging.Formatter(self.conf.format, self.conf.datefmt) ) logging.root.addHandler(self.handler) # Signal log-dump interface if self.conf.signal and isinstance(self.conf.signal, str): signum = getattr(signal, self.conf.signal, None) if not signum: signum = getattr(signal, 'SIG{}'.format(self.conf.signal), None) self.conf.signal = signum if self.conf.signal: def signal_handler(sig, frm): # Supress buffering of re-issued messages self.handler._emit, self.handler.emit = self.handler.emit, lambda *a, **k: None for msg in list(self.buffer): log.fatal(self.handler.format(msg)) self.handler.emit = self.handler._emit signal.signal(self.conf.signal, signal_handler)
def __init__(self, capacity=None):
    # NOTE(review): capacity defaults to None, and the stock
    # BufferingHandler.shouldFlush compares len(buffer) >= capacity, which
    # raises for None on Python 3 — presumably shouldFlush is overridden
    # elsewhere in this class; confirm.
    _BufferingHandler.__init__(self, capacity=capacity)
def __init__(self):
    """Buffer effectively without bound.

    Uses sys.maxsize (available since Python 2.6 and in all Python 3
    versions) instead of the Python 2-only sys.maxint, which no longer
    exists under Python 3.
    """
    BufferingHandler.__init__(self, sys.maxsize)
class WorkerTest(unittest.TestCase):
    """Exercises Worker's local-thread dispatch, blocking calls, call-by-name
    and start/stop lifecycle."""

    def setUp(self):
        # Decorator that records the call's (args, kwargs) on the worker and
        # sets its event so tests can wait for asynchronous completion.
        def set_ev(fu):
            def new_fu(*args, **kwargs):
                s = args[0]
                s.event.set()
                s.val = (args, kwargs)
                return fu(*args, **kwargs)
            return new_fu

        class ATestWorker(Worker):
            def __init__(self, name, message_queue):
                Worker.__init__(self, name, message_queue)
                self.event = Event()
                self.val = None
                self.started = False
                self.stopped = False

            @local_thread
            @set_ev
            def echo(self, val):
                return val

            @local_thread_blocking
            @set_ev
            def echo_block(self, val):
                return val

            def onStart(self):
                self.started = True

            def onStop(self):
                self.stopped = True

            @local_thread
            def raise_(self, ex):
                raise ex

            @local_thread_blocking
            def raise_blocking(self, ex):
                raise ex

            @set_ev
            def call_me_by_name(self, arg1, arg2):
                return

            def call_me_by_name_blocking(self, arg1, arg2):
                return arg1, arg2

        # Buffer log output so tests can assert on emitted records.
        self.buha = BufferingHandler(10000)
        q = Queue()
        self.q = q
        NAME = "Test"
        l = logging.getLogger(NAME)
        self.w = ATestWorker(NAME, q)
        self.assertEqual(self.w.log(), l)
        l.propagate = 0
        l.addHandler(self.buha)
        self.assertFalse(self.w.started)
        self.w.start()
        sleep(0.05)  # give the worker thread time to start
        self.assertTrue(self.w.started)

    def testName(self):
        assert(self.w.name() == "Test")

    def testMessageQueue(self):
        assert(self.w.message_queue() == self.q)

    def testLocalThread(self):
        # Asynchronous echo: wait on the event, then inspect recorded args.
        s = "Testing"
        self.w.event.clear()
        self.w.echo(s)
        self.w.event.wait(5)
        args, kwargs = self.w.val
        assert(args[1] == s)

    def testLocalThreadException(self):
        # An exception in a local-thread call must be logged at ERROR.
        self.buha.flush()
        self.w.raise_(Exception())
        sleep(0.1)  # hard delay
        assert(len(self.buha.buffer) != 0)
        assert(self.buha.buffer[0].levelno == ERROR)

    def testCallByName(self):
        self.w.event.clear()
        self.w.call_by_name(self.w, "call_me_by_name", "arg1", arg2="arg2")
        self.w.event.wait(5)
        args, kwargs = self.w.val
        assert(args[1] == "arg1")
        assert(kwargs["arg2"] == "arg2")

    def testLocalThreadBlocking(self):
        s = "Testing"
        assert(s == self.w.echo_block(s))

    def testLocalThreadExceptionBlocking(self):
        # Blocking variant must re-raise the exception in the caller.
        class TestException(Exception):
            pass
        self.assertRaises(TestException, self.w.raise_blocking,
                          TestException())

    def testCallByNameBlocking(self):
        arg1, arg2 = self.w.call_by_name_blocking(
            self.w, "call_me_by_name_blocking", "arg1", arg2="arg2")
        assert(arg1 == "arg1")
        assert(arg2 == "arg2")

    def tearDown(self):
        # Stop the worker and verify onStop ran.
        assert(self.w.stopped == False)
        self.w.stop()
        self.w.join(5)
        assert(self.w.stopped == True)
def __init__(self):
    # NOTE(review): `maxsize` is not defined within this block — presumably
    # a module-level constant holding the buffer capacity; confirm.
    BufferingHandler.__init__(self, maxsize)
def handler() -> Handler:
    """Return a fresh in-memory handler buffering up to 100 records."""
    buffered = BufferingHandler(100)
    return buffered
def emit(self, record):
    """Buffer *record* after freezing its message.

    The formatted message is computed immediately and the (potentially
    mutable) args are dropped, so later mutation by the caller cannot
    change what was logged.
    """
    frozen = record.getMessage()
    record.msg = frozen
    record.args = {}
    BufferingHandler.emit(self, record)
def __init__(self):
    # Capacity 0.  Note a stock BufferingHandler with capacity 0 flushes on
    # every record (len(buffer) >= 0 is always true), so shouldFlush is
    # presumably overridden elsewhere in this class — TODO confirm.
    BufferingHandler.__init__(self, 0)
def __init__(self, capacity=1000000, flushLevel=logging.ERROR, target=None):
    """Buffer audit records until transaction end.

    Registers itself with the global transaction manager so it is notified
    when the transaction completes.

    NOTE(review): flushLevel is accepted but never stored or used in this
    block — confirm whether that is intentional.
    """
    BufferingHandler.__init__(self, capacity)
    self.target = target
    logger = logging.getLogger('AuditTrail')
    logger.info('AuditTrail.TransactionEndHandler> Registering with TM')
    transaction.manager.registerGlobalSynch(self)
def __init__(self, widget):
    """Buffer up to 100 records destined for *widget*."""
    self.widget = widget
    BufferingHandler.__init__(self, 100)
def __init__(self, capacity = None):
    # NOTE(review): capacity defaults to None; the stock
    # BufferingHandler.shouldFlush compares len(buffer) >= capacity, which
    # raises for None on Python 3 — presumably shouldFlush is overridden
    # elsewhere in this class; confirm.
    _BufferingHandler.__init__(self, capacity = capacity)
def __init__(self, capacity, logformat, logdatefmt, filters):
    """Buffering handler with a fixed formatter and a filter set.

    Args:
        capacity: number of records to buffer.
        logformat / logdatefmt: passed to logging.Formatter.
        filters: specification handed to FilterSet.
    """
    BufferingHandler.__init__(self, capacity)
    self.setFormatter(logging.Formatter(logformat, logdatefmt))
    self.filterset = FilterSet(filters)
base_formatter = logging.Formatter( "%(asctime)s %(name)s:%(levelname)s %(message)s") CPC = ('%(asctime)s %(name)s:%(levelname)s ' '[%(client)s,%(path)s,%(cid)s] %(message)s') cpc_formatter = logging.Formatter(CPC) hdlr.setFormatter(base_formatter) LOGGER.addHandler(hdlr) LOGGER.setLevel(logging.DEBUG) _formatter = logging.Formatter(CPC) fil_handl = logging.FileHandler(LOGFILE_NAME) fil_handl.setFormatter(_formatter) buf_handl = BufferingHandler(10000) buf_handl.setFormatter(_formatter) HANDLER = {"CPC-file": fil_handl, "CPC-buffer": buf_handl} ACTIVE_HANDLER = "BASE" URLMAP = {} NAME = "pyoic" OAS = None PASSWD = { "diana": "krall", "babs": "howes", "upper": "crust", "rohe0002": "StevieRay",
def setUp(self):
    """Build an instrumented Worker subclass, start it and buffer its logs."""
    # Decorator that records the call's (args, kwargs) on the worker and
    # sets its event so tests can wait for asynchronous completion.
    def set_ev(fu):
        def new_fu(*args, **kwargs):
            s = args[0]
            s.event.set()
            s.val = (args, kwargs)
            return fu(*args, **kwargs)
        return new_fu

    class ATestWorker(Worker):
        def __init__(self, name, message_queue):
            Worker.__init__(self, name, message_queue)
            self.event = Event()
            self.val = None
            self.started = False
            self.stopped = False

        @local_thread
        @set_ev
        def echo(self, val):
            return val

        @local_thread_blocking
        @set_ev
        def echo_block(self, val):
            return val

        def onStart(self):
            self.started = True

        def onStop(self):
            self.stopped = True

        @local_thread
        def raise_(self, ex):
            raise ex

        @local_thread_blocking
        def raise_blocking(self, ex):
            raise ex

        @set_ev
        def call_me_by_name(self, arg1, arg2):
            return

        def call_me_by_name_blocking(self, arg1, arg2):
            return arg1, arg2

    # Buffer log output so tests can assert on emitted records.
    self.buha = BufferingHandler(10000)
    q = Queue()
    self.q = q
    NAME = "Test"
    l = logging.getLogger(NAME)
    self.w = ATestWorker(NAME, q)
    self.assertEqual(self.w.log(), l)
    l.propagate = 0
    l.addHandler(self.buha)
    self.assertFalse(self.w.started)
    self.w.start()
    sleep(0.05)  # give the worker thread time to start
    self.assertTrue(self.w.started)
def __init__(self):
    """Create the handler with zero capacity.

    Capacity is zero, as we won't rely on it when deciding when to flush
    data (the count-based trigger is not used).
    """
    BufferingHandler.__init__(self, 0)
def flush(self):
    """
    Clears out the `buffer` and `formatted` attributes.
    """
    self.formatted = []
    BufferingHandler.flush(self)
hdlr = logging.FileHandler(LOGFILE_NAME) base_formatter = logging.Formatter( "%(asctime)s %(name)s:%(levelname)s %(message)s") CPC = '%(asctime)s %(name)s:%(levelname)s [%(client)s,%(path)s,%(cid)s] %(message)s' cpc_formatter = logging.Formatter(CPC) hdlr.setFormatter(base_formatter) LOGGER.addHandler(hdlr) LOGGER.setLevel(logging.DEBUG) _formatter = logging.Formatter(CPC) fil_handl = logging.FileHandler(LOGFILE_NAME) fil_handl.setFormatter(_formatter) buf_handl = BufferingHandler(10000) buf_handl.setFormatter(_formatter) HANDLER = {"CPC-file": fil_handl, "CPC-buffer": buf_handl} ACTIVE_HANDLER = "BASE" URLMAP = {} NAME = "pyoic" OAS = None PASSWD = [("diana", "krall"), ("babs", "howes"), ("upper", "crust")] #noinspection PyUnusedLocal def devnull(txt):
def __init__(self, bucket, prefix=''):
    """Accumulate records destined for *bucket* under *prefix*.

    Capacity 0 means the count-based flush trigger is not relied upon.
    """
    BufferingHandler.__init__(self, 0)
    self.prefix = prefix
    self.bucket = bucket
class WorkerTest(unittest.TestCase):
    """Exercises Worker's local-thread dispatch, blocking calls, call-by-name
    and start/stop lifecycle."""

    def setUp(self):
        # Decorator that records the call's (args, kwargs) on the worker and
        # sets its event so tests can wait for asynchronous completion.
        def set_ev(fu):
            def new_fu(*args, **kwargs):
                s = args[0]
                s.event.set()
                s.val = (args, kwargs)
                return fu(*args, **kwargs)
            return new_fu

        class ATestWorker(Worker):
            def __init__(self, name, message_queue):
                Worker.__init__(self, name, message_queue)
                self.event = Event()
                self.val = None
                self.started = False
                self.stopped = False

            @local_thread
            @set_ev
            def echo(self, val):
                return val

            @local_thread_blocking
            @set_ev
            def echo_block(self, val):
                return val

            def onStart(self):
                self.started = True

            def onStop(self):
                self.stopped = True

            @local_thread
            def raise_(self, ex):
                raise ex

            @local_thread_blocking
            def raise_blocking(self, ex):
                raise ex

            @set_ev
            def call_me_by_name(self, arg1, arg2):
                return

            def call_me_by_name_blocking(self, arg1, arg2):
                return arg1, arg2

        # Buffer log output so tests can assert on emitted records.
        self.buha = BufferingHandler(10000)
        q = Queue()
        self.q = q
        NAME = "Test"
        l = logging.getLogger(NAME)
        self.w = ATestWorker(NAME, q)
        self.assertEqual(self.w.log(), l)
        l.propagate = 0
        l.addHandler(self.buha)
        self.assertFalse(self.w.started)
        self.w.start()
        sleep(0.05)  # give the worker thread time to start
        self.assertTrue(self.w.started)

    def testName(self):
        assert (self.w.name() == "Test")

    def testMessageQueue(self):
        assert (self.w.message_queue() == self.q)

    def testLocalThread(self):
        # Asynchronous echo: wait on the event, then inspect recorded args.
        s = "Testing"
        self.w.event.clear()
        self.w.echo(s)
        self.w.event.wait(5)
        args, kwargs = self.w.val
        assert (args[1] == s)

    def testLocalThreadException(self):
        # An exception in a local-thread call must be logged at ERROR.
        self.buha.flush()
        self.w.raise_(Exception())
        sleep(0.1)  # hard delay
        assert (len(self.buha.buffer) != 0)
        assert (self.buha.buffer[0].levelno == ERROR)

    def testCallByName(self):
        self.w.event.clear()
        self.w.call_by_name(self.w, "call_me_by_name", "arg1", arg2="arg2")
        self.w.event.wait(5)
        args, kwargs = self.w.val
        assert (args[1] == "arg1")
        assert (kwargs["arg2"] == "arg2")

    def testLocalThreadBlocking(self):
        s = "Testing"
        assert (s == self.w.echo_block(s))

    def testLocalThreadExceptionBlocking(self):
        # Blocking variant must re-raise the exception in the caller.
        class TestException(Exception):
            pass
        self.assertRaises(TestException,
                          self.w.raise_blocking, TestException())

    def testCallByNameBlocking(self):
        arg1, arg2 = self.w.call_by_name_blocking(
            self.w, "call_me_by_name_blocking", "arg1", arg2="arg2")
        assert (arg1 == "arg1")
        assert (arg2 == "arg2")

    def tearDown(self):
        # Stop the worker and verify onStop ran.
        assert (self.w.stopped == False)
        self.w.stop()
        self.w.join(5)
        assert (self.w.stopped == True)
import logging import sys from logging import Formatter from logging.handlers import BufferingHandler, RotatingFileHandler, SysLogHandler from typing import Any, Dict from freqtrade.exceptions import OperationalException logger = logging.getLogger(__name__) LOGFORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' # Initialize bufferhandler - will be used for /log endpoints bufferHandler = BufferingHandler(1000) bufferHandler.setFormatter(Formatter(LOGFORMAT)) def _set_loggers(verbosity: int = 0, api_verbosity: str = 'info') -> None: """ Set the logging level for third party libraries :return: None """ logging.getLogger('requests').setLevel( logging.INFO if verbosity <= 1 else logging.DEBUG ) logging.getLogger("urllib3").setLevel( logging.INFO if verbosity <= 1 else logging.DEBUG ) logging.getLogger('ccxt.base.exchange').setLevel( logging.INFO if verbosity <= 2 else logging.DEBUG
def __init__(self):
    """Create the handler with zero capacity.

    Capacity is zero, as we won't rely on it when deciding when to flush
    data.
    """
    BufferingHandler.__init__(self, 0)