class Log_usingThread(BaseLog):
    """
    Asynchronous log writer: messages are buffered in a queue and written
    by a dedicated background thread, so callers never block on the
    underlying `logger`.
    """

    def __init__(self, logger):
        """
        :param logger: destination log object; its `write(**log)` is called
                       from the worker thread for each queued message
        """
        # DELAYED LOAD FOR THREADS MODULE
        from pyLibrary.thread.threads import Queue

        self.queue = Queue("logs", max=10000, silent=True)
        self.logger = logger

        def worker(please_stop):
            # Drain the queue about once per second until stop is requested.
            while not please_stop:
                Thread.sleep(1)
                logs = self.queue.pop_all()
                for log in logs:
                    if log is Thread.STOP:
                        if DEBUG_LOGGING:
                            sys.stdout.write("Log_usingThread.worker() sees stop, filling rest of queue\n")
                        # Signal shutdown, but keep writing the remaining
                        # messages already popped in this batch.
                        please_stop.go()
                    else:
                        self.logger.write(**log)

        self.thread = Thread("log thread", worker)
        self.thread.start()

    def write(self, template, params):
        """
        Queue one message for the worker thread.

        :param template: message template string
        :param params: values to fill the template
        :return: self, for call chaining
        :raises Exception: re-raised if the queue is unusable (e.g. logging
                           was never started)
        """
        try:
            self.queue.add({"template": template, "params": params})
            return self
        except Exception:
            sys.stdout.write("IF YOU SEE THIS, IT IS LIKELY YOU FORGOT TO RUN Log.start() FIRST\n")
            # Bare `raise` preserves the original traceback; `raise e`
            # would truncate it under Python 2.
            raise
class TextLog_usingElasticSearch(TextLog):
    """
    Log sink that batches debug-log messages into an ElasticSearch index
    via a background insert thread.
    """

    @use_settings
    def __init__(self, host, index, type="log", max_size=1000, batch_size=100, settings=None):
        """
        settings ARE FOR THE ELASTICSEARCH INDEX

        :param host: ES host (consumed via `settings` by @use_settings)
        :param index: ES index name (consumed via `settings`)
        :param type: ES document type
        :param max_size: maximum number of messages buffered in the queue
        :param batch_size: number of messages sent to ES per bulk insert
        :param settings: full settings object for the ES index
        """
        self.es = Cluster(settings).get_or_create_index(
            schema=convert.json2value(convert.value2json(SCHEMA), leaves=True),
            limit_replicas=True,
            tjson=True,
            settings=settings,
        )
        self.batch_size = batch_size
        self.es.add_alias(coalesce(settings.alias, settings.index))
        self.queue = Queue("debug logs to es", max=max_size, silent=True)
        # Default retry policy when the settings do not specify one.
        self.es.settings.retry.times = coalesce(self.es.settings.retry.times, 3)
        self.es.settings.retry.sleep = Duration(coalesce(self.es.settings.retry.sleep, MINUTE))
        Thread.run("add debug logs to es", self._insert_loop)

    def write(self, template, params):
        """
        Queue one message for insertion into ES; returns self for chaining.
        """
        if params.get("template"):
            # DETECTED INNER TEMPLATE, ASSUME TRACE IS ON, SO DO NOT NEED THE OUTER TEMPLATE
            self.queue.add({"value": params})
        else:
            # Cap template size so a pathological message cannot bloat the index.
            template = strings.limit(template, 2000)
            self.queue.add({"value": {"template": template, "params": params}}, timeout=3 * MINUTE)
        return self

    def _insert_loop(self, please_stop=None):
        """
        Worker loop: bulk-insert queued messages into ES once per second.
        Gives up after 6 consecutive failures, then keeps draining the
        queue so producers do not block.
        """
        bad_count = 0
        while not please_stop:
            try:
                Thread.sleep(seconds=1)
                messages = wrap(self.queue.pop_all())
                if messages:
                    for g, mm in jx.groupby(messages, size=self.batch_size):
                        self.es.extend(mm)
                    bad_count = 0  # any success resets the failure counter
            except Exception as e:
                Log.warning("Problem inserting logs into ES", cause=e)
                bad_count += 1
                if bad_count > 5:
                    # Only warn about giving up when we actually gave up due
                    # to repeated failure, not on a normal please_stop exit.
                    Log.warning("Given up trying to write debug logs to ES index {{index}}", index=self.es.settings.index)
                    break

        # CONTINUE TO DRAIN THIS QUEUE
        while not please_stop:
            try:
                Thread.sleep(seconds=1)
                self.queue.pop_all()
            except Exception as e:
                Log.warning("Should not happen", cause=e)
class TextLog_usingElasticSearch(TextLog):
    """
    Log sink that batches debug-log messages into an ElasticSearch index
    via a background insert thread.
    """

    @use_settings
    def __init__(self, host, index, type="log", max_size=1000, batch_size=100, settings=None):
        """
        settings ARE FOR THE ELASTICSEARCH INDEX

        :param host: ES host (consumed via `settings` by @use_settings)
        :param index: ES index name (consumed via `settings`)
        :param type: ES document type
        :param max_size: maximum number of messages buffered in the queue
        :param batch_size: number of messages sent to ES per bulk insert
        :param settings: full settings object for the ES index
        """
        self.es = Cluster(settings).get_or_create_index(
            schema=convert.json2value(convert.value2json(SCHEMA), leaves=True),
            limit_replicas=True,
            tjson=True,
            settings=settings
        )
        self.batch_size = batch_size
        self.es.add_alias(coalesce(settings.alias, settings.index))
        self.queue = Queue("debug logs to es", max=max_size, silent=True)
        # Default retry policy when the settings do not specify one.
        self.es.settings.retry.times = coalesce(self.es.settings.retry.times, 3)
        self.es.settings.retry.sleep = Duration(coalesce(self.es.settings.retry.sleep, MINUTE))
        Thread.run("add debug logs to es", self._insert_loop)

    def write(self, template, params):
        """
        Queue one message for insertion into ES; returns self for chaining.
        """
        if params.get("template"):
            # DETECTED INNER TEMPLATE, ASSUME TRACE IS ON, SO DO NOT NEED THE OUTER TEMPLATE
            self.queue.add({"value": params})
        else:
            # Cap template size so a pathological message cannot bloat the index.
            template = strings.limit(template, 2000)
            self.queue.add({"value": {"template": template, "params": params}}, timeout=3 * MINUTE)
        return self

    def _insert_loop(self, please_stop=None):
        """
        Worker loop: bulk-insert queued messages into ES once per second.
        Gives up after 6 consecutive failures, then keeps draining the
        queue so producers do not block.
        """
        bad_count = 0
        while not please_stop:
            try:
                Thread.sleep(seconds=1)
                messages = wrap(self.queue.pop_all())
                if messages:
                    for g, mm in jx.groupby(messages, size=self.batch_size):
                        self.es.extend(mm)
                    bad_count = 0  # any success resets the failure counter
            except Exception as e:
                Log.warning("Problem inserting logs into ES", cause=e)
                bad_count += 1
                if bad_count > 5:
                    # Only warn about giving up when we actually gave up due
                    # to repeated failure, not on a normal please_stop exit.
                    Log.warning("Given up trying to write debug logs to ES index {{index}}", index=self.es.settings.index)
                    break

        # CONTINUE TO DRAIN THIS QUEUE
        while not please_stop:
            try:
                Thread.sleep(seconds=1)
                self.queue.pop_all()
            except Exception as e:
                Log.warning("Should not happen", cause=e)
class Log_usingThread(BaseLog):
    """
    Asynchronous log writer: messages are buffered in a queue and written
    by a dedicated background thread, so callers never block on the
    underlying `logger`.
    """

    def __init__(self, logger):
        """
        :param logger: destination log object; its `write(**log)` is called
                       from the worker thread for each queued message
        """
        # DELAYED LOAD FOR THREADS MODULE
        from pyLibrary.thread.threads import Queue

        self.queue = Queue("logs", max=10000, silent=True)
        self.logger = logger

        def worker(please_stop):
            # Drain the queue about once per second until stop is requested.
            while not please_stop:
                Thread.sleep(1)
                logs = self.queue.pop_all()
                for log in logs:
                    if log is Thread.STOP:
                        if DEBUG_LOGGING:
                            sys.stdout.write(
                                "Log_usingThread.worker() sees stop, filling rest of queue\n"
                            )
                        # Signal shutdown, but keep writing the remaining
                        # messages already popped in this batch.
                        please_stop.go()
                    else:
                        self.logger.write(**log)

        self.thread = Thread("log thread", worker)
        self.thread.parent.remove_child(self.thread)  # LOGGING WILL BE RESPONSIBLE FOR THREAD stop()
        self.thread.start()

    def write(self, template, params):
        """
        Queue one message for the worker thread.

        :param template: message template string
        :param params: values to fill the template
        :return: self, for call chaining
        :raises Exception: re-raised if the queue is unusable (e.g. logging
                           was never started)
        """
        try:
            self.queue.add({"template": template, "params": params})
            return self
        except Exception:
            sys.stdout.write(
                "IF YOU SEE THIS, IT IS LIKELY YOU FORGOT TO RUN Log.start() FIRST\n"
            )
            # Bare `raise` preserves the original traceback; `raise e`
            # would truncate it under Python 2.
            raise