def __init__(self, logger_name: str = 'rekcurd_dashboard', log_level: int = logging.NOTSET) -> None:
    """Configure a logger that forwards structured records to a local fluentd agent.

    :param logger_name: name of the underlying ``logging`` logger
    :param log_level: minimum level applied to both the logger and the handler
    """
    super().__init__()
    self.log = logging.getLogger(logger_name)
    self.log.setLevel(log_level)
    # Structured record layout shipped to fluentd; %(loglevel)d is a custom
    # field, presumably injected by a filter elsewhere -- confirm upstream.
    record_layout = {
        'host': gethostname(),
        'short_message': '%(message)s',
        'timestamp': '%(created)d.%(msecs)d',
        'level': '%(loglevel)d'
    }
    fluentd = handler.FluentHandler('rekcurd_dashboard')
    fluentd.setFormatter(handler.FluentRecordFormatter(record_layout))
    fluentd.setLevel(log_level)
    # Drop any previously attached handlers so records are not duplicated.
    self.log.handlers = []
    self.log.addHandler(fluentd)
def add_fluentd(self):
    """ Adds a handler for fluentd if the hostname has been defined """
    try:
        # Accessing self.fluentd_hostname (and the other fluentd_* attributes)
        # raises AttributeError when fluentd is not configured; the outer
        # try/except turns that into a warning instead of a crash.
        if self.fluentd_hostname:
            try:
                # Close any previously registered fluentd handler before
                # replacing it.
                if self._handlers["fluentd"]:
                    self._handlers["fluentd"].close()
            except KeyError:
                # First-time setup: create the slot so later lookups succeed.
                self._handlers["fluentd"] = None
            print("sending logs to fluentd at: {}".format((
                self.fluentd_tag,
                self.fluentd_record,
                self.fluentd_hostname,
                self.fluentd_port,
            )))
            # Tag is "<tag>.<record>" so fluentd can route by both parts.
            self._handlers["fluentd"] = fluent_handler.FluentHandler(
                "{}.{}".format(self.fluentd_tag, self.fluentd_record),
                host=self.fluentd_hostname,
                port=self.fluentd_port,
            )
            fluentd_formatter = fluent_handler.FluentRecordFormatter(
                self.format("fluentd"))
            self._handlers["fluentd"].setFormatter(fluentd_formatter)
            self._logger.addHandler(self._handlers["fluentd"])
            # ContextFilter is defined elsewhere; presumably it injects
            # per-record context fields -- confirm at its definition.
            self._logger.addFilter(ContextFilter())
    except AttributeError:
        self._logger.warning(
            "Fluentd hostname not defined in settings. Skipping!")
def __init__(self, tag, ip, port, loggerName="fluent.test"):
    """Attach a fluentd forwarding handler to the named logger.

    ``customFormat`` is a module-level record layout defined elsewhere.
    """
    logging.basicConfig(level=logging.INFO)
    self._logger = logging.getLogger(loggerName)
    self._formatter = handler.FluentRecordFormatter(customFormat)
    self._loggerHandler = handler.FluentHandler(tag, host=ip, port=port)
    self._loggerHandler.setFormatter(self._formatter)
    self._logger.addHandler(self._loggerHandler)
def handle():
    """Emit one structured record through a fluentd handler on localhost."""
    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger('fluent.handle')
    fluentd = handler.FluentHandler('app', host='127.0.0.1', port=24224)
    fluentd.setFormatter(handler.FluentRecordFormatter(custom_format))
    log.addHandler(fluentd)
    log.info({'from': 'CC', 'to': 'DD'})
def configure_logger(self, event):
    """When fluent logging is enabled in the injected config, build a
    fluentd handler and register it on ``self.log_handlers``."""
    config = inject.instance(applauncher.kernel.Configuration)
    fluent_cfg = config.fluent.log_handler
    if not fluent_cfg.enabled:
        return
    fluentd = handler.FluentHandler(config.fluent.tag,
                                    host=fluent_cfg.host,
                                    port=fluent_cfg.port)
    # The configured format uses single quotes; JSON requires double quotes.
    record_layout = json.loads(fluent_cfg.format.replace("'", '"'))
    fluentd.setFormatter(handler.FluentRecordFormatter(record_layout))
    self.log_handlers.append(fluentd)
def get_logger(name, fluentd_host='localhost', fluentd_port=24224):
    """Get a Python logger instance which forwards logs to Fluentd.

    Records are tagged 'mole.logs'; ``overflow_handler`` (module-level)
    receives buffered records if the fluentd connection backs up.
    """
    log = logging.getLogger(name)
    fluentd = handler.FluentHandler(
        'mole.logs',
        host=fluentd_host,
        port=fluentd_port,
        buffer_overflow_handler=overflow_handler)
    fluentd.setFormatter(
        handler.FluentRecordFormatter(custom_format, format_json=False))
    log.addHandler(fluentd)
    return log
def kafka_logger(log_name):
    """Create an INFO-level logger whose records are shipped to a fluentd
    endpoint at 10.19.47.136:54224, tagged with ``log_name``."""
    record_layout = {
        'file_name': '%(module)s',
        'log_level': '%(levelname)s',
        'line': '%(lineno)d'
    }
    fluentd = handler.FluentHandler(log_name, host='10.19.47.136', port=54224)
    fluentd.setFormatter(handler.FluentRecordFormatter(record_layout))
    log = logging.getLogger(log_name)
    log.setLevel(logging.INFO)
    log.addHandler(fluentd)
    return log
def init(self, prop):
    """Initialise the FluentBit singleton logger from a property tree.

    Registers custom log levels, then creates one FluentHandler per
    "output" leaf in ``prop``. Returns False if an output leaf has an
    empty tag (note: handlers added before the failing leaf remain
    attached), True otherwise.
    """
    self.logger = logging.getLogger("fluent")
    self.handlers = []
    if FluentBit.s_logger is None:
        FluentBit.s_logger = self
    # Register extra severity levels below DEBUG / above ERROR.
    logging.PARANOID = logging.DEBUG - 3
    logging.VERBOSE = logging.DEBUG - 2
    logging.TRACE = logging.DEBUG - 1
    logging.FATAL = logging.ERROR + 1
    logging.addLevelName(logging.PARANOID, "PARANOID")
    logging.addLevelName(logging.VERBOSE, "VERBOSE")
    logging.addLevelName(logging.TRACE, "TRACE")
    logging.addLevelName(logging.FATAL, "FATAL")
    for leaf in prop.getLeaf():
        if "output" not in leaf.getName():
            continue
        tag = leaf.getProperty("tag")
        if tag == "":
            return False
        host = leaf.getProperty("host")
        if host == "":
            host = "127.0.0.1"
        # fix: narrowed bare `except:` to the exceptions int() can raise,
        # so KeyboardInterrupt/SystemExit are no longer swallowed.
        try:
            port = int(leaf.getProperty("port"))
        except (ValueError, TypeError):
            port = 24224
        fhdlr = handler.FluentHandler(tag, host=host, port=port)
        fmt = {
            "time": "%(asctime)s",
            "name": "%(name)s",
            "level": "%(levelname)s",
        }
        # fix: removed a dead `formatter = handler.FluentRecordFormatter()`
        # that was created before `tag` and immediately overwritten here.
        fhdlr.setFormatter(handler.FluentRecordFormatter(fmt=fmt))
        self.handlers.append(fhdlr)
        self.logger.addHandler(fhdlr)
    self.logger.setLevel(logging.INFO)
    return True
def __init_fluent_handler(self, application_name: str, service_level: str, log_level: int):
    """Build and return a fluentd handler tagged 'rekcurd'.

    :param application_name: recorded in the 'ml_service' field
    :param service_level: recorded in the 'service_level' field
    :param log_level: minimum level for the handler
    :return: configured ``FluentHandler``
    """
    # %(loglevel)d is a non-standard field; presumably injected by a
    # filter elsewhere -- confirm upstream.
    record_layout = {
        'host': gethostname(),
        'short_message': '%(message)s',
        'timestamp': '%(created)d.%(msecs)d',
        'level': '%(loglevel)d',
        'service': 'rekcurd',
        'ml_service': application_name,
        'service_level': service_level
    }
    fluentd = handler.FluentHandler('rekcurd')
    fluentd.setFormatter(handler.FluentRecordFormatter(record_layout))
    fluentd.setLevel(log_level)
    return fluentd
def __init__(self, name):
    """Extend BasicLogger: when the configured log interface is fluentd,
    also forward records to the configured fluentd host/port."""
    BasicLogger.__init__(self, name)
    if global_config.LOG_INTERFACE != "fluentd":
        return
    node = platform.node()
    record_layout = {
        'host': '%(hostname)s',
        'where': '%(module)s.%(funcName)s',
        'status': '%(levelname)s',
        'stack_trace': '%(exc_text)s'
    }
    fluentd = handler.FluentHandler('%s::actinia.worker' % node,
                                    host=global_config.LOG_FLUENT_HOST,
                                    port=global_config.LOG_FLUENT_PORT)
    fluentd.setFormatter(handler.FluentRecordFormatter(record_layout))
    self.log.addHandler(fluentd)
def _init_logging(loggers):
    """Initialise logging for the given {logger_name: level_name} mapping.

    Behaviour depends on settings: tests log everything to stdout;
    development adds stdout handlers per logger; when a fluentd host is
    configured, each logger additionally forwards to fluentd.
    """
    if settings.RUNNING_TESTS:
        # Under tests: everything at DEBUG to stdout on the root logger.
        logger = logging.getLogger()
        logger.setLevel(logging.DEBUG)
        logger.addHandler(logging.StreamHandler(sys.stdout))
        return
    if settings.ENV == 'development':
        for _n, _l in loggers.items():
            # NOTE(review): basicConfig only takes effect on its first call;
            # repeated calls inside the loop are no-ops after the first.
            logging.basicConfig(level=getattr(logging, _l))
            _logger = logging.getLogger(_n)
            _logger.addHandler(logging.StreamHandler(sys.stdout))
            #_logger.addHandler(logging.StreamHandler(sys.stderr))
    if settings.FLUENTD_HANDLER_HOST:
        # Imported lazily so fluent/msgpack are only required when enabled.
        from fluent import handler
        import msgpack
        from io import BytesIO

        def overflow_handler(pendings):
            # Dump any records that could not be sent to fluentd to stdout.
            unpacker = msgpack.Unpacker(BytesIO(pendings))
            for unpacked in unpacker:
                print(unpacked)
        custom_format = {
            'host': '%(hostname)s',
            'where': '%(module)s.%(funcName)s',
            'type': '%(levelname)s',
            'stack_trace': '%(exc_text)s',
            'user_id': '%(user_id)s'
        }
        for _n, _l in loggers.items():
            logging.basicConfig(level=getattr(logging, _l))
            _handler = handler.FluentHandler(
                'app',
                host=settings.FLUENTD_HANDLER_HOST,
                port=int(settings.FLUENTD_HANDLER_PORT),
                buffer_overflow_handler=overflow_handler)
            # fill_missing_fmt_key: records lacking e.g. user_id still format.
            _handler.setFormatter(
                handler.FluentRecordFormatter(custom_format,
                                              fill_missing_fmt_key=True))
            _logger = logging.getLogger(_n)
            _logger.addHandler(_handler)
            _logger.setLevel(getattr(logging, _l))
            _logger.error('Logging initialized')
    return
def init_log():
    """Initialise the module-level ``log`` logger.

    Uses a fluentd handler when FLUENT_HOST/FLUENT_PORT are configured,
    otherwise a plain StreamHandler. Either way records are formatted
    with ``custom_format`` via FluentRecordFormatter.
    """
    # fix: the original ended with a bare `log = logger`, which only bound
    # a function-local name and had no effect; declare the module global.
    global log
    logging.basicConfig(level=LOG_LEVEL)
    logger = logging.getLogger(LOGNAME)
    if FLUENT_HOST and FLUENT_PORT:
        log_handler = handler.FluentHandler(
            'rssbot', host=FLUENT_HOST, port=FLUENT_PORT)
    else:
        log_handler = StreamHandler()
    # fix: one branch referenced bare `FluentRecordFormatter` and the other
    # the qualified name; use the qualified form consistently (the module
    # demonstrably imports `handler`, as the other branch used it).
    formatter = handler.FluentRecordFormatter(custom_format)
    log_handler.setFormatter(formatter)
    logger.addHandler(log_handler)
    log = logger
def create_logger(config, name):
    """Create the multiprocessing logger.

    It will log stderr from all running processes into a single worker
    logfile and to the fluentd server.

    Args:
        config: The global config
        name: The name of the logger

    Returns:
        The logger
    """
    log = logging.getLogger(name=name)
    log.setLevel(logging.INFO)
    node = platform.node()
    # Forward to fluentd only when configured AND the fluent package
    # imported successfully (module-level `has_fluent` flag).
    if config.LOG_INTERFACE == "fluentd" and has_fluent is True:
        record_layout = {
            'host': '%(hostname)s',
            'where': '%(module)s.%(funcName)s',
            'status': '%(levelname)s',
            'stack_trace': '%(exc_text)s'
        }
        fluentd = handler.FluentHandler('%s::actinia.worker' % node,
                                        host=config.LOG_FLUENT_HOST,
                                        port=config.LOG_FLUENT_PORT)
        fluentd.setFormatter(handler.FluentRecordFormatter(record_layout))
        log.addHandler(fluentd)
    # Rotate the worker logfile so it cannot grow without bound.
    file_handler = logging.handlers.RotatingFileHandler(
        '%s.log' % (config.WORKER_LOGFILE), maxBytes=2000000, backupCount=5)
    log.addHandler(file_handler)
    log.info("Logger %s created" % name)
    return log
def create_logger(service_id=None, service_ns=None, build_num=None, gcloud_log_host_port=None, debug=True):
    """Configure the root logger: always stdout; plus a fluentd handler
    (with a GCloud formatter) when not in debug mode.

    :param service_id: service identifier; defaults to 'unknown'
    :param service_ns: namespace prefixed to the fluentd tag (non-debug only)
    :param build_num: recorded on the GCloud formatter (non-debug only)
    :param gcloud_log_host_port: "HOST:PORT" of the fluentd endpoint
    :param debug: when True, skip the fluentd handler entirely
    :raises ValueError: if gcloud_log_host_port is not "HOST:PORT"
    """
    if not service_id:
        service_id = 'unknown'
    # http://stackoverflow.com/questions/3220284/how-to-customize-the-time-format-for-python-logging
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.INFO)
    ch = logging.StreamHandler(sys.stdout)
    formatter = StdoutFormatter(
        '%(asctime)s:%(levelname)s: %(message)s %(context)s',
        datefmt="%H:%M:%S")
    ch.setFormatter(formatter)
    root_logger.addHandler(ch)
    if not debug:
        gcloud_logs_parts = gcloud_log_host_port.split(':')
        if len(gcloud_logs_parts) != 2:
            # Message is in Russian: "GCLOUD_LOG_HOST_PORT is set incorrectly.
            # Check that it is written as HOST:PORT".
            raise ValueError(
                "GCLOUD_LOG_HOST_PORT задан неправильно. Проверьте правильность написания HOST:PORT"
            )
        service = service_ns + '.' + service_id
        h = handler.FluentHandler(service,
                                  host=gcloud_logs_parts[0],
                                  port=int(gcloud_logs_parts[1]))
        g_cloud_formatter = GCloudFormatter()
        g_cloud_formatter.service = service
        g_cloud_formatter.build_num = build_num
        h.setFormatter(g_cloud_formatter)
        root_logger.addHandler(h)

        def exit_handler():
            # The socket must be closed explicitly, per the docs:
            # https://github.com/fluent/fluent-logger-python
            h.close()
        atexit.register(exit_handler)
def init_logger(service, host="fluentd", port=24224):
    """Will initialise a global :class:`Logger` instance to log to fluentd.

    :tag: String used as tag for fluentd log routing.
    :host: Host where the fluentd is listening
    :port: Port where the fluentd is listening
    """
    global log
    tag = build_tag(service)
    logging.basicConfig(level=logging.INFO)
    base_logger = logging.getLogger(tag)
    fluentd = handler.FluentHandler(tag, host=host, port=port)
    fluentd.setFormatter(handler.FluentRecordFormatter(custom_format))
    base_logger.addHandler(fluentd)
    log = Logger(base_logger, service)
def set_logging(logger, log_name, host="127.0.0.1", port=24224, level='INFO'):
    """Attach a fluentd handler (tag ``log_name``) to the logger named
    ``logger`` and return it. ``level`` may be 'INFO' or 'DEBUG'."""
    # Structured fields forwarded with each fluentd record.
    custom_format = {
        'host': '%(hostname)s',
        'where': '%(module)s.%(funcName)s',
        'type': '%(levelname)s',
        'stack_trace': '%(exc_text)s',
        'created': '%(created)s',
        #'stack_info': '%(stack_info)s',
    }
    # Map the string level to a logging constant; other strings fall
    # through unchanged (logging accepts level names as strings).
    if level == 'INFO':
        level = logging.INFO
    if level == 'DEBUG':
        level = logging.DEBUG
    logging.basicConfig(
        level=level,
        format='%(asctime)s %(name)-8s %(levelname)-8s %(message)s',
        datefmt='%m-%d %H:%M',
        filemode='a')
    # Console settings
    console = logging.StreamHandler()
    console.setLevel(level)
    formatter = logging.Formatter('%(name)-8s: %(levelname)-8s %(message)s')
    # tell the handler to use this format
    console.setFormatter(formatter)
    # NOTE(review): `console` is configured but never attached to any logger
    # -- either dead code or a missing addHandler call; confirm intent.
    h = handler.FluentHandler(log_name, host=host, port=port)
    formatter = handler.FluentRecordFormatter(custom_format)
    h.setFormatter(formatter)
    log = logging.getLogger(logger)
    log.addHandler(h)
    return log
def __init__(self, log_name):
    """Build a fluentd-backed logger for ``log_name`` and assign a fresh
    run id; ``self.log_level`` and ``config`` come from the class/module."""
    level = logging.getLevelName(self.log_level)
    fluentd_log = logging.getLogger(log_name)
    fluentd_log.setLevel(level)
    fluentd = handler.FluentHandler(
        log_name,
        host=config['LOG_FLUENTD_HOST'],
        port=int(config['LOG_FLUENTD_PORT'])
    )
    fluentd.setLevel(level)
    record_layout = {
        "appname": "%(name)s",
        "loglevel": "%(levelname)s",
        "run_id": "%(run_id)s",
    }
    fluentd.setFormatter(handler.FluentRecordFormatter(record_layout))
    fluentd_log.addHandler(fluentd)
    self.log = fluentd_log
    self.run_id = uuid4()
def __init__(self, logger_name: str, level: int, destination: int = DESTINATION_PY_LOGGING):
    """Set up ``self.log`` for one of two destinations: plain console
    logging or fluentd (mongodb-backed).

    :param logger_name: logger name, also used as the fluentd tag
    :param level: minimum logging level
    :param destination: DESTINATION_PY_LOGGING or DESTINATION_FLUENTD_MONGODB
    :raises ValueError: for any other destination value
    """
    from fluent import handler
    self.config = natus_config.NATUSConfig('ncqa')
    self.destination = destination
    self.log = logging.getLogger(logger_name)
    self.log.setLevel(level)
    if self.destination == DESTINATION_PY_LOGGING:
        console = logging.StreamHandler()
        console.setFormatter(logging.Formatter(
            self.config.read_value('log', 'py.log.format')))
        self.log.addHandler(console)
    elif self.destination == DESTINATION_FLUENTD_MONGODB:
        self.handler = handler.FluentHandler(
            logger_name,
            host=self.config.read_value('log', 'fluentd.hostname'),
            port=int(self.config.read_value('log', 'fluentd.forward.port')),
        )
        record_layout = {
            'host': '%(hostname)s',
            'where': '%(module)s.%(funcName)s',
            'type': '%(levelname)s',
            'stack_trace': '%(exc_text)s'
        }
        self.handler.setFormatter(handler.FluentRecordFormatter(record_layout))
        self.log.addHandler(self.handler)
    else:
        print('Unrecognized log destination')
        raise ValueError
# # "propagate": "no" # # } # # }, # # "root": { # 'handlers': ['default'], # 'level': "INFO", # 'propagate': False # } # } # # logging.config.dictConfig(LOGGING) # import logging as _logging from fluent import handler from xsqlmb.settings import LoggerSettings custom_format = { 'host': '%(hostname)s', 'where': '%(module)s.%(funcName)s', #具体到文件、函数 'type': '%(levelname)s', 'stack_trace': '%(exc_text)s' } _logging.basicConfig(level=_logging.DEBUG) logging = _logging.getLogger('fluent.test') h = handler.FluentHandler('mongo.logger', **LoggerSettings) formatter = handler.FluentRecordFormatter(custom_format) h.setFormatter(formatter) logging.addHandler(h)
for unpacked in unpacker: print(unpacked) custom_format = { 'host': '%(hostname)s', 'where': '%(module)s.%(funcName)s', 'type': '%(levelname)s', 'stack_trace': '%(exc_text)s' } logging.basicConfig(level=logging.INFO) l = logging.getLogger('fluent.test') # h = handler.FluentHandler('fluent-python.log', host='fluentd-es.logging', port=24224, buffer_overflow_handler=overflow_handler) h = handler.FluentHandler('fluent-python.log', host='192.168.181.99', port=30224, buffer_overflow_handler=overflow_handler) formatter = handler.FluentRecordFormatter(custom_format) h.setFormatter(formatter) l.addHandler(h) @app.route("/") def hello(): l.info('follow-event-base', {'from': 'userA', 'to': 'userB'}) visits = "hahahahahhahahahahahhahahahahhaahhaha..." html = "<h3>Hello {name}!</h3>" \ "<b>Hostname:</b> {hostname}<br/>" \ "<b>Visits:</b> ${visits}" return html.format(name=os.getenv("NAME", "world"), hostname=socket.gethostname(),
from utils import BlackList
from tornado.log import enable_pretty_logging, access_log, app_log, gen_log, LogFormatter
from itertools import groupby, islice
import operator
from collections import Counter
from functools import reduce
from fluent import handler

# Structured fields shipped with every fluentd record.
custom_format = {
    'where': '%(module)s.%(funcName)s',
    'type': '%(levelname)s',
    'stack_trace': '%(exc_text)s'
}
l = logging.getLogger('mongo.log')
h = handler.FluentHandler('mongo.log', host="localhost", port=24224)
formatter = handler.FluentRecordFormatter(custom_format)
h.setFormatter(formatter)
l.addHandler(h)

enable_pretty_logging()
my_log_format = "[%(levelname)s][%(asctime)s][%(name)s]%(message)s\n"
my_log_formatter = LogFormatter(fmt=my_log_format, color=True)
root_logger = logging.getLogger()
# Assumes enable_pretty_logging() installed at least one root handler;
# IndexError otherwise -- TODO confirm.
root_streamhandler = root_logger.handlers[0]
# root_streamhandler.setLevel('WARN')
root_streamhandler.setFormatter(my_log_formatter)

MESSAGE_LEN_LIMIT = 30
ERROR_COUNT_LIMIT = 20
# Fluentd endpoint and tag, overridable via environment variables.
FLUENT_HOST = os.getenv('FLUENT_HOST', 'fluentd-logger')
FLUENT_PORT = int(os.getenv('FLUENT_PORT', 24224))
FLUENT_TAG = os.getenv('FLUENT_TAG', 'docker.docker-tag')

if __name__ == "__main__":
    # Record layout: '@timestamp' carries an ISO-8601 UTC time with ms.
    msgfmt = {
        'host': '%(hostname)s',
        'where': '%(module)s.%(funcName)s',
        'type': '%(levelname)s',
        'stack_trace': '%(exc_text)s',
        '@timestamp': '%(asctime)s.%(msecs)03d'
    }
    datefmt = '%Y-%m-%dT%H:%M:%S'
    logging.basicConfig(level=logging.INFO)
    # Emit timestamps in UTC rather than local time.
    logging.Formatter.converter = time.gmtime
    l = logging.getLogger('fluent.test')
    h = handler.FluentHandler(FLUENT_TAG, host=FLUENT_HOST, port=FLUENT_PORT)
    formatter = handler.FluentRecordFormatter(msgfmt, datefmt=datefmt)
    h.setFormatter(formatter)
    l.addHandler(h)
    # Demo loop: emit a dict, a JSON string, and a plain string every 10s.
    while True:
        l.info({'from': 'userA', 'to': 'userB'})
        l.info('{"from": "userC", "to": "userD"}')
        l.info(
            "This log entry will be logged with the additional key: 'message'."
        )
        time.sleep(10)
import os
import socket
from fluent import sender
from fluent import event
import logging
from fluent import handler

# Structured fields forwarded with every fluentd record.
custom_format = {
    'host': '%(hostname)s',
    'where': '%(module)s.%(funcName)s',
    'type': '%(levelname)s',
    'stack_trace': '%(exc_text)s'
}
logging.basicConfig(level=logging.INFO)
l = logging.getLogger(__name__)
h = handler.FluentHandler('app.follow', host='localhost', port=24224)
formatter = handler.FluentRecordFormatter(custom_format)
h.setFormatter(formatter)
l.addHandler(h)
# Demo records: a dict (forwarded as-is), a JSON string, and plain strings
# (plain strings are forwarded under the key 'message').
l.info({
    'from': 'userA',
    'to': 'userB'
})
l.info('{"from": "userC", "to": "userD"}')
l.info("This log entry will be logged with the additional key: 'message'.")
l.error("This log entry will be logged with the additional key: 'message'.")
console_handler = logging.StreamHandler(sys.stdout) console_handler.setFormatter(log_row_format) ADMINISTRATIVE_LEVEL = 9 logging.addLevelName(ADMINISTRATIVE_LEVEL, "ADMIN") structured_log_format = { 'host': '%(hostname)s', 'time': '%(asctime)s', 'function': '%(module)s.%(funcName)s', 'level': '%(levelname)s', 'stack_trace': '%(exc_text)s' } fluentd_handler = fluent_handler.FluentHandler('zodiac.logs', host='localhost', port=2104) # noinspection PyTypeChecker formatter = fluent_handler.FluentRecordFormatter(structured_log_format) fluentd_handler.setFormatter(formatter) fluentd_handler.setLevel(ADMINISTRATIVE_LEVEL) fluentd_logger = logging.getLogger('fluentd_logger') fluentd_logger.setLevel(ADMINISTRATIVE_LEVEL) fluentd_logger.addHandler(fluentd_handler) fluentd_logger.propagate = False def handle_unhandled_exception(exc_type, exc_value, exc_traceback): """Handler for unhandled exceptions that will write to the logs""" if issubclass(exc_type, KeyboardInterrupt): # call the default excepthook saved at __excepthook__
import logging from django.conf import settings from fluent import handler logger = logging.getLogger("platformback") logger.setLevel(level=logging.INFO) logHandler = handler.FluentHandler( settings.FLUENTD_TAG, host=settings.FLUENTD_SERVER, port=int(settings.FLUENTD_PORT), nanosecond_precision=True, ) formatter = handler.FluentRecordFormatter() logHandler.setFormatter(formatter) logger.addHandler(logHandler) def log_user_connect(func): def wrapper(*args, **kwargs): user = func(*args, **kwargs) logger.info({ "env": settings.ENV, "email": user.email, "action": "connect_to_platform", "message": f"{user.email} has just logged on to the platform", })
from io import BytesIO def overflow_handler(pendings): unpacker = msgpack.Unpacker(BytesIO(pendings)) for unpacked in unpacker: print(unpacked) fluent_format = { 'host': '%(hostname)s', 'where': '%(module)s.%(funcName)s', 'type': '%(levelname)s', 'stack_trace': '%(exc_text)s' } h = handler.FluentHandler('pot.packet', host=FLUENT_HOST, port=FLUENT_PORT, buffer_overflow_handler=overflow_handler) formatter = handler.FluentRecordFormatter(fluent_format) h.setFormatter(formatter) logger.addHandler(h) def logging_packet(packet: Packet): ip = packet[IP] tcp = packet[TCP] flags = tcp.flags if ip.src == TARGET_IP_ADDRESS: gid = hashlib.sha256(f"{ip.src}:{tcp.sport} > {ip.dst}:{tcp.dport}".encode()).hexdigest() else: gid = hashlib.sha256(f"{ip.dst}:{tcp.dport} > {ip.src}:{tcp.sport}".encode()).hexdigest()
thread_log.setLevel(logging.INFO) # set up memory profiler log handler to feed memory profiler output into fluentd memory_log = logging.getLogger("memory_profiler") if TURBOGEARS_PROFILER_LOG_TO_CONSOLE: mem_out_hdlr = logging.StreamHandler(sys.stdout) mem_out_hdlr.setLevel(logging.INFO) memory_log.addHandler(mem_out_hdlr) fluentd_format = { 'hostname': '%(hostname)s', 'where': '%(controller_module)s.%(controller_class)s.%(endpoint)s' } mem_fluent_hdlr = handler.FluentHandler('turbogears.memory_profiler', host=FLUENTD_HOST_NAME, port=FLUENTD_PORT) mem_fluentd_formatter = handler.FluentRecordFormatter(fluentd_format) mem_fluent_hdlr.setFormatter(mem_fluentd_formatter) mem_fluent_hdlr.setLevel(logging.INFO) memory_log.addHandler(mem_fluent_hdlr) memory_log.setLevel(logging.INFO) thread_log.info( 'turbogears memory profiler settings: FLUENTD_HOST_NAME={} FLUENTD_PORT={} ' 'TURBOGEARS_PROFILER_FIFO_PATH={} ' 'TURBOGEARS_PROFILER_FIFO_NAME={} ' 'TURBOGEARS_PROFILER_LOG_TO_CONSOLE={}'.format( FLUENTD_HOST_NAME, FLUENTD_PORT, TURBOGEARS_PROFILER_FIFO_PATH, TURBOGEARS_PROFILER_FIFO_NAME, TURBOGEARS_PROFILER_LOG_TO_CONSOLE))
'type': '%(levelname)s', 'stack_trace': '%(exc_text)s' } def overflow_handler(pending): unpacker = msgpack.Unpacker(BytesIO(pending)) for unpacked in unpacker: print(unpacked) logging.basicConfig(level=logging.INFO, ) l = logging.getLogger('fluent.test') h = handler.FluentHandler('app.follow', host='localhost', port=24224, buffer_overflow_handler=overflow_handler) formatter = handler.FluentRecordFormatter(custom_format) h.setFormatter(formatter) l.addHandler(h) def main(): while True: divisor = random.choice([0, 2]) random_number = random.randint(1, 1000) try: logging.info(f"{random_number} divided by {divisor}.") quotient, remainder = divmod(random_number, divisor) if remainder != 0: