def init(self, prop):
    """Initialise fluent logging from a configuration property tree.

    Registers four custom log levels, then creates one FluentHandler
    per "output" leaf found in *prop*.

    :param prop: configuration node; assumed to expose ``getLeaf()``,
        with each leaf providing ``getName()``/``getProperty()`` —
        confirmed only by usage here.
    :return: ``False`` when an output leaf has an empty "tag",
        ``True`` otherwise.
    """
    self.logger = logging.getLogger("fluent")
    self.handlers = []
    # Remember the first initialised instance as the class-wide singleton.
    if FluentBit.s_logger is None:
        FluentBit.s_logger = self
    # Register extra verbosity levels below DEBUG and one above ERROR.
    logging.PARANOID = logging.DEBUG - 3
    logging.VERBOSE = logging.DEBUG - 2
    logging.TRACE = logging.DEBUG - 1
    logging.FATAL = logging.ERROR + 1
    logging.addLevelName(logging.PARANOID, "PARANOID")
    logging.addLevelName(logging.VERBOSE, "VERBOSE")
    logging.addLevelName(logging.TRACE, "TRACE")
    logging.addLevelName(logging.FATAL, "FATAL")
    for leaf in prop.getLeaf():
        # Only leaves whose name mentions "output" configure a handler.
        if "output" not in leaf.getName():
            continue
        tag = leaf.getProperty("tag")
        if tag == "":
            return False
        host = leaf.getProperty("host")
        if host == "":
            host = "127.0.0.1"
        # Narrowed from a bare except: only conversion failures fall
        # back to the default fluentd forward port.
        try:
            port = int(leaf.getProperty("port"))
        except (TypeError, ValueError):
            port = 24224
        fhdlr = handler.FluentHandler(tag, host=host, port=port)
        fmt = {
            "time": "%(asctime)s",
            "name": "%(name)s",
            "level": "%(levelname)s",
        }
        # The original built a throwaway FluentRecordFormatter() before
        # this one; the dead assignment has been removed.
        fhdlr.setFormatter(handler.FluentRecordFormatter(fmt=fmt))
        self.handlers.append(fhdlr)
        self.logger.addHandler(fhdlr)
        self.logger.setLevel(logging.INFO)
    return True
def __init__(self, logger_name: str = 'rekcurd_dashboard', log_level: int = logging.NOTSET) -> None:
    """Set up a fluentd-backed logger for the dashboard.

    :param logger_name: name of the underlying ``logging`` logger
    :param log_level: threshold applied to both logger and handler
    """
    super().__init__()
    self.log = logging.getLogger(logger_name)
    self.log.setLevel(log_level)
    # Record layout forwarded to fluentd; %(...)s fields are resolved
    # per-record by FluentRecordFormatter.
    record_layout = {
        'host': gethostname(),
        'short_message': '%(message)s',
        'timestamp': '%(created)d.%(msecs)d',
        'level': '%(loglevel)d'
    }
    fluentd_handler = handler.FluentHandler('rekcurd_dashboard')
    fluentd_handler.setFormatter(handler.FluentRecordFormatter(record_layout))
    fluentd_handler.setLevel(log_level)
    # Replace any previously attached handlers wholesale.
    self.log.handlers = []
    self.log.addHandler(fluentd_handler)
def add_fluentd(self):
    """Attach a fluentd handler when ``self.fluentd_hostname`` is set.

    Closes and replaces any previously registered fluentd handler.
    Missing fluentd settings surface as AttributeError and are logged
    as a warning instead of raising.
    """
    try:
        if self.fluentd_hostname:
            # Close an existing handler before replacing it; first call
            # just seeds the slot with None.
            try:
                if self._handlers["fluentd"]:
                    self._handlers["fluentd"].close()
            except KeyError:
                self._handlers["fluentd"] = None
            print("sending logs to fluentd at: {}".format((
                self.fluentd_tag,
                self.fluentd_record,
                self.fluentd_hostname,
                self.fluentd_port,
            )))
            new_handler = fluent_handler.FluentHandler(
                "{}.{}".format(self.fluentd_tag, self.fluentd_record),
                host=self.fluentd_hostname,
                port=self.fluentd_port,
            )
            new_handler.setFormatter(
                fluent_handler.FluentRecordFormatter(self.format("fluentd")))
            self._handlers["fluentd"] = new_handler
            self._logger.addHandler(new_handler)
            self._logger.addFilter(ContextFilter())
    except AttributeError:
        self._logger.warning(
            "Fluentd hostname not defined in settings. Skipping!")
def getlogger(name, log_file, log_level="INFO"):
    """Build a logger that writes to *log_file* and to Elasticsearch via fluentd.

    :param name: logger name, also used as the fluentd tag
    :param log_file: path of the rotating destination file
    :param log_level: level applied to the logger and the file handler
    :return: the configured logger
    """
    importer_logger = logging.getLogger(name)
    importer_logger.setLevel(log_level)

    # File handler: plain-text records on disk.
    file_handler = logging.FileHandler(filename=log_file)
    file_handler.setLevel(log_level)
    file_handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(msecs)d - %(funcName)s - %(lineno)d : %(levelname)s : %(message)s'
    ))
    importer_logger.addHandler(file_handler)

    # Fluent handler: structured records shipped to Elasticsearch.
    fluent_hdlr = handler.FluentHandler(
        name,
        host=cst.FLUENT_CONFIGURATION["SERVER"],
        port=cst.FLUENT_CONFIGURATION["PORT"],
        buffer_overflow_handler=overflow_handler)
    structured_layout = {
        'host': '%(hostname)s',
        'where': '%(module)s.%(funcName)s',
        'type': '%(levelname)s',
        'stack_trace': '%(exc_text)s',
        'lineno': '%(lineno)s',
        'pathname': '%(pathname)s',
    }
    fluent_hdlr.setFormatter(fluent_handler.FluentRecordFormatter(structured_layout))
    importer_logger.addHandler(fluent_hdlr)
    return importer_logger
def __init__(self, tag, ip, port, loggerName="fluent.test"):
    """Wire a FluentHandler (tag/ip/port) onto the named logger.

    :param tag: fluentd tag for emitted records
    :param ip: fluentd host address
    :param port: fluentd forward port
    :param loggerName: name of the logging.Logger to attach to
    """
    logging.basicConfig(level=logging.INFO)
    self._logger = logging.getLogger(loggerName)
    self._loggerHandler = handler.FluentHandler(tag, host=ip, port=port)
    # customFormat is a module-level record layout shared by callers.
    self._formatter = handler.FluentRecordFormatter(customFormat)
    self._loggerHandler.setFormatter(self._formatter)
    self._logger.addHandler(self._loggerHandler)
def handle():
    """Emit one structured record through a locally running fluentd."""
    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger('fluent.handle')
    fluent_hdlr = handler.FluentHandler('app', host='127.0.0.1', port=24224)
    fluent_hdlr.setFormatter(handler.FluentRecordFormatter(custom_format))
    log.addHandler(fluent_hdlr)
    log.info({'from': 'CC', 'to': 'DD'})
def custom():
    """Emit one structured record through the custom MyFluentdHandler."""
    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger('fluent.custom')
    custom_hdlr = MyFluentdHandler()
    custom_hdlr.setFormatter(handler.FluentRecordFormatter(custom_format))
    log.addHandler(custom_hdlr)
    log.info({'from': 'EE', 'to': 'FF'})
def configure_logger(self, event):
    """Register a fluent log handler if enabled in the injected configuration.

    :param event: framework event triggering configuration (unused here)
    """
    config = inject.instance(applauncher.kernel.Configuration)
    logger_config = config.fluent.log_handler
    # Guard clause: nothing to do when the handler is disabled.
    if not logger_config.enabled:
        return
    fluent_hdlr = handler.FluentHandler(
        config.fluent.tag,
        host=logger_config.host,
        port=logger_config.port)
    # The configured format uses single quotes; normalise to valid JSON.
    layout = json.loads(logger_config.format.replace("'", '"'))
    fluent_hdlr.setFormatter(handler.FluentRecordFormatter(layout))
    self.log_handlers.append(fluent_hdlr)
def get_logger(name, fluentd_host='localhost', fluentd_port=24224):
    """Get a Python logger instance which forwards logs to Fluentd.

    :param name: logger name
    :param fluentd_host: host where fluentd listens
    :param fluentd_port: fluentd forward port
    :return: the configured logger
    """
    logger = logging.getLogger(name)
    fluent_hdlr = handler.FluentHandler(
        'mole.logs',
        host=fluentd_host,
        port=fluentd_port,
        buffer_overflow_handler=overflow_handler)
    fluent_hdlr.setFormatter(
        handler.FluentRecordFormatter(custom_format, format_json=False))
    logger.addHandler(fluent_hdlr)
    return logger
def kafka_logger(log_name):
    """Create an INFO-level logger shipping records to a remote fluent endpoint.

    :param log_name: used both as logger name and fluent tag
    :return: the configured logger
    """
    record_layout = {
        'file_name': '%(module)s',
        'log_level': '%(levelname)s',
        'line': '%(lineno)d'
    }
    fluent_hdlr = handler.FluentHandler(log_name, host='10.19.47.136', port=54224)
    fluent_hdlr.setFormatter(handler.FluentRecordFormatter(record_layout))
    logger = logging.getLogger(log_name)
    logger.setLevel(logging.INFO)
    logger.addHandler(fluent_hdlr)
    return logger
def __init_fluent_handler(self, application_name: str, service_level: str, log_level: int):
    """Build the FluentHandler used for rekcurd service logging.

    :param application_name: reported as the ``ml_service`` field
    :param service_level: reported as the ``service_level`` field
    :param log_level: level threshold applied to the handler
    :return: the configured FluentHandler
    """
    record_layout = {
        'host': gethostname(),
        'short_message': '%(message)s',
        'timestamp': '%(created)d.%(msecs)d',
        'level': '%(loglevel)d',
        'service': 'rekcurd',
        'ml_service': application_name,
        'service_level': service_level
    }
    fluentd_handler = handler.FluentHandler('rekcurd')
    fluentd_handler.setFormatter(handler.FluentRecordFormatter(record_layout))
    fluentd_handler.setLevel(log_level)
    return fluentd_handler
def __init__(self, name):
    """Extend BasicLogger with a fluentd handler when configured.

    :param name: logger name forwarded to BasicLogger
    """
    BasicLogger.__init__(self, name)
    if global_config.LOG_INTERFACE == "fluentd":
        node = platform.node()
        record_layout = {
            'host': '%(hostname)s',
            'where': '%(module)s.%(funcName)s',
            'status': '%(levelname)s',
            'stack_trace': '%(exc_text)s'
        }
        # Tag the stream per worker node.
        fluentd_handler = handler.FluentHandler(
            '%s::actinia.worker' % node,
            host=global_config.LOG_FLUENT_HOST,
            port=global_config.LOG_FLUENT_PORT)
        fluentd_handler.setFormatter(
            handler.FluentRecordFormatter(record_layout))
        self.log.addHandler(fluentd_handler)
def _init_logging(loggers):
    """Initialise process logging according to the runtime environment.

    :param loggers: mapping of logger name -> level name (e.g. "INFO")

    Under tests: one DEBUG root logger on stdout. In development: one
    stdout handler per configured logger. When a fluentd host is
    configured, additionally ship every configured logger to fluentd.
    """
    if settings.RUNNING_TESTS:
        root = logging.getLogger()
        root.setLevel(logging.DEBUG)
        root.addHandler(logging.StreamHandler(sys.stdout))
        return
    if settings.ENV == 'development':
        for logger_name, level_name in loggers.items():
            logging.basicConfig(level=getattr(logging, level_name))
            dev_logger = logging.getLogger(logger_name)
            dev_logger.addHandler(logging.StreamHandler(sys.stdout))
    if settings.FLUENTD_HANDLER_HOST:
        # Imported lazily so the fluent dependency is only needed when
        # fluentd shipping is actually enabled.
        from fluent import handler
        import msgpack
        from io import BytesIO

        def overflow_handler(pendings):
            # Dump records the fluent buffer could not deliver.
            unpacker = msgpack.Unpacker(BytesIO(pendings))
            for unpacked in unpacker:
                print(unpacked)

        record_layout = {
            'host': '%(hostname)s',
            'where': '%(module)s.%(funcName)s',
            'type': '%(levelname)s',
            'stack_trace': '%(exc_text)s',
            'user_id': '%(user_id)s'
        }
        for logger_name, level_name in loggers.items():
            logging.basicConfig(level=getattr(logging, level_name))
            fluent_hdlr = handler.FluentHandler(
                'app',
                host=settings.FLUENTD_HANDLER_HOST,
                port=int(settings.FLUENTD_HANDLER_PORT),
                buffer_overflow_handler=overflow_handler)
            fluent_hdlr.setFormatter(
                handler.FluentRecordFormatter(record_layout,
                                              fill_missing_fmt_key=True))
            app_logger = logging.getLogger(logger_name)
            app_logger.addHandler(fluent_hdlr)
            app_logger.setLevel(getattr(logging, level_name))
            app_logger.error('Logging initialized')
    return
def init_log():
    """Initialise the module logger, preferring fluentd over stderr."""
    logging.basicConfig(level=LOG_LEVEL)
    logger = logging.getLogger(LOGNAME)
    if FLUENT_HOST and FLUENT_PORT:
        log_handler = handler.FluentHandler(
            'rssbot', host=FLUENT_HOST, port=FLUENT_PORT)
        formatter = FluentRecordFormatter(custom_format)
    else:
        log_handler = StreamHandler()
        # NOTE(review): a FluentRecordFormatter on a plain StreamHandler
        # looks unusual — confirm this fallback formatting is intended.
        formatter = handler.FluentRecordFormatter(custom_format)
    log_handler.setFormatter(formatter)
    logger.addHandler(log_handler)
    # NOTE(review): local assignment only; presumably meant to update a
    # module-level `log` — verify against callers.
    log = logger
def create_logger(config, name):
    """Create the multiprocessing logger.

    Logs stderr from all running processes into a single worker logfile
    and, when configured and available, to the fluentd server.

    Args:
        config: The global config
        name: The name of the logger

    Returns:
        The logger
    """
    logger = logging.getLogger(name=name)
    logger.setLevel(logging.INFO)
    node = platform.node()

    # Optional fluentd shipping, only when the client library imported.
    if config.LOG_INTERFACE == "fluentd" and has_fluent is True:
        record_layout = {
            'host': '%(hostname)s',
            'where': '%(module)s.%(funcName)s',
            'status': '%(levelname)s',
            'stack_trace': '%(exc_text)s'
        }
        fluent_hdlr = handler.FluentHandler(
            '%s::actinia.worker' % node,
            host=config.LOG_FLUENT_HOST,
            port=config.LOG_FLUENT_PORT)
        fluent_hdlr.setFormatter(handler.FluentRecordFormatter(record_layout))
        logger.addHandler(fluent_hdlr)

    # Always write to a size-capped rotating logfile.
    log_file_name = '%s.log' % (config.WORKER_LOGFILE)
    rotating_hdlr = logging.handlers.RotatingFileHandler(
        log_file_name, maxBytes=2000000, backupCount=5)
    logger.addHandler(rotating_hdlr)

    logger.info("Logger %s created" % name)
    return logger
def init_logger(service, host="fluentd", port=24224):
    """Will initialise a global :class:`Logger` instance to log to fluentd.

    :service: Service name used to build the fluentd routing tag.
    :host: Host where the fluentd is listening
    :port: Port where the fluentd is listening
    """
    tag = build_tag(service)
    logging.basicConfig(level=logging.INFO)
    base_logger = logging.getLogger(tag)
    fluent_hdlr = handler.FluentHandler(tag, host=host, port=port)
    fluent_hdlr.setFormatter(handler.FluentRecordFormatter(custom_format))
    base_logger.addHandler(fluent_hdlr)
    # Publish the wrapped logger module-wide.
    global log
    log = Logger(base_logger, service)
def set_logging(logger, log_name, host="127.0.0.1", port=24224, level='INFO'):
    """Configure a logger with a fluent handler and return it.

    :param logger: name of the logger to configure
    :param log_name: fluent tag for shipped records
    :param host: fluentd host
    :param port: fluentd forward port
    :param level: 'INFO' or 'DEBUG' (string form)
    :return: the configured logger
    """
    record_layout = {
        'host': '%(hostname)s',
        'where': '%(module)s.%(funcName)s',
        'type': '%(levelname)s',
        'stack_trace': '%(exc_text)s',
        'created': '%(created)s',
        #'stack_info': '%(stack_info)s',
    }
    # Map the string level onto the logging constants.
    if level == 'INFO':
        level = logging.INFO
    if level == 'DEBUG':
        level = logging.DEBUG
    logging.basicConfig(
        level=level,
        format='%(asctime)s %(name)-8s %(levelname)-8s %(message)s',
        datefmt='%m-%d %H:%M',
        filemode='a')
    # Console settings
    # NOTE(review): this console handler is configured but never added
    # to any logger — confirm whether attaching it was intended.
    console = logging.StreamHandler()
    console.setLevel(level)
    console.setFormatter(
        logging.Formatter('%(name)-8s: %(levelname)-8s %(message)s'))
    fluent_hdlr = handler.FluentHandler(log_name, host=host, port=port)
    fluent_hdlr.setFormatter(handler.FluentRecordFormatter(record_layout))
    log = logging.getLogger(logger)
    log.addHandler(fluent_hdlr)
    return log
def __init__(self, log_name):
    """Create a fluentd-backed logger and a fresh run identifier.

    :param log_name: logger name and fluent tag
    """
    fluentd_log = logging.getLogger(log_name)
    fluentd_log.setLevel(logging.getLevelName(self.log_level))
    fluent_hdlr = handler.FluentHandler(
        log_name,
        host=config['LOG_FLUENTD_HOST'],
        port=int(config['LOG_FLUENTD_PORT'])
    )
    fluent_hdlr.setLevel(logging.getLevelName(self.log_level))
    record_layout = {
        "appname": "%(name)s",
        "loglevel": "%(levelname)s",
        "run_id": "%(run_id)s",
    }
    fluent_hdlr.setFormatter(handler.FluentRecordFormatter(record_layout))
    fluentd_log.addHandler(fluent_hdlr)
    self.log = fluentd_log
    # Unique id stamped onto every record of this run.
    self.run_id = uuid4()
def __init__(self, logger_name: str, level: int, destination: int = DESTINATION_PY_LOGGING):
    """Route logging either to the console or to fluentd/MongoDB.

    :param logger_name: logger name (and fluent tag when applicable)
    :param level: logging level threshold
    :param destination: DESTINATION_PY_LOGGING or DESTINATION_FLUENTD_MONGODB
    :raises ValueError: when *destination* is unrecognised
    """
    from fluent import handler
    self.config = natus_config.NATUSConfig('ncqa')
    self.destination = destination
    self.log = logging.getLogger(logger_name)
    self.log.setLevel(level)
    if self.destination == DESTINATION_PY_LOGGING:
        console = logging.StreamHandler()
        console.setFormatter(logging.Formatter(
            self.config.read_value('log', 'py.log.format')))
        self.log.addHandler(console)
    elif self.destination == DESTINATION_FLUENTD_MONGODB:
        self.handler = handler.FluentHandler(
            logger_name,
            host=self.config.read_value('log', 'fluentd.hostname'),
            port=int(self.config.read_value('log', 'fluentd.forward.port')),
        )
        record_layout = {
            'host': '%(hostname)s',
            'where': '%(module)s.%(funcName)s',
            'type': '%(levelname)s',
            'stack_trace': '%(exc_text)s'
        }
        self.handler.setFormatter(handler.FluentRecordFormatter(record_layout))
        self.log.addHandler(self.handler)
    else:
        print('Unrecognized log destination')
        raise ValueError
custom_format = {
    'host': '%(hostname)s',
    'where': '%(module)s.%(funcName)s',
    'type': '%(levelname)s',
    'stack_trace': '%(exc_text)s'
}

logging.basicConfig(level=logging.INFO)
l = logging.getLogger('fluent.test')
# In-cluster alternative endpoint:
# h = handler.FluentHandler('fluent-python.log', host='fluentd-es.logging', port=24224, buffer_overflow_handler=overflow_handler)
h = handler.FluentHandler('fluent-python.log', host='192.168.181.99',
                          port=30224,
                          buffer_overflow_handler=overflow_handler)
h.setFormatter(handler.FluentRecordFormatter(custom_format))
l.addHandler(h)


@app.route("/")
def hello():
    """Root route: log a structured event and return a small HTML page."""
    l.info('follow-event-base', {'from': 'userA', 'to': 'userB'})
    visits = "hahahahahhahahahahahhahahahahhaahhaha..."
    html = "<h3>Hello {name}!</h3>" \
           "<b>Hostname:</b> {hostname}<br/>" \
           "<b>Visits:</b> ${visits}"
    return html.format(name=os.getenv("NAME", "world"),
                       hostname=socket.gethostname(),
                       visits=visits)
# Fluentd connection parameters, overridable via environment.
FLUENT_HOST = os.getenv('FLUENT_HOST', 'fluentd-logger')
FLUENT_PORT = int(os.getenv('FLUENT_PORT', 24224))
FLUENT_TAG = os.getenv('FLUENT_TAG', 'docker.docker-tag')

if __name__ == "__main__":
    msgfmt = {
        'host': '%(hostname)s',
        'where': '%(module)s.%(funcName)s',
        'type': '%(levelname)s',
        'stack_trace': '%(exc_text)s',
        '@timestamp': '%(asctime)s.%(msecs)03d'
    }
    datefmt = '%Y-%m-%dT%H:%M:%S'
    logging.basicConfig(level=logging.INFO)
    # Timestamps in UTC rather than local time.
    logging.Formatter.converter = time.gmtime
    demo_logger = logging.getLogger('fluent.test')
    fluent_hdlr = handler.FluentHandler(FLUENT_TAG, host=FLUENT_HOST, port=FLUENT_PORT)
    fluent_hdlr.setFormatter(handler.FluentRecordFormatter(msgfmt, datefmt=datefmt))
    demo_logger.addHandler(fluent_hdlr)
    # Emit a demo record every 10 seconds, forever.
    while True:
        demo_logger.info({'from': 'userA', 'to': 'userB'})
        demo_logger.info('{"from": "userC", "to": "userD"}')
        demo_logger.info(
            "This log entry will be logged with the additional key: 'message'."
        )
        time.sleep(10)
# Custom "ADMIN" level sits just below DEBUG (10).
ADMINISTRATIVE_LEVEL = 9
logging.addLevelName(ADMINISTRATIVE_LEVEL, "ADMIN")

structured_log_format = {
    'host': '%(hostname)s',
    'time': '%(asctime)s',
    'function': '%(module)s.%(funcName)s',
    'level': '%(levelname)s',
    'stack_trace': '%(exc_text)s'
}
fluentd_handler = fluent_handler.FluentHandler('zodiac.logs',
                                               host='localhost', port=2104)
# noinspection PyTypeChecker
formatter = fluent_handler.FluentRecordFormatter(structured_log_format)
fluentd_handler.setFormatter(formatter)
fluentd_handler.setLevel(ADMINISTRATIVE_LEVEL)
fluentd_logger = logging.getLogger('fluentd_logger')
fluentd_logger.setLevel(ADMINISTRATIVE_LEVEL)
fluentd_logger.addHandler(fluentd_handler)
# Keep admin records out of ancestor loggers.
fluentd_logger.propagate = False


def handle_unhandled_exception(exc_type, exc_value, exc_traceback):
    """Handler for unhandled exceptions that will write to the logs"""
    if issubclass(exc_type, KeyboardInterrupt):
        # call the default excepthook saved at __excepthook__
        sys.__excepthook__(exc_type, exc_value, exc_traceback)
        return
    # NOTE(review): the visible body ends after fetching this logger and
    # never logs with it — looks truncated; confirm against the full file.
    logger = get_logger(logger_name='Unhandled-Exception-Catcher')
# set up memory profiler log handler to feed memory profiler output into fluentd
memory_log = logging.getLogger("memory_profiler")

# Optional mirror of profiler output to the console.
if TURBOGEARS_PROFILER_LOG_TO_CONSOLE:
    mem_out_hdlr = logging.StreamHandler(sys.stdout)
    mem_out_hdlr.setLevel(logging.INFO)
    memory_log.addHandler(mem_out_hdlr)

fluentd_format = {
    'hostname': '%(hostname)s',
    'where': '%(controller_module)s.%(controller_class)s.%(endpoint)s'
}
mem_fluent_hdlr = handler.FluentHandler('turbogears.memory_profiler',
                                        host=FLUENTD_HOST_NAME,
                                        port=FLUENTD_PORT)
mem_fluent_hdlr.setFormatter(handler.FluentRecordFormatter(fluentd_format))
mem_fluent_hdlr.setLevel(logging.INFO)
memory_log.addHandler(mem_fluent_hdlr)
memory_log.setLevel(logging.INFO)

thread_log.info(
    'turbogears memory profiler settings: FLUENTD_HOST_NAME={} FLUENTD_PORT={} '
    'TURBOGEARS_PROFILER_FIFO_PATH={} '
    'TURBOGEARS_PROFILER_FIFO_NAME={} '
    'TURBOGEARS_PROFILER_LOG_TO_CONSOLE={}'.format(
        FLUENTD_HOST_NAME, FLUENTD_PORT,
        TURBOGEARS_PROFILER_FIFO_PATH,
        TURBOGEARS_PROFILER_FIFO_NAME,
        TURBOGEARS_PROFILER_LOG_TO_CONSOLE))
div = offer.find(attrs={"class": "m-productsBox_variantSizes js-variant_size is-active"}) try: spans = div.findAll("span") except AttributeError: return [] sizes = [] for val in (span.text for span in spans): sizes.append('eu' + format_size_number(val)) return sizes if __name__ == '__main__': log_format = { 'where': '%(module)s.%(funcName)s', 'type': '%(levelname)s', 'stack_trace': '%(exc_text)s', } logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger('') logger.setLevel(level=logging.DEBUG) h = asynchandler.FluentHandler('kicks.scraper.%s' % scraper_name, host='localhost', port=24224) h.setLevel(level=logging.DEBUG) formatter = handler.FluentRecordFormatter(log_format) h.setFormatter(formatter) logging.getLogger('').addHandler(h) items = sizeer_parse() writer = SessionedWriter(scraper_name, items) writer.write_items()