def a_async_handler(host='0.0.0.0', port=5000, formatter=None):
    """Build an asynchronous Logstash handler.

    :param str host: Logstash host to ship records to.
    :param int port: Logstash port.
    :param formatter: Optional formatter; when falsy, ``a_formatter()``
        supplies the default.
    :return: A configured ``AsynchronousLogstashHandler``.
    """
    handler = AsynchronousLogstashHandler(host, port, database_path=None)
    # Fall back to the project default formatter when none was given.
    if not formatter:
        formatter = a_formatter()
    handler.setFormatter(formatter)
    return handler
def get_logger():
    """Return the lazily-initialised module-level loguru logger.

    On first call, adds two rotating file sinks (general output and
    ERROR-and-above) under ``<project_root>/Log/`` and, when a logstash ip
    is configured, an asynchronous logstash sink. Subsequent calls return
    the cached logger.

    Fix: removed the leftover debug ``print('base_dir', BASE_DIR)``.
    """
    global hd_log
    if hd_log is None:
        # Project root: two directory levels up from this file.
        base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        log_file_path = os.path.join(base_dir, 'Log/python-web.log')
        err_log_file_path = os.path.join(base_dir, 'Log/python-web-err.log')
        # Automatically rotate too big file
        logger.add(log_file_path, rotation="50 MB", encoding='utf-8')
        logger.add(err_log_file_path, rotation="50 MB", encoding='utf-8',
                   level='ERROR')
        # logstash sink is optional: only attached when an ip is configured
        logstash_ip = get_config('logstash', 'ip')
        if logstash_ip:
            logstash_port = get_config('logstash', 'port')
            logstash_handler = AsynchronousLogstashHandler(
                logstash_ip, int(logstash_port), database_path=None)
            logstash_formatter = LogstashFormatter(
                message_type='python-logstash',
                extra_prefix='',
                extra=dict(app_name='python-web'))
            logstash_handler.setFormatter(logstash_formatter)
            logger.add(sink=logstash_handler)
        hd_log = logger
    return hd_log
def configure_logging(service_name):
    """
    Configure logging based on the settings in the settings file.

    This sets up a handler for each logging mode that is enabled.
    See `microservice.core.settings.LoggingMode` for the supported logging
    types.

    :param str service_name: Name of the service being served by this
        instance.
    """
    # All handlers hang off the root logger so every module's records flow
    # through them.
    logger = logging.getLogger()
    logger.setLevel(settings.logging_level)
    formatter_kwargs = {
        'fmt': json.dumps({'extra': {
            'local_service': service_name,
            # Basic way to distinguish logs between instances of the same microservice.
            'instance_id': random.randint(100000, 999999)
        }})
    }
    formatter = LogstashFormatterV1(**formatter_kwargs)
    # One handler per enabled mode; every handler gets the request-ID filter.
    if settings.LoggingMode.FILE in settings.logging_modes:
        file_handler = logging.FileHandler('{}.log'.format(service_name))
        file_handler.setFormatter(formatter)
        file_handler.addFilter(RequestIDLogFilter())
        logger.addHandler(file_handler)
    if settings.LoggingMode.HUMAN in settings.logging_modes:
        # Human-readable variant of the same structured record, on stdout.
        stdout_handler = logging.StreamHandler(sys.stdout)
        stdout_handler.setFormatter(HumanReadableLogstashFormatter(**formatter_kwargs))
        stdout_handler.addFilter(RequestIDLogFilter())
        logger.addHandler(stdout_handler)
    if settings.LoggingMode.STDOUT in settings.logging_modes:
        stdout_handler = logging.StreamHandler(sys.stdout)
        stdout_handler.setFormatter(formatter)
        stdout_handler.addFilter(RequestIDLogFilter())
        logger.addHandler(stdout_handler)
    if settings.LoggingMode.LOGSTASH in settings.logging_modes:
        # TODO: test this
        # NOTE(review): this raise makes the handler setup below unreachable
        # until the LOGSTASH path has been tested and the guard removed.
        raise Exception("Warning: untested")
        logstash_handler = AsynchronousLogstashHandler(
            **settings.logstash_settings)
        logstash_handler.setFormatter(formatter)
        logstash_handler.addFilter(RequestIDLogFilter())
        logger.addHandler(logstash_handler)
    if settings.LoggingMode.FLUENTD in settings.logging_modes:
        # TODO: test this
        # NOTE(review): unreachable guard, as in the LOGSTASH branch above.
        raise Exception("Warning: untested")
        fluentd_handler = handler.FluentHandler(
            'pycroservices.follow',
            **settings.fluentd_settings,
            buffer_overflow_handler=overflow_handler)
        fluentd_handler.setFormatter(formatter)
        fluentd_handler.addFilter(RequestIDLogFilter())
        logger.addHandler(fluentd_handler)
def configure_logging(self):
    """Configure logging for the simulated runner.

    Wires up four handlers — stdout (errors only), a human-readable log
    file, a JSON log file, and an asynchronous logstash shipper — each with
    the appropriate formatter and filters, then installs them on the root
    logger via ``dist_zero.logging.set_handlers``.

    Fix: removed the unused local ``dist_zero_logger`` (``getLogger`` is
    idempotent and nothing read the variable).
    """
    # Filters
    str_format_filter = dist_zero.logging.StrFormatFilter()
    context = {
        'env': settings.DIST_ZERO_ENV,
        'mode': runners.MODE_SIMULATED,
        'runner': True,
        'simulator_id': self.id,
        'start_at': self._start_datetime,
    }
    if settings.LOGZ_IO_TOKEN:
        context['token'] = settings.LOGZ_IO_TOKEN
    context_filter = dist_zero.logging.ContextFilter(context)

    # Formatters
    human_formatter = dist_zero.logging.HUMAN_FORMATTER
    json_formatter = dist_zero.logging.JsonFormatter(
        '(asctime) (levelname) (name) (message)')

    # Handlers
    stdout_handler = logging.StreamHandler(sys.stdout)
    human_file_handler = logging.FileHandler('./.tmp/simulator.log')
    json_file_handler = logging.FileHandler('./.tmp/simulator.json.log')
    logstash_handler = AsynchronousLogstashHandler(
        settings.LOGSTASH_HOST,
        settings.LOGSTASH_PORT,
        database_path='./.tmp/logstash.db',
    )

    stdout_handler.setLevel(logging.ERROR)
    human_file_handler.setLevel(logging.DEBUG)
    json_file_handler.setLevel(logging.DEBUG)
    logstash_handler.setLevel(logging.DEBUG)

    stdout_handler.setFormatter(human_formatter)
    human_file_handler.setFormatter(human_formatter)
    json_file_handler.setFormatter(json_formatter)
    logstash_handler.setFormatter(json_formatter)

    # Context (env/mode/ids) is only attached to the structured sinks.
    stdout_handler.addFilter(str_format_filter)
    human_file_handler.addFilter(str_format_filter)
    json_file_handler.addFilter(str_format_filter)
    json_file_handler.addFilter(context_filter)
    logstash_handler.addFilter(str_format_filter)
    logstash_handler.addFilter(context_filter)

    # Loggers
    root_logger = logging.getLogger()
    dist_zero.logging.set_handlers(root_logger, [
        json_file_handler,
        human_file_handler,
        logstash_handler,
        stdout_handler,
    ])
def _bind_handlers(self):
    """Attach an asynchronous Logstash handler to the wrapped logger.

    Connection details come from the ``LOGSTASH_HOST`` / ``LOGSTASH_PORT``
    environment variables (loaded via ``load_dotenv``); no on-disk event
    buffer is used (``database_path=None``).
    """
    load_dotenv()
    logstash_handler = AsynchronousLogstashHandler(
        host=os.environ["LOGSTASH_HOST"],
        port=int(os.environ["LOGSTASH_PORT"]),
        database_path=None,
    )
    logstash_handler.setFormatter(FlaskLogstashFormatter())
    self._logger.addHandler(logstash_handler)
def get_handler(extra=None, formatter=None):
    """Build an asynchronous Logstash handler, recording host/port in ``extra``.

    :param dict extra: Optional extra fields for the formatter; the
        logstash host and port are always added.
    :param formatter: Optional formatter; when falsy, ``get_formatter``
        builds one from ``extra``.
    :return: A configured ``AsynchronousLogstashHandler``.

    Fix: the original used a mutable default ``extra={}`` and mutated it,
    so fields leaked between calls (and into the caller's dict). We now
    copy the input instead.
    """
    extra = dict(extra) if extra else {}
    extra['logstash_host'] = logstash_host
    extra['logstash_port'] = logstash_port
    if not formatter:
        formatter = get_formatter(extra)
    handler = AsynchronousLogstashHandler(logstash_host, logstash_port,
                                          database_path=database_path)
    handler.setFormatter(formatter)
    return handler
def __init__(self):
    """Set up the "logstash" logger with an asynchronous TCP handler.

    Host and port come from ``LOGSTASH_HOST`` / ``LOGSTASH_PORT``,
    defaulting to ``localhost:5044`` when unset (or when the port is not a
    valid integer).

    Fix: replaced two bare ``except:`` clauses (which would also swallow
    ``KeyboardInterrupt``/``SystemExit``) with the specific exceptions the
    lookups can raise.
    """
    self.logger = logging.getLogger("logstash")
    self.logger.setLevel(logging.INFO)
    host = os.environ.get("LOGSTASH_HOST", "localhost")
    try:
        port = int(os.environ["LOGSTASH_PORT"])
    except (KeyError, ValueError):
        # Missing variable or non-numeric value -> default Beats/Logstash port.
        port = 5044
    handler = AsynchronousLogstashHandler(host=host, port=port,
                                          ssl_enable=False, ssl_verify=False,
                                          database_path='')
    formatter = LogstashFormatter()
    handler.setFormatter(formatter)
    self.logger.addHandler(handler)
    self.info("Init logger")
def initiate_log():
    """Create and return the DEBUG-level "logstash" logger shipping to logit.io.

    Records are sent asynchronously over TLS (certificate verification
    disabled) with no on-disk SQLite buffer (``database_path=''``).
    """
    log = logging.getLogger("logstash")
    log.setLevel(logging.DEBUG)
    transport = AsynchronousLogstashHandler(
        host='ab5413e7-7e28-45a6-bdaa-d6c3e00cab46-ls.logit.io',
        port=27421,
        ssl_enable=True,
        ssl_verify=False,
        database_path='')
    # Additional formatting of the record/message happens here.
    transport.setFormatter(LogstashFormatter())
    log.addHandler(transport)
    return log
def initLogger(appConfig: dict):
    """Initialise the application logger and cache it on ``AppLogger``.

    Always attaches a console handler; additionally attaches an
    asynchronous Logstash handler when ``appConfig`` provides both
    ``logstashHost`` and ``logstashPort`` (i.e. neither is NaN).

    :param dict appConfig: Configuration with ``logstashHost`` and
        ``logstashPort`` keys (values may be NaN when unset).
    """
    # Formatter used only for records shipped to logstash.
    logstashFormatter = LogstashFormatter(
        message_type='python-logstash',
        extra=dict(application='mis_weekly_report_gen_service'))

    # App logger name and minimum logging level.
    appLogger = logging.getLogger('python-logstash-logger')
    appLogger.setLevel(logging.INFO)

    # Console logging; formatter intentionally left as the default.
    appLogger.addHandler(logging.StreamHandler())

    host = appConfig["logstashHost"]
    port = appConfig["logstashPort"]
    # Only wire up logstash when both settings are present (not NaN).
    if not pd.isna(host) and not pd.isna(port):
        logstashHandler = AsynchronousLogstashHandler(
            host, port, database_path='logstash.db')
        logstashHandler.setFormatter(logstashFormatter)
        appLogger.addHandler(logstashHandler)

    AppLogger.__instance = appLogger
logstash_formatter = LogstashFormatter(message_type='python-logstash', extra_prefix='dev', extra=dict(application='example-app', environment='production')) test_logger = logging.getLogger('python-logstash-logger') test_logger.setLevel(logging.INFO) streamHandler = logging.StreamHandler() # streamHandler.setFormatter(logstash_formatter) test_logger.addHandler(streamHandler) logstashHandler = AsynchronousLogstashHandler(host, port, database_path='logstash.db') logstashHandler.setFormatter(logstash_formatter) test_logger.addHandler(logstashHandler) # If you don't want to write to a SQLite database, then you do # not have to specify a database_path. # NOTE: Without a database, messages are lost between process restarts. # test_logger.addHandler(AsynchronousLogstashHandler(host, port)) test_logger.error('python-logstash-async: test logstash error message.') test_logger.info('python-logstash-async: test logstash info message.') test_logger.warning('python-logstash-async: test logstash warning message.') # add extra field to logstash message extra = dict(test_string="python_version-" + repr(sys.version_info), test_boolean=True, test_dict={
# Create the logger and set it's logging level logger_logit = logging.getLogger("logstash") logger_logit.setLevel(logging.ERROR) # Create the handler handler = AsynchronousLogstashHandler( host='fc652908-5b50-4887-8af2-89286e6febe1-ls.logit.io', port=17326, ssl_enable=True, ssl_verify=False, database_path='') # Here you can specify additional formatting on your log record/message formatter = LogstashFormatter(message_type='python-logstash', extra_prefix='extra', extra=dict(mikrostoritev='imageUpload', okolje='production')) handler.setFormatter(formatter) # Assign handler to the logger logger_logit.addHandler(handler) try: # if config_path.exists(): if os.path.exists(config_path): exec(open(config_path).read()) else: # exec(open(config_path_template).read()) pass except Exception as e: print("No configuration files found: ", e) class Config(object):
def __init__(self, level=logging.DEBUG):
    """Initialise the XprLogger: load config, then attach file and logstash handlers.

    :param level: Minimum logging level for this logger
        (default ``logging.DEBUG``).
    :raises Exception: re-raised if any handler cannot be attached.
    """
    # Load the logging configuration; optionally re-resolve it recursively.
    self.xpr_config = XprConfigParser(
        config_file_path=XprConfigParser.DEFAULT_CONFIG_PATH_XPR_LOG)
    if self.xpr_config[self.LOGGING_SECTION][self.FIND_CONFIG_RECURSIVE]:
        self.xpr_config = self.load_config("xpr")
    self.name = self.xpr_config[self.PROJECT_NAME]
    super(XprLogger, self).__init__(self.name)
    self.setLevel(level)
    # Separate formatters for plain files vs. logstash records.
    logger_formatter = XprCustomFormatter(
        self.xpr_config[self.LOGGING_SECTION][self.FORMATTER])
    logstash_formatter = XprLogstashCustomFormatter(
        self.xpr_config[self.LOGGING_SECTION][self.FORMATTER])
    log_folder = os.path.expanduser(
        self.xpr_config[self.LOGGING_SECTION][self.LOGS_FOLDER_PATH])
    if not os.path.exists(log_folder):
        try:
            os.makedirs(log_folder, 0o755)
        except IOError as err:
            # Folder creation failure is reported but not fatal; the file
            # handlers below fall back to the home directory.
            print(
                "Permission Denied to create logs folder at the specidied directory. \n{}".format(
                    str(err)))
    # Adding file handler for levels below warning
    try:
        if self.xpr_config[self.LOGGING_SECTION][self.LOGGING_FILE_BOOL]:
            try:
                wfh = logging.FileHandler(os.path.join(
                    log_folder,
                    '.'.join((self.xpr_config[self.PROJECT_NAME], "log"))),
                    'w')
            except IOError as err:
                # Fall back to <home>/<project>.log when log_folder is not
                # writable.
                print("Permission denied to create log files. "
                      "Saving log files in base directory . \n{}".format(
                          str(err)))
                wfh = logging.FileHandler(
                    os.path.join(os.path.expanduser("~"),
                                 '.'.join((self.xpr_config[
                                     self.PROJECT_NAME], "log"))), 'w')
            wfh.setFormatter(logger_formatter)
            # NOTE(review): despite the comment above, this handler's level
            # is DEBUG, so it receives warnings and above as well.
            wfh.setLevel(logging.DEBUG)
            self.addHandler(wfh)
    except Exception as err:
        print("Unable to add file handler to logger. \n{}".format(str(err)))
        raise err
    # Adding file handler for levels more critical than warning
    try:
        if self.xpr_config[self.LOGGING_SECTION][self.LOGGING_FILE_BOOL]:
            try:
                efh = logging.FileHandler(os.path.join(
                    log_folder,
                    '.'.join((self.xpr_config[self.PROJECT_NAME], "err"))),
                    'w')
            except IOError as err:
                # Same home-directory fallback for the error log.
                print("Permission denied to create log files. "
                      "Saving log files in base directory . \n{}".format(
                          str(err)))
                efh = logging.FileHandler(
                    os.path.join(os.path.expanduser("~"),
                                 '.'.join((self.xpr_config[
                                     self.PROJECT_NAME], "err"))), 'w')
            efh.setFormatter(logger_formatter)
            efh.setLevel(logging.ERROR)
            self.addHandler(efh)
    except Exception as err:
        print(
            "Unable to add file handler to logger . \n{}".format(str(err)))
        raise err
    # Adding logstash logging handler
    try:
        if self.xpr_config[self.LOGGING_SECTION][
                self.LOGGING_LOGSTASH_BOOL]:
            # Empty database_path disables the on-disk event cache unless
            # the cache option is enabled in config.
            cache_filename = ""
            if self.xpr_config[self.LOGGING_SECTION][
                    self.LOGSTASH_CACHE_BOOL]:
                cache_filename = os.path.join(
                    log_folder, "cache.persistence")
            lh = AsynchronousLogstashHandler(
                host=self.xpr_config[self.LOGGING_SECTION][
                    self.LOGSTASH_HOST],
                port=self.xpr_config[self.LOGGING_SECTION][
                    self.LOGSTASH_PORT],
                database_path=cache_filename)
            lh.setFormatter(logstash_formatter)
            self.addHandler(lh)
    except Exception as err:
        print("Unable to add logstash handler to logger. \n{}".format(
            str(err)))
        raise err
def configure_logging(self):
    """Configure logging for a `MachineController`.

    Wires up four handlers — stdout (errors only), a human-readable log
    file, a JSON log file (both under the container logs directory), and
    an asynchronous logstash shipper — then installs them on the
    DEBUG-level root logger via ``dist_zero.logging.set_handlers``.

    Fix: removed the unused local ``dist_zero_logger`` (``getLogger`` is
    idempotent and nothing read the variable).
    """
    # Filters
    str_format_filter = dist_zero.logging.StrFormatFilter()
    context = {
        'env': settings.DIST_ZERO_ENV,
        'mode': self.mode,
        'runner': False,
        'machine_id': self.id,
        'machine_name': self.name,
    }
    if settings.LOGZ_IO_TOKEN:
        context['token'] = settings.LOGZ_IO_TOKEN
    context_filter = dist_zero.logging.ContextFilter(context)

    # Formatters
    human_formatter = dist_zero.logging.HUMAN_FORMATTER
    json_formatter = dist_zero.logging.JsonFormatter(
        '(asctime) (levelname) (name) (message)')

    # Handlers
    stdout_handler = logging.StreamHandler(sys.stdout)
    human_file_handler = logging.FileHandler(
        os.path.join(docker.DockerSimulatedHardware.CONTAINER_LOGS_DIR,
                     'output.log'))
    json_file_handler = logging.FileHandler(
        os.path.join(docker.DockerSimulatedHardware.CONTAINER_LOGS_DIR,
                     'output.json.log'))
    logstash_handler = AsynchronousLogstashHandler(
        settings.LOGSTASH_HOST,
        settings.LOGSTASH_PORT,
        database_path='/.logstash.db',
    )

    stdout_handler.setLevel(logging.ERROR)
    human_file_handler.setLevel(logging.DEBUG)
    json_file_handler.setLevel(logging.DEBUG)
    logstash_handler.setLevel(logging.DEBUG)

    stdout_handler.setFormatter(human_formatter)
    human_file_handler.setFormatter(human_formatter)
    json_file_handler.setFormatter(json_formatter)
    logstash_handler.setFormatter(json_formatter)

    # Context (env/mode/machine ids) is only attached to the structured sinks.
    stdout_handler.addFilter(str_format_filter)
    human_file_handler.addFilter(str_format_filter)
    json_file_handler.addFilter(str_format_filter)
    json_file_handler.addFilter(context_filter)
    logstash_handler.addFilter(str_format_filter)
    logstash_handler.addFilter(context_filter)

    # Loggers
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG)
    dist_zero.logging.set_handlers(root_logger, [
        json_file_handler,
        human_file_handler,
        logstash_handler,
        stdout_handler,
    ])