def configure_logging(service_name):
    """Configure root logging based on the settings in the settings file.

    Installs one handler on the root logger for each logging mode enabled in
    ``settings.logging_modes``.  See `microservice.core.settings.LoggingMode`
    for the supported logging types.

    :param str service_name: Name of the service being served by this instance.
    :raises Exception: if the untested LOGSTASH or FLUENTD modes are enabled.
    """
    logger = logging.getLogger()
    logger.setLevel(settings.logging_level)

    # Logstash-style formatters take a JSON `fmt` whose `extra` dict is merged
    # into every emitted record.
    formatter_kwargs = {
        'fmt': json.dumps({'extra': {
            'local_service': service_name,
            # Basic way to distinguish logs between instances of the same
            # microservice.
            'instance_id': random.randint(100000, 999999),
        }})
    }
    formatter = LogstashFormatterV1(**formatter_kwargs)

    if settings.LoggingMode.FILE in settings.logging_modes:
        file_handler = logging.FileHandler('{}.log'.format(service_name))
        file_handler.setFormatter(formatter)
        file_handler.addFilter(RequestIDLogFilter())
        logger.addHandler(file_handler)

    if settings.LoggingMode.HUMAN in settings.logging_modes:
        stdout_handler = logging.StreamHandler(sys.stdout)
        stdout_handler.setFormatter(
            HumanReadableLogstashFormatter(**formatter_kwargs))
        stdout_handler.addFilter(RequestIDLogFilter())
        logger.addHandler(stdout_handler)

    if settings.LoggingMode.STDOUT in settings.logging_modes:
        stdout_handler = logging.StreamHandler(sys.stdout)
        stdout_handler.setFormatter(formatter)
        stdout_handler.addFilter(RequestIDLogFilter())
        logger.addHandler(stdout_handler)

    if settings.LoggingMode.LOGSTASH in settings.logging_modes:
        # TODO: test this mode.  Until it is tested, enabling it fails loudly
        # rather than silently shipping logs through an unverified pipeline.
        raise Exception("Warning: untested")
        # Unreachable handler setup removed.  When tested, build the handler
        # with AsynchronousLogstashHandler(**settings.logstash_settings), set
        # `formatter`, add a RequestIDLogFilter(), and attach it to `logger`.

    if settings.LoggingMode.FLUENTD in settings.logging_modes:
        # TODO: test this mode.  Fails loudly for the same reason as LOGSTASH.
        raise Exception("Warning: untested")
        # Unreachable handler setup removed.  When tested, build the handler
        # with handler.FluentHandler('pycroservices.follow',
        # **settings.fluentd_settings,
        # buffer_overflow_handler=overflow_handler), set `formatter`, add a
        # RequestIDLogFilter(), and attach it to `logger`.
def configure_logging(self):
    """Configure logging for the simulated runner.

    Sends human-readable output to stdout (errors only) and to a local log
    file, and JSON output to a local file and to a logstash endpoint, tagging
    JSON records with the simulation context.
    """
    # Filters
    str_format_filter = dist_zero.logging.StrFormatFilter()
    context = {
        'env': settings.DIST_ZERO_ENV,
        'mode': runners.MODE_SIMULATED,
        'runner': True,
        'simulator_id': self.id,
        'start_at': self._start_datetime,
    }
    if settings.LOGZ_IO_TOKEN:
        context['token'] = settings.LOGZ_IO_TOKEN
    context_filter = dist_zero.logging.ContextFilter(context)

    # Formatters
    human_formatter = dist_zero.logging.HUMAN_FORMATTER
    json_formatter = dist_zero.logging.JsonFormatter(
        '(asctime) (levelname) (name) (message)')

    # Handlers
    stdout_handler = logging.StreamHandler(sys.stdout)
    human_file_handler = logging.FileHandler('./.tmp/simulator.log')
    json_file_handler = logging.FileHandler('./.tmp/simulator.json.log')
    logstash_handler = AsynchronousLogstashHandler(
        settings.LOGSTASH_HOST,
        settings.LOGSTASH_PORT,
        database_path='./.tmp/logstash.db',
    )

    stdout_handler.setLevel(logging.ERROR)
    human_file_handler.setLevel(logging.DEBUG)
    json_file_handler.setLevel(logging.DEBUG)
    logstash_handler.setLevel(logging.DEBUG)

    stdout_handler.setFormatter(human_formatter)
    human_file_handler.setFormatter(human_formatter)
    json_file_handler.setFormatter(json_formatter)
    logstash_handler.setFormatter(json_formatter)

    stdout_handler.addFilter(str_format_filter)
    human_file_handler.addFilter(str_format_filter)
    json_file_handler.addFilter(str_format_filter)
    json_file_handler.addFilter(context_filter)
    logstash_handler.addFilter(str_format_filter)
    logstash_handler.addFilter(context_filter)

    # Loggers
    root_logger = logging.getLogger()
    # Without lowering the root logger below its WARNING default, the
    # DEBUG-level handlers configured above never receive DEBUG/INFO records.
    # The MachineController variant of this method already sets this.
    root_logger.setLevel(logging.DEBUG)
    dist_zero.logging.set_handlers(root_logger, [
        json_file_handler,
        human_file_handler,
        logstash_handler,
        stdout_handler,
    ])
def configure_logging(self):
    """Configure logging for a `MachineController`."""
    # Context attached to every JSON/logstash record for this machine.
    context = {
        'env': settings.DIST_ZERO_ENV,
        'mode': self.mode,
        'runner': False,
        'machine_id': self.id,
        'machine_name': self.name,
    }
    if settings.LOGZ_IO_TOKEN:
        context['token'] = settings.LOGZ_IO_TOKEN

    # Filters and formatters shared by the handlers below.
    str_format_filter = dist_zero.logging.StrFormatFilter()
    context_filter = dist_zero.logging.ContextFilter(context)
    human_formatter = dist_zero.logging.HUMAN_FORMATTER
    json_formatter = dist_zero.logging.JsonFormatter(
        '(asctime) (levelname) (name) (message)')

    # One handler per output channel.
    logs_dir = docker.DockerSimulatedHardware.CONTAINER_LOGS_DIR
    stdout_handler = logging.StreamHandler(sys.stdout)
    human_file_handler = logging.FileHandler(
        os.path.join(logs_dir, 'output.log'))
    json_file_handler = logging.FileHandler(
        os.path.join(logs_dir, 'output.json.log'))
    logstash_handler = AsynchronousLogstashHandler(
        settings.LOGSTASH_HOST,
        settings.LOGSTASH_PORT,
        database_path='/.logstash.db',
    )

    # (handler, level, formatter, filters) for each channel; the two JSON
    # channels additionally carry the machine context.
    channels = [
        (stdout_handler, logging.ERROR, human_formatter,
         [str_format_filter]),
        (human_file_handler, logging.DEBUG, human_formatter,
         [str_format_filter]),
        (json_file_handler, logging.DEBUG, json_formatter,
         [str_format_filter, context_filter]),
        (logstash_handler, logging.DEBUG, json_formatter,
         [str_format_filter, context_filter]),
    ]
    for chan, level, formatter, filters in channels:
        chan.setLevel(level)
        chan.setFormatter(formatter)
        for log_filter in filters:
            chan.addFilter(log_filter)

    # Loggers
    dist_zero_logger = logging.getLogger('dist_zero')
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG)
    dist_zero.logging.set_handlers(root_logger, [
        json_file_handler,
        human_file_handler,
        logstash_handler,
        stdout_handler,
    ])