def _setup_logging(self, log_level): """Reinitialize logging post-fork to clear all handlers, file descriptors, locks etc. This must happen first thing post-fork, before any further logging is emitted. """ # Re-initialize the childs logging locks post-fork to avoid potential deadlocks if pre-fork # threads have any locks acquired at the time of fork. logging._lock = threading.RLock() if logging.thread else None for handler in logging.getLogger().handlers: handler.createLock() # Invoke a global teardown for all logging handlers created before now. logging.shutdown() # Reinitialize logging for the daemon context. setup_logging(log_level, console_stream=None, log_dir=self._log_dir, log_name=self.LOG_NAME) # Close out pre-fork file descriptors. self._close_fds() # Redirect stdio to the root logger. sys.stdout = _StreamLogger(logging.getLogger(), logging.INFO) sys.stderr = _StreamLogger(logging.getLogger(), logging.WARN) self._logger.debug('logging initialized')
def _setup_logging(self, global_options): """Sets global logging.""" # N.B. quiet help says 'Squelches all console output apart from errors'. level = 'ERROR' if global_options.quiet else global_options.level.upper() setup_logging(level, log_dir=global_options.logdir) # This routes warnings through our loggers instead of straight to raw stderr. logging.captureWarnings(True)
def _setup_logging(self, global_options): """Sets global logging.""" # N.B. quiet help says 'Squelches all console output apart from errors'. level = 'ERROR' if global_options.quiet else global_options.level.upper( ) setup_logging(level, console_stream=sys.stderr, log_dir=global_options.logdir)
def _pantsd_logging(self): """A context manager that runs with pantsd logging. Asserts that stdio (represented by file handles 0, 1, 2) is closed to ensure that we can safely reuse those fd numbers. """ # Ensure that stdio is closed so that we can safely reuse those file descriptors. for fd in (0, 1, 2): try: os.fdopen(fd) raise AssertionError( 'pantsd logging cannot initialize while stdio is open: {}'. format(fd)) except OSError: pass # Redirect stdio to /dev/null for the rest of the run, to reserve those file descriptors # for further forks. with stdio_as(stdin_fd=-1, stdout_fd=-1, stderr_fd=-1): # Reinitialize logging for the daemon context. result = setup_logging(self._log_level, log_dir=self._log_dir, log_name=self.LOG_NAME) # Do a python-level redirect of stdout/stderr, which will not disturb `0,1,2`. # TODO: Consider giving these pipes/actual fds, in order to make them "deep" replacements # for `1,2`, and allow them to be used via `stdio_as`. sys.stdout = _LoggerStream(logging.getLogger(), logging.INFO, result.log_handler) sys.stderr = _LoggerStream(logging.getLogger(), logging.WARN, result.log_handler) self._logger.debug('logging initialized') yield result.log_handler.stream
def _pantsd_logging(self): """A context manager that runs with pantsd logging. Asserts that stdio (represented by file handles 0, 1, 2) is closed to ensure that we can safely reuse those fd numbers. """ # Ensure that stdio is closed so that we can safely reuse those file descriptors. for fd in (0, 1, 2): try: os.fdopen(fd) raise AssertionError( 'pantsd logging cannot initialize while stdio is open: {}'.format(fd)) except OSError: pass # Redirect stdio to /dev/null for the rest of the run, to reserve those file descriptors # for further forks. with stdio_as(stdin_fd=-1, stdout_fd=-1, stderr_fd=-1): # Reinitialize logging for the daemon context. result = setup_logging(self._log_level, log_dir=self._log_dir, log_name=self.LOG_NAME) # Do a python-level redirect of stdout/stderr, which will not disturb `0,1,2`. # TODO: Consider giving these pipes/actual fds, in order to make them "deep" replacements # for `1,2`, and allow them to be used via `stdio_as`. sys.stdout = _LoggerStream(logging.getLogger(), logging.INFO, result.log_handler) sys.stderr = _LoggerStream(logging.getLogger(), logging.WARN, result.log_handler) self._logger.debug('logging initialized') yield result.log_handler.stream
def logger(self, level, file_logging=False):
    logger = logging.getLogger(str(uuid.uuid4()))
    with closing(six.StringIO()) as stream:
        with self.log_dir(file_logging) as log_dir:
            log_file = setup_logging(
                level, console_stream=stream, log_dir=log_dir, scope=logger.name)
            yield logger, stream, log_file
def _setup_logging(self, log_level): """Initializes logging.""" # Reinitialize logging for the daemon context. result = setup_logging(log_level, log_dir=self._log_dir, log_name=self.LOG_NAME) # Close out tty file descriptors. self._close_fds() # Redirect stdio to the root logger. sys.stdout = _LoggerStream(logging.getLogger(), logging.INFO, result.log_stream) sys.stderr = _LoggerStream(logging.getLogger(), logging.WARN, result.log_stream) self._logger.debug('logging initialized') return result.log_stream
def _setup_logging(self, global_options): """Sets global logging.""" # N.B. quiet help says 'Squelches all console output apart from errors'. level = "ERROR" if global_options.quiet else global_options.level.upper() setup_logging(level, console_stream=sys.stderr, log_dir=global_options.logdir)
def _setup_logging(self, global_options):
    # NB: quiet help says 'Squelches all console output apart from errors'.
    level = 'ERROR' if global_options.quiet else global_options.level.upper()
    setup_logging(level, log_dir=global_options.logdir)
def _setup_logging(self, quiet, level, log_dir):
    """Initializes logging."""
    # N.B. quiet help says 'Squelches all console output apart from errors'.
    level = 'ERROR' if quiet else level.upper()
    setup_logging(level, console_stream=sys.stderr, log_dir=log_dir)
def logger(self, level, file_logging=False):
    logger = logging.getLogger(str(uuid.uuid4()))
    with closing(six.StringIO()) as stream:
        with self.log_dir(file_logging) as log_dir:
            log_file = setup_logging(
                level, console_stream=stream, log_dir=log_dir, scope=logger.name)
            yield logger, stream, log_file.log_filename
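Assuming the helper above is wrapped with @contextmanager on a unittest-style test mixin, a hedged usage sketch might look like the following; the assertions and log message are illustrative only.

# Illustrative usage sketch of the logger() helper above.
with self.logger('INFO', file_logging=True) as (logger, stream, log_filename):
    logger.info('hello from the test')
    # Console output was captured in the StringIO passed as console_stream.
    self.assertIn('hello from the test', stream.getvalue())
    # The yielded path points at the per-scope log file written under log_dir.
    self.assertTrue(os.path.exists(log_filename))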