def emit(self, record):
    """
    Emit a record, reopening the log file first if it was moved or deleted.

    The file is stat'ed by path exactly once; if a new stream is opened,
    fresh stat info is taken from the new file descriptor to reduce race
    windows (see CPython issue #14632).  This layers WatchedFileHandler-like
    behaviour on top of TimedRotatingFileHandler.
    """
    try:
        # Stat by path to learn whether the file still exists on disk.
        path_stat = os.stat(self.baseFilename)
    except OSError as err:
        # A missing file is expected (e.g. rotated away externally);
        # any other stat failure is a real error and must propagate.
        if err.errno != errno.ENOENT:
            raise
        path_stat = None

    # Reopen when the file vanished, or its device/inode no longer match
    # the stream we currently hold (i.e. the path now names a new file).
    stale = (
        not path_stat
        or path_stat[ST_DEV] != self.dev
        or path_stat[ST_INO] != self.ino
    )
    if stale:
        if self.stream is not None:
            # Flush and release the stale handle before reopening.
            self.stream.flush()
            self.stream.close()
        # Open a new handle and refresh dev/ino from that fd.
        self.stream = self._open()
        self._statstream()
    TimedRotatingFileHandler.emit(self, record)
class MultiProcessingLogHandler(logging.Handler):
    """
    Handler that funnels records through a multiprocessing queue into a
    single TimedRotatingFileHandler, so several processes can log to one
    timed-rotating file without interleaving writes.

    A daemon thread in the owning process drains the queue and writes
    each record with the wrapped handler.
    """

    def __init__(self, *args, **kwargs):
        logging.Handler.__init__(self)
        # The wrapped handler does the actual file writing and rotation.
        self._handler = TimedRotatingFileHandler(*args, **kwargs)
        self.queue = multiprocessing.Queue(-1)
        self.stream = self._handler.stream
        t = threading.Thread(target=self.receive)
        t.daemon = True
        t.start()

    def setFormatter(self, fmt):
        """Apply *fmt* to both this handler and the wrapped file handler."""
        logging.Handler.setFormatter(self, fmt)
        self._handler.setFormatter(fmt)

    def receive(self):
        """Drain the queue forever, emitting each record (daemon thread)."""
        while True:
            try:
                record = self.queue.get()
                self._handler.emit(record)
            except (KeyboardInterrupt, SystemExit):
                raise
            except EOFError:
                # The queue's pipe closed - the producing side went away.
                break
            except Exception as e:
                # BUGFIX: was Python 2 syntax ('except Exception, e' and a
                # print statement), which is a SyntaxError under Python 3.
                print("ERROR! %s %s" % (type(e), e))
                traceback.print_exc(file=sys.stderr)
class MultiProcessingLog(logging.Handler):
    """
    Multiprocess-safe wrapper around TimedRotatingFileHandler.

    Records are stringified and put on a multiprocessing queue; a daemon
    thread in the owning process drains the queue and writes with the
    wrapped handler, so only one process ever touches the file.
    """

    def __init__(self, filename, when='d', interval=1, backup_count=0,
                 encoding=None, delay=False, utc=False):
        logging.Handler.__init__(self)
        self._handler = TimedRotatingFileHandler(
            filename, when=when, interval=interval,
            backupCount=backup_count, encoding=encoding,
            delay=delay, utc=utc)
        self.queue = multiprocessing.Queue(-1)
        t = threading.Thread(target=self.receive)
        t.daemon = True
        t.start()

    def setFormatter(self, fmt):
        """Apply *fmt* to both this handler and the wrapped file handler."""
        logging.Handler.setFormatter(self, fmt)
        self._handler.setFormatter(fmt)

    def receive(self):
        """Drain the queue forever, emitting each record (daemon thread)."""
        while True:
            try:
                record = self.queue.get()
                self._handler.emit(record)
            except (KeyboardInterrupt, SystemExit):
                raise
            except EOFError:
                # Queue closed - the producing side went away.
                break
            except Exception:
                # BUGFIX: narrowed from a bare 'except:' so BaseExceptions
                # (e.g. GeneratorExit) are not silently swallowed.
                traceback.print_exc(file=sys.stderr)

    def send(self, s):
        """Enqueue a prepared record without blocking."""
        self.queue.put_nowait(s)

    def _format_record(self, record):
        # Ensure that exc_info and args have been stringified.  Removes any
        # chance of unpickleable things inside and possibly reduces message
        # size sent over the pipe.
        if record.args:
            try:
                record.msg = record.msg % record.args
            except TypeError as e:
                # Best-effort: keep the raw args appended so no data is lost.
                print(str(e))
                print(traceback.format_exc())
                record.msg = record.msg + ", args: %s" % str(record.args)
            record.args = None
        if record.exc_info:
            # Formatting caches exc_text on the record, so exc_info (which
            # may be unpicklable) can be dropped.
            dummy = self.format(record)
            record.exc_info = None
        return record

    def emit(self, record):
        """Prepare the record and hand it to the writer thread's queue."""
        try:
            s = self._format_record(record)
            self.send(s)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            # BUGFIX: narrowed from a bare 'except:'.
            self.handleError(record)

    def close(self):
        """Close the wrapped file handler, then this handler."""
        self._handler.close()
        logging.Handler.close(self)
class TimedRotatingStreamListener(StreamListener):
    """ A Twitter stream listener that writes to a timed rotating file. """

    def __init__(self, log_dir, prefix, when_interval=None):
        """ Init with output path and name prefix.

        Default rollover interval is one hour.

        :param str log_dir: Where to write logged data.
        :param str prefix: Filename prefix for logged data.
        :param tuple when_interval: Listener rollover interval, e.g.
            ('h', 1).  See
            https://docs.python.org/2.7/library/logging.handlers.html
            #logging.handlers.TimedRotatingFileHandler for valid values.
        """
        log_dir = os.path.abspath(log_dir)
        if not os.path.exists(log_dir):
            os.mkdir(log_dir)
        if not isinstance(prefix, str):
            raise ValueError("Prefix string required for output files")
        if when_interval is None:
            when, interval = 'H', 1
        else:
            when, interval = when_interval
        if not (isinstance(when, str) and isinstance(interval, int)):
            raise ValueError("Rollover interval should be like ('h', 1)")
        log_path = os.path.join(log_dir, prefix)
        _LOGGER.info("Saving data to path {}".format(log_path))
        _LOGGER.info("Rollover interval {}"
                     .format("{}{}".format(when, interval)))
        # Create the logger.
        self._logger = TimedRotatingFileHandler(filename=log_path,
                                                encoding='utf-8',
                                                when=when,
                                                interval=interval)

    def on_data(self, data):
        """ Log stream data, skipping keep-alive newlines. """
        data_stripped = data.strip()
        # BUGFIX: test the *stripped* payload; the original tested the raw
        # data, so blank keep-alive lines were never actually skipped.
        if len(data_stripped) > 0:
            record = LogRecord(None, None, None, None, data_stripped,
                               (), None)
            self._logger.emit(record)

    def on_error(self, status):
        """ Print status to stderr and raise exception. """
        sys.stderr.write('{}\n'.format(status))
        raise TweepError(status)
class TimedRotatingStreamListener(StreamListener):
    """ A Twitter stream listener that writes to a timed rotating file. """

    def __init__(self, log_dir, prefix, when_interval=None):
        """ Init with output path and name prefix.

        Default rollover interval is one hour.

        :param str log_dir: Where to write logged data.
        :param str prefix: Filename prefix for logged data.
        :param tuple when_interval: Listener rollover interval, e.g.
            ('h', 1).  See
            https://docs.python.org/2.7/library/logging.handlers.html
            #logging.handlers.TimedRotatingFileHandler for valid values.
        """
        log_dir = os.path.abspath(log_dir)
        if not os.path.exists(log_dir):
            os.mkdir(log_dir)
        if not isinstance(prefix, str):
            raise ValueError("Prefix string required for output files")
        if when_interval is None:
            when, interval = 'H', 1
        else:
            when, interval = when_interval
        if not (isinstance(when, str) and isinstance(interval, int)):
            raise ValueError("Rollover interval should be like ('h', 1)")
        log_path = os.path.join(log_dir, prefix)
        _LOGGER.info("Saving data to path {}".format(log_path))
        _LOGGER.info("Rollover interval {}".format("{}{}".format(
            when, interval)))
        # Create the logger.
        self._logger = TimedRotatingFileHandler(filename=log_path,
                                                encoding='utf-8',
                                                when=when,
                                                interval=interval)

    def on_data(self, data):
        """ Log stream data, skipping keep-alive newlines. """
        data_stripped = data.strip()
        # BUGFIX: test the *stripped* payload; the original tested the raw
        # data, so blank keep-alive lines were never actually skipped.
        if len(data_stripped) > 0:
            record = LogRecord(None, None, None, None, data_stripped,
                               (), None)
            self._logger.emit(record)

    def on_error(self, status):
        """ Print status to stderr and raise exception. """
        sys.stderr.write('{}\n'.format(status))
        raise TweepError(status)
def emit(self, record):
    """
    Emit *record*, first tagging it and reopening the underlying file
    if it was moved or deleted since the last write.
    """
    # Attach the record id before anything else touches the record.
    self.set_r_id(record)
    # Reopen the stream if the file on disk changed out from under us.
    self.reopenIfNeeded()
    # Delegate the actual write (and time-based rollover) to the base class.
    TimedRotatingFileHandler.emit(self, record)
def emit(self, record):
    """
    Emit a record, re-checking the base filename on every call.

    check_baseFilename()/build_baseFilename() let the target file name
    track the current time before the record is written.
    """
    try:
        if self.check_baseFilename(record):
            self.build_baseFilename()
        TimedRotatingFileHandler.emit(self, record)
    except (KeyboardInterrupt, SystemExit):
        raise
    except Exception:
        # BUGFIX: narrowed from a bare 'except:' so BaseExceptions
        # (e.g. GeneratorExit) are not silently swallowed.
        self.handleError(record)
class MPFileHandler(logging.Handler):
    '''
    Multiprocess-safe Rotating File Handler

    Copied from:
    http://stackoverflow.com/questions/641420/how-should-i-log-while-using-multiprocessing-in-python

    Records are stringified and put on a multiprocessing queue; a daemon
    thread drains the queue and writes with the wrapped handler, so only
    one process ever touches the file.
    '''

    def __init__(self, filename, when='h', interval=1,
                 backupCount=0, encoding=None, delay=0, utc=0):
        ''' See TimedRotatingFileHandler for arg docs '''
        logging.Handler.__init__(self)
        self._handler = TimedRotatingFileHandler(
            filename, when=when, interval=interval,
            backupCount=backupCount, encoding=encoding,
            delay=delay, utc=utc)
        self.queue = multiprocessing.Queue()
        t = threading.Thread(target=self.receive)
        t.daemon = True
        t.start()

    def setFormatter(self, fmt):
        '''Apply *fmt* to both this handler and the wrapped file handler.'''
        logging.Handler.setFormatter(self, fmt)
        self._handler.setFormatter(fmt)

    def receive(self):
        '''Drain the queue forever, emitting each record (daemon thread).'''
        while True:
            try:
                record = self.queue.get()
                self._handler.emit(record)
            except (KeyboardInterrupt, SystemExit):
                raise
            except EOFError:
                # Queue closed - the producing side went away.
                break
            except Exception:
                # BUGFIX: narrowed from a bare 'except:' so BaseExceptions
                # are not silently swallowed.
                traceback.print_exc(file=sys.stderr)

    def send(self, s):
        '''Enqueue a prepared record without blocking.'''
        self.queue.put_nowait(s)

    def _format_record(self, record):
        # ensure that exc_info and args
        # have been stringified. Removes any chance of
        # unpickleable things inside and possibly reduces
        # message size sent over the pipe
        if record.args:
            record.msg = record.msg % record.args
            record.args = None
        if record.exc_info:
            # Formatting caches exc_text so unpicklable exc_info can drop.
            dummy = self.format(record)
            record.exc_info = None
        return record

    def emit(self, record):
        '''Prepare the record and hand it to the writer thread's queue.'''
        try:
            s = self._format_record(record)
            self.send(s)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            # BUGFIX: narrowed from a bare 'except:'.
            self.handleError(record)

    def close(self):
        '''Close the wrapped file handler, then this handler.'''
        self._handler.close()
        logging.Handler.close(self)
def emit(self, record):
    """
    Emit the log record after stamping this handler's user and host
    metadata onto it (available to formatters as %(user)s / %(host)s).
    """
    record.__dict__.update({'user': self.user, 'host': self.host})
    return TimedRotatingFileHandler.emit(self, record)
import fileinput  # NOTE(review): appears unused here - confirm before removing
import json       # NOTE(review): appears unused here - confirm before removing
import argparse
import os
import sys
from logging.handlers import TimedRotatingFileHandler


class NoFormatter(object):
    """Formatter stand-in that just strips whitespace from the record.

    The records emitted below are raw text lines, not LogRecord objects,
    so a full logging.Formatter is unnecessary.
    """

    def format(self, record):
        return record.strip()


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("logdir")
    args = parser.parse_args()
    # Rotate hourly, with UTC-based rollover timestamps.
    logger = TimedRotatingFileHandler(os.path.join(args.logdir, 'reqlog.jl'),
                                      when='h', utc=True)
    formatter = NoFormatter()
    logger.setFormatter(formatter)
    # Copy stdin to the rotating log, one line per record.
    for line in sys.stdin:
        logger.emit(line)
    logger.flush()
def emit(self, record):
    """Emit only DEBUG-level records; any other level is dropped."""
    if record.levelno != DEBUG:
        return
    TimedRotatingFileHandler.emit(self, record)
class MultiProcessingLog(logging.Handler):
    """
    Multiprocess-safe wrapper around TimedRotatingFileHandler.

    Records are stringified and put on a multiprocessing queue; a daemon
    thread in the owning process drains the queue and writes with the
    wrapped handler, so only one process ever touches the file.
    """

    def __init__(self, filename, when='h', interval=1, backupCount=0,
                 encoding=None, delay=False, utc=False):
        self.filename = filename
        logging.Handler.__init__(self)
        self._handler = TimedRotatingFileHandler(
            filename, when=when, interval=interval,
            backupCount=backupCount, encoding=encoding,
            delay=delay, utc=utc)
        self.queue = multiprocessing.Queue(-1)
        t = threading.Thread(target=self.receive)
        t.daemon = True
        t.start()

    def setFormatter(self, fmt):
        """Apply *fmt* to both this handler and the wrapped file handler."""
        logging.Handler.setFormatter(self, fmt)
        self._handler.setFormatter(fmt)

    def receive(self):
        """Drain the queue forever, emitting each record (daemon thread)."""
        while True:
            try:
                record = self.queue.get()
                self._handler.emit(record)
            except (KeyboardInterrupt, SystemExit):
                raise
            except EOFError:
                # Queue closed - the producing side went away.
                break
            except Exception:
                # BUGFIX: narrowed from a bare 'except:' so BaseExceptions
                # are not silently swallowed.
                traceback.print_exc(file=sys.stderr)

    def send(self, s):
        """Enqueue a prepared record without blocking."""
        self.queue.put_nowait(s)

    def _format_record(self, record):
        # ensure that exc_info and args
        # have been stringified. Removes any chance of
        # unpickleable things inside and possibly reduces
        # message size sent over the pipe
        if record.args:
            record.msg = record.msg % record.args
            record.args = None
        if record.exc_info:
            # Formatting caches exc_text so unpicklable exc_info can drop.
            dummy = self.format(record)
            record.exc_info = None
        return record

    def emit(self, record):
        """Prepare the record and hand it to the writer thread's queue."""
        try:
            s = self._format_record(record)
            self.send(s)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            # BUGFIX: narrowed from a bare 'except:'.
            self.handleError(record)

    def close(self):
        """Close the wrapped file handler, then this handler."""
        self._handler.close()
        logging.Handler.close(self)
class MPFileHandler(logging.Handler):
    '''
    Multiprocess-safe Rotating File Handler

    Copied from:
    http://stackoverflow.com/questions/641420/how-should-i-log-while-using-multiprocessing-in-python

    Records are stringified and put on a multiprocessing queue; a daemon
    thread drains the queue and writes with the wrapped handler, so only
    one process ever touches the file.
    '''

    def __init__(self, filename, when='h', interval=1,
                 backupCount=0, encoding=None, delay=0, utc=0):
        ''' See TimedRotatingFileHandler for arg docs '''
        logging.Handler.__init__(self)
        self._handler = TimedRotatingFileHandler(
            filename, when=when, interval=interval,
            backupCount=backupCount, encoding=encoding,
            delay=delay, utc=utc)
        self.queue = multiprocessing.Queue()
        t = threading.Thread(target=self.receive)
        t.daemon = True
        t.start()

    def setFormatter(self, fmt):
        '''Apply *fmt* to both this handler and the wrapped file handler.'''
        logging.Handler.setFormatter(self, fmt)
        self._handler.setFormatter(fmt)

    def receive(self):
        '''Drain the queue forever, emitting each record (daemon thread).'''
        while True:
            try:
                record = self.queue.get()
                self._handler.emit(record)
            except (KeyboardInterrupt, SystemExit):
                raise
            except EOFError:
                # Queue closed - the producing side went away.
                break
            except Exception:
                # BUGFIX: narrowed from a bare 'except:' so BaseExceptions
                # are not silently swallowed.
                traceback.print_exc(file=sys.stderr)

    def send(self, s):
        '''Enqueue a prepared record without blocking.'''
        self.queue.put_nowait(s)

    def _format_record(self, record):
        # ensure that exc_info and args
        # have been stringified. Removes any chance of
        # unpickleable things inside and possibly reduces
        # message size sent over the pipe
        if record.args:
            record.msg = record.msg % record.args
            record.args = None
        if record.exc_info:
            # Formatting caches exc_text so unpicklable exc_info can drop.
            dummy = self.format(record)
            record.exc_info = None
        return record

    def emit(self, record):
        '''Prepare the record and hand it to the writer thread's queue.'''
        try:
            s = self._format_record(record)
            self.send(s)
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            # BUGFIX: narrowed from a bare 'except:'.
            self.handleError(record)

    def close(self):
        '''Close the wrapped file handler, then this handler.'''
        self._handler.close()
        logging.Handler.close(self)