def send(self, s):
    """Send payload *s* over UDP, GELF-chunking it when it exceeds chunk_size."""
    if len(s) > self.chunk_size:
        # Too big for one datagram: emit each GELF chunk separately.
        for piece in gelf.split(s, self.chunk_size):
            DatagramHandler.send(self, piece)
    else:
        DatagramHandler.send(self, s)
def __init__(self, host, port=12201, chunk_size=WAN_CHUNK,
             debugging_fields=True, extra_fields=True, fqdn=False):
    """Store the GELF emit options on the handler, then open the UDP transport.

    :param host: Graylog input host.
    :param port: Graylog UDP input port.
    :param chunk_size: maximum datagram payload before chunking applies.
    :param debugging_fields: kept on the handler for later formatting.
    :param extra_fields: kept on the handler for later formatting.
    :param fqdn: kept on the handler for later formatting.
    """
    self.chunk_size = chunk_size
    self.fqdn = fqdn
    self.debugging_fields = debugging_fields
    self.extra_fields = extra_fields
    DatagramHandler.__init__(self, host, port)
def __init__(self, host, port=12202, gelf_chunker=None, **kwargs):
    """Initialize the GELFUDPHandler

    .. note::
        By default a :class:`.handler.GELFWarningChunker` is used as the
        ``gelf_chunker``. Thus, GELF messages that chunk overflow will
        issue a :class:`.handler.GELFChunkOverflowWarning` and will be
        dropped.

    :param host: GELF UDP input host.
    :type host: str
    :param port: GELF UDP input port.
    :type port: int
    :param gelf_chunker: :class:`.handler.BaseGELFChunker` instance to
        handle chunking larger GELF messages.
    :type gelf_chunker: GELFWarningChunker
    """
    BaseGELFHandler.__init__(self, **kwargs)
    DatagramHandler.__init__(self, host, port)
    # Fix: the original signature used the mutable default argument
    # ``gelf_chunker=GELFWarningChunker()``, which is evaluated once at
    # function-definition time, so every handler instance silently shared
    # one chunker object. Build the default lazily per instance instead.
    if gelf_chunker is None:
        gelf_chunker = GELFWarningChunker()
    self.gelf_chunker = gelf_chunker
def __init__(self, host, port=12201, chunk_size=WAN_CHUNK,
             debugging_fields=True, extra_fields=True, fqdn=False,
             localname=None, facility=None, level_names=False,
             compress=True):
    """Forward every option to BaseGELFHandler, then open the UDP transport.

    ``port`` is coerced with int() so string ports (e.g. from config files)
    are accepted by the socket layer.
    """
    BaseGELFHandler.__init__(self, host, port, chunk_size,
                             debugging_fields, extra_fields, fqdn,
                             localname, facility, level_names, compress)
    DatagramHandler.__init__(self, host, int(port))
def __init__(self, host, port=5959, message_type='logstash', fqdn=False,
             version=0, tags=None):
    """Open the UDP transport and install a logstash formatter.

    ``version`` selects between the v0 and v1 logstash wire formats;
    ``tags`` defaults to an empty list when omitted.
    """
    DatagramHandler.__init__(self, host, port)
    formatter_cls = (formatter.LogstashFormatterVersion1 if version == 1
                     else formatter.LogstashFormatterVersion0)
    self.formatter = formatter_cls(message_type, tags or [], fqdn)
def __init__(self, host, port=12201, chunk_size=WAN_CHUNK,
             debugging_fields=True, extra_fields=True, fqdn=False,
             localname=None, facility=None):
    """Store the GELF formatting options, then open the UDP transport.

    All keyword options are simply recorded on the handler instance for
    later use when records are formatted.
    """
    self.chunk_size = chunk_size
    self.fqdn = fqdn
    self.localname = localname
    self.facility = facility
    self.debugging_fields = debugging_fields
    self.extra_fields = extra_fields
    DatagramHandler.__init__(self, host, port)
def __init__(self, host, port=9700, max_packet_size=64 * 1024,
             debugging_fields=False, extra_fields=True, fqdn=False,
             localname=None, service="logstash", type="logs"):
    """Record the logstash emit options, then open the UDP transport.

    ``type`` deliberately mirrors the logstash field name even though it
    shadows the builtin; renaming it would break keyword callers.
    """
    self.max_packet_size = max_packet_size
    self.debugging_fields = debugging_fields
    self.extra_fields = extra_fields
    self.fqdn = fqdn
    self.localname = localname
    self.service = service
    self.type = type
    DatagramHandler.__init__(self, host, port)
def __init__(self, host, port=9700, max_packet_size=64 * 1024,
             debugging_fields=False, extra_fields=True, fqdn=False,
             localname=None, index="logstash-%Y.%m.%d", type="logs"):
    """Record the logstash emit options, then open the UDP transport.

    ``index`` is a strftime pattern for the target index name; ``type``
    deliberately shadows the builtin to match the logstash field name.
    """
    self.max_packet_size = max_packet_size
    self.debugging_fields = debugging_fields
    self.extra_fields = extra_fields
    self.fqdn = fqdn
    self.localname = localname
    self.index = index
    self.type = type
    DatagramHandler.__init__(self, host, port)
def __init__(self, host, port, chunk_size=WAN_CHUNK):
    """Record the chunk size and the GELF skip list, then open the UDP transport."""
    self.chunk_size = chunk_size
    # skip_list is used to filter additional fields in a log message.
    # It contains all attributes listed in
    # http://docs.python.org/library/logging.html#logrecord-attributes
    # plus exc_text, which is only found in the logging module source,
    # and id, which is prohibited by the GELF format.
    # Fix: the original listed 'msecs' twice; the duplicate is removed
    # (a set de-duplicates anyway, but the repetition was misleading).
    self.skip_list = {
        'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename',
        'funcName', 'id', 'levelname', 'levelno', 'lineno', 'module',
        'msecs', 'message', 'msg', 'name', 'pathname', 'process',
        'processName', 'relativeCreated', 'thread', 'threadName',
    }
    DatagramHandler.__init__(self, host, port)
def __init__(self, host, port=5959, message_type='logstash', fqdn=False,
             version=0):
    """Open the UDP transport and install a logstash formatter (v0 or v1)."""
    DatagramHandler.__init__(self, host, port)
    formatter_cls = (formatter.LogstashFormatterVersion1 if version == 1
                     else formatter.LogstashFormatterVersion0)
    self.formatter = formatter_cls(message_type, [], fqdn)
def __init__(self, host, port, compress=True, chunk_size=1300, **kwargs):
    """GELF-over-UDP logging handler.

    Each record is converted to GELF (Graylog extended log format) and
    shipped over UDP. Messages longer than ``chunk_size`` are split into
    chunks; at most 128 chunks are allowed per message.

    :param host: GELF UDP input host
    :param port: GELF UDP input port
    :param compress: whether to compress the message before sending it
    :param chunk_size: chunk length; keep it below the network MTU
        (maximum transmission unit)
    """
    DatagramHandler.__init__(self, host, port)
    BaseHandler.__init__(self, compress=compress, **kwargs)
    self.chunk_size = chunk_size
def __init__(self, host, port, compress=True, chunk_size=1300, **kwargs):
    """GELF-over-UDP logging handler.

    Each record is converted to GELF (Graylog extended log format) and
    shipped over UDP. Messages longer than ``chunk_size`` are split into
    chunks; at most 128 chunks are allowed per message.

    :param host: GELF UDP input host
    :param port: GELF UDP input port
    :param compress: whether to compress the message before sending it
    :param chunk_size: chunk length; keep it below the network MTU
        (maximum transmission unit)
    """
    DatagramHandler.__init__(self, host, port)
    BaseHandler.__init__(self, **kwargs)
    self.compress = compress
    self.chunk_size = chunk_size
def __init__(self, host, port=12202, chunk_size=WAN_CHUNK, **kwargs):
    """Initialize the GELFUDPHandler.

    :param host: GELF UDP input host.
    :type host: str
    :param port: GELF UDP input port.
    :type port: int
    :param chunk_size: message chunk size; messages larger than this are
        delivered to Graylog in multiple chunks.
    :type chunk_size: int
    """
    self.chunk_size = chunk_size
    BaseGELFHandler.__init__(self, **kwargs)
    DatagramHandler.__init__(self, host, port)
def __init__(self, host=None, port=None, source=None, role_list=None):
    """Open the UDP transport and lazily resolve this machine's name.

    Falls back to ``constants.HOST``/``constants.PORT`` when host/port are
    omitted. The resolved name is cached on the *class* so it is computed
    only once per process.
    """
    DatagramHandler.__init__(self, host or constants.HOST,
                             port or constants.PORT)
    # source_ip
    cls = self.__class__
    if not cls.source_ip:
        try:
            # Step by step: if a later step fails, the hostname at least
            # has already been recorded.
            cls.source_ip = socket.gethostname()
            # Resolving the address is very slow on some machines, so the
            # plain hostname is used instead:
            # cls.source_ip = socket.gethostbyname(cls.source_ip)
        except Exception:
            # Fix: was a bare ``except:``, which would also swallow
            # SystemExit/KeyboardInterrupt. Best-effort lookup is kept.
            pass
    cls.source_ip = cls.source_ip or 'none'
    self.source = source
    self.role_list = role_list
def log_init(log_path):
    """Configure the module logger with file, console and UDP handlers.

    :param log_path: path of the log file; its basename (without
        extension) is embedded in the format string.
    """
    _logger = logging.getLogger(logger_name)
    file_name = str(log_path).split('/')[-1].split('.')[0]
    datefmt = '%Y-%m-%d %I:%M:%S %p'
    fmt = '%(asctime)s-[%(levelname)s]-[' + file_name + ']: %(message)s'
    _logger.setLevel(logging.DEBUG)
    # File: INFO and above.
    fh = logging.FileHandler(log_path)
    fh.setLevel(logging.INFO)
    fh.setFormatter(Formatter(fmt, datefmt))
    _logger.addHandler(fh)
    # Console: everything.
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(logging.DEBUG)
    ch.setFormatter(MyFormatter(fmt, datefmt))
    _logger.addHandler(ch)
    # UDP (TLOG): CRITICAL only.
    uh = DatagramHandler(const.const.TLOG_ADDR[0], const.const.TLOG_ADDR[1])
    uh.setLevel(logging.CRITICAL)
    # Fix: setFormatter() requires a Formatter instance; the original
    # passed the raw format string, which breaks as soon as the handler
    # formats a record.
    uh.setFormatter(Formatter('%(message)s'))
    # Fix: the handler was created but never attached, making it dead code.
    _logger.addHandler(uh)
def __init__(self, host, port, measurement, debugging_fields=None,
             extra_fields=True, localname=None, fqdn=False,
             global_tags=None, sock=None):
    """Record the measurement options, validate tags, open the UDP transport.

    Raises ValueError if any key of ``global_tags`` collides with
    ``RESERVED_TAGS``. The tag mapping is copied so later caller-side
    mutation cannot leak into the handler.
    """
    DatagramHandler.__init__(self, host, int(port))
    self.measurement = measurement
    self.debugging_fields = debugging_fields
    self.extra_fields = extra_fields
    self.fqdn = fqdn
    self.localname = localname
    self.global_tags = global_tags.copy() if global_tags else {}
    clashes = [t for t in self.global_tags if t in RESERVED_TAGS]
    if clashes:
        raise ValueError("{!r} in global_tags impossible".format(clashes[0]))
    self.sock = sock
def add_handlers(self, logger, formatter):
    """Attach the handlers named in HANDLERS to *logger* and return it.

    Recognized names: 'stderr' (StreamHandler), 'file' (FileHandler, only
    when LOGFILE is configured) and 'server' (DatagramHandler, only when
    HOST and PORT are configured). Each attached handler gets INFO level
    and the given *formatter*.
    """
    for name in HANDLERS:
        # Fix: use None as the "no handler" sentinel instead of False.
        handler = None
        if name == 'stderr':
            handler = StreamHandler()
        elif name == 'file' and LOGFILE is not None:
            handler = FileHandler(filename=LOGFILE)
        elif name == 'server' and HOST is not None and PORT is not None:
            handler = DatagramHandler(host=HOST, port=int(PORT))
        if handler is not None:
            handler.setLevel(logging.INFO)
            handler.setFormatter(formatter)
            logger.addHandler(handler)
    return logger
def new_logger(self):
    """Create default logs"""
    logger = logging.getLogger(self.basename)
    logger.addHandler(DatagramHandler('localhost', DEFAULT_UDP_LOGGING_PORT))
    level_name = (self.kwargs.get("level") or "INFO").upper()
    # Both DEBUG and INFO intentionally map to INFO, matching the
    # original if/elif chain; anything unrecognized falls back to ERROR.
    level_map = {
        "DEBUG": logging.INFO,
        "INFO": logging.INFO,
        "WARNING": logging.WARNING,
        "ERROR": logging.ERROR,
        "CRITICAL": logging.CRITICAL,
    }
    logger.setLevel(level_map.get(level_name, logging.ERROR))
    return logger
def config_logging(suffix=""):
    """Attach a midnight-rotating file handler and an ERROR-only UDP
    handler to the root logger, and set the root level to INFO."""
    from logging.handlers import TimedRotatingFileHandler, DatagramHandler

    base_format = logging.Formatter(
        "%(asctime)s %(levelname)s %(message)s", "%Y-%m-%d %H:%M:%S")
    filename = "log-" + suffix + ".log"
    file_handler = TimedRotatingFileHandler(
        filename=filename, when='midnight', backupCount=15)
    file_handler.setFormatter(base_format)
    root = logging.getLogger()
    root.addHandler(file_handler)
    # UDP log-collector endpoint.
    udp_handler = DatagramHandler(host="10.47.54.115", port=10000)
    udp_handler.setFormatter(base_format)
    udp_handler.setLevel(level=logging.ERROR)
    root.addHandler(udp_handler)
    root.setLevel(level=logging.INFO)
def __init__(self, pipeline_id=None, host='127.0.0.1', port=50001):
    """Set up a per-pipeline logger that ships INFO records over UDP.

    NOTE: the logger is looked up under the *given* pipeline_id, so when
    pipeline_id is None the root logger is used even though _pipeline_id
    receives a freshly generated hex id (matches the original behavior).
    """
    super(PipelineCallback, self).__init__()
    self._pipeline_id = uuid4().hex if pipeline_id is None else pipeline_id
    self.logger = logging.getLogger(pipeline_id)
    udp_handler = DatagramHandler(host, port)
    udp_handler.setFormatter(logging.Formatter(fmt="%(message)s"))
    udp_handler.setLevel(logging.INFO)
    self.logger.addHandler(udp_handler)
    self.logger.setLevel(logging.INFO)
def init_logging(logger=None, level="DEBUG", log_file="", init_handler=None,
                 max_count=30, propagate=False, file_config=None,
                 dict_config=None, unix_domain=None):
    """Configure one shared handler on the given logger(s) and/or the root.

    Handler selection, in priority order: a custom ``init_handler`` factory
    or a midnight TimedRotatingFileHandler when ``log_file`` is set; a
    DatagramHandler when ``unix_domain`` is set (port is None, i.e. a Unix
    domain datagram socket); otherwise a StreamHandler. After that,
    ``file_config``/``dict_config`` may additionally apply declarative
    logging configuration.
    """
    fmt = DEFAULT_FMT
    datefmt = "%Y-%m-%d %H:%M:%S"
    formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)
    # e.g. "debug" -> logging.DEBUG; raises AttributeError on unknown names.
    level = getattr(logging, level.upper())
    if log_file:
        if init_handler:
            handler = init_handler(log_file, max_count)
        else:
            handler = TimedRotatingFileHandler(log_file, when="midnight",
                                               interval=1,
                                               backupCount=max_count)
    elif unix_domain:
        # port=None selects a Unix domain datagram socket in DatagramHandler.
        handler = DatagramHandler(unix_domain, None)
    else:
        handler = logging.StreamHandler()
    handler.setLevel(level)
    handler.setFormatter(formatter)
    # Initialize the argument logger with the arguments, level and log_file.
    root = logging.getLogger()
    if logger:
        loggers = logger if isinstance(logger, (list, tuple)) else [logger]
        for _logger in loggers:
            _logger.propagate = propagate
            _logger.setLevel(level)
            _logger.addHandler(handler)
            # If the root logger was explicitly listed, don't configure it
            # a second time below.
            if root and root is _logger:
                root = None
    if root:
        root.propagate = propagate
        root.setLevel(level)
        root.addHandler(handler)
    # Initialize logging by the configuration file, file_config.
    if file_config:
        logging.config.fileConfig(file_config, disable_existing_loggers=False)
    # Initialize logging by the dict configuration, dict_config.
    if dict_config and hasattr(logging.config, "dictConfig"):
        logging.config.dictConfig(dict_config)
def __init__(self, sName='', nPort=0):
    """Set up a logger, optionally with a UDP handler on (sName, nPort).

    NOTE(review): this block references several names not defined in the
    visible chunk (``s``, ``gHost``, ``_logger``, ``Global``) — verify
    they exist at module level, otherwise this raises NameError at runtime.
    """
    pPy = config.logger.get('dir')
    # NOTE(review): `s` is undefined here as far as this chunk shows;
    # possibly a path separator was intended — confirm upstream.
    self.pHome = pPy + s
    log_level = config.logger.get('level')
    if not log_level:
        log_level = 'INFO'
    kLevel = _LOG_LEVEL_DICT.get(log_level)
    oLogger = getLogger(gHost)
    oLogger.propagate = 0
    oLogger.setLevel(kLevel)
    self.oLogger = oLogger
    self.log = self.oLogger
    if nPort:
        # Ship records over UDP to (sName, nPort).
        self.hLog = DatagramHandler(sName, nPort)
        self.log.addHandler(self.hLog)
    elif sName:
        self.sName = sName
        self.pLog = ''
        # NOTE(review): assigning property(_logger) on what appears to be a
        # plain object has no descriptor effect — likely a bug; confirm.
        Global.log = property(_logger)
    else:
        self.init_logger()
def log_init(log_path):
    """Configure the module logger with file, console and UDP handlers.

    :param log_path: path of the log file; its basename (without
        extension) is embedded in the format string.
    """
    _logger = logging.getLogger(logger_name)
    file_name = str(log_path).split('/')[-1].split('.')[0]
    datefmt = '%Y-%m-%d %I:%M:%S %p'
    fmt = '%(asctime)s-[%(levelname)s]-[' + file_name + ']: %(message)s'
    _logger.setLevel(logging.DEBUG)
    # File: ERROR and above.
    fh = logging.FileHandler(log_path)
    fh.setLevel(logging.ERROR)
    fh.setFormatter(Formatter(fmt, datefmt))
    _logger.addHandler(fh)
    # Console: everything.
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(logging.DEBUG)
    ch.setFormatter(MyFormatter(fmt, datefmt))
    _logger.addHandler(ch)
    # UDP (TLOG): CRITICAL only.
    uh = DatagramHandler(const.const.TLOG_ADDR[0], const.const.TLOG_ADDR[1])
    uh.setLevel(logging.CRITICAL)
    # Fix: setFormatter() requires a Formatter instance; the original
    # passed the raw format string, which breaks as soon as the handler
    # formats a record.
    uh.setFormatter(Formatter('%(message)s'))
    # Fix: the handler was created but never attached, making it dead code.
    _logger.addHandler(uh)
def __init__(self, host, port=5959, message_type='logstash', fqdn=False):
    """Keep the logstash message options, then open the UDP transport."""
    self.fqdn = fqdn
    self.message_type = message_type
    DatagramHandler.__init__(self, host, port)
sys_formatter = logging.Formatter(sys_format)

# Console: every message through a stream handler.
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.DEBUG)
console_handler.setFormatter(def_formatter)

# File ({today_date}.log): WARNING and above, timestamped.
# Shares its format with the console/stream handler.
file_handler = logging.FileHandler(f'{today_date}.log')
file_handler.setLevel(logging.WARNING)
file_handler.setFormatter(def_formatter)

# Syslog server over UDP: ERROR and above, no timestamp in the layout.
sys_handler = DatagramHandler(host='127.0.0.1', port=514)
sys_handler.setLevel(logging.ERROR)
sys_handler.setFormatter(sys_formatter)

logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
logger.addHandler(file_handler)
logger.addHandler(console_handler)
logger.addHandler(sys_handler)


def my_fun(n):
    """Log the integers 0..n-1 at DEBUG, warning once when 50 is reached."""
    for i in range(0, n):
        logging.debug(i)
        if i == 50:
            logging.warning("The value of i is 50.")
def __init__(self, path, host='localhost', port=8126):
    """Keep the metric path and open the UDP transport (port coerced to int)."""
    self.path = path
    # the eventual base of DatagramHandler is not new-style, hence the
    # explicit unbound call instead of super().
    DatagramHandler.__init__(self, host, int(port))
def __init__(self, host, port, chunk_size=WAN_CHUNK):
    """Remember the chunk size used when splitting payloads, then open
    the UDP transport."""
    self.chunk_size = chunk_size
    DatagramHandler.__init__(self, host, port)
def __init__(self, topic, host, port):
    """Remember the destination topic, then open the UDP transport."""
    self.topic = topic
    DatagramHandler.__init__(self, host, port)
HOST, PORT = "localhost", 514
format = "%(asctime)s %(filename)s:%(lineno)-3d %(levelname)s %(message)s"
syslog_format = "%(filename)s:%(lineno)-3d %(levelname)s %(message)s"
formatter = logging.Formatter(format)

# File ({today}.log): WARNING and above.
file_handler = logging.FileHandler(f"{date.today()}.log")
file_handler.setLevel(logging.WARNING)
file_handler.setFormatter(formatter)

# Console: everything.
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.DEBUG)
console_handler.setFormatter(formatter)

# Syslog server over UDP: ERROR and above.
syslog_server_handler = DatagramHandler(HOST, PORT)
syslog_server_handler.setLevel(logging.ERROR)
# Fix: setFormatter() requires a logging.Formatter instance; the original
# passed the raw format string, which raises AttributeError the first
# time the handler formats a record.
syslog_server_handler.setFormatter(logging.Formatter(syslog_format))

logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
logger.addHandler(file_handler)
logger.addHandler(console_handler)
logger.addHandler(syslog_server_handler)


def my_fun(n):
    """Log the integers 0..n-1 at DEBUG, warning once when 50 is reached."""
    for i in range(0, n):
        logging.debug(i)
        if i == 50:
            logging.warning("The value of i is 50.")
def __init__(self, host, port=5959, message_type="logstash", fqdn=False, version=0): DatagramHandler.__init__(self, host, port) if version == 1: self.formatter = formatter.LogstashFormatterVersion1(message_type, [], fqdn) else: self.formatter = formatter.LogstashFormatterVersion0(message_type, [], fqdn)
def udpHandler(self):
    """Attach a UDP DatagramHandler (127.0.0.1:3333) to self.logger,
    using this object's configured level and formatter."""
    handler = DatagramHandler('127.0.0.1', 3333)
    handler.setLevel(self.log_level)
    handler.setFormatter(self.formatter)
    self.logger.addHandler(handler)
def __init__(self, host, port, chunk_size=WAN_CHUNK, debugging_fields=True):
    """Record chunking and debug-field options, then open the UDP transport."""
    self.chunk_size = chunk_size
    self.debugging_fields = debugging_fields
    DatagramHandler.__init__(self, host, port)
def __init__(self, host, port=5959, message_type="logstash", fqdn=False, version=0): self.message_type = message_type self.fqdn = fqdn self.version = version DatagramHandler.__init__(self, host, port)
sysformat = "%(filename)s:%(lineno)-4d %(levelname)s %(message)s"
today = datetime.date.today()
# NOTE(review): `format` is not assigned anywhere in this chunk, so unless
# an earlier part of the file defines it, it resolves to the builtin
# format() function rather than a format string — verify upstream.
formatter = logging.Formatter(format)
sysformatter = logging.Formatter(sysformat)

# File ({today}.log): WARNING and above.
file_handler = logging.FileHandler(f'{today}.log')
file_handler.setLevel(logging.WARNING)
file_handler.setFormatter(formatter)

# Console: everything.
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.DEBUG)
console_handler.setFormatter(formatter)

# Syslog server over UDP: ERROR and above, timestamp-free layout.
syslog_handler = DatagramHandler("127.0.0.1", 514)
syslog_handler.setLevel(logging.ERROR)
syslog_handler.setFormatter(sysformatter)

logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
logger.addHandler(file_handler)
logger.addHandler(console_handler)
logger.addHandler(syslog_handler)


def my_fun(n):
    """Log the integers 0..n-1 at DEBUG, warning once when 50 is reached."""
    for i in range(0, n):
        logging.debug(i)
        if i == 50:
            logging.warning("The value of i is 50.")
def makeSocket(self):
    """Delegate socket creation to DatagramHandler (i.e. a UDP socket),
    bypassing any intermediate override in the MRO."""
    return DatagramHandler.makeSocket(self)
def __init__(self, host, port, signature=None):
    """Initialize both parent handlers against the same endpoint.

    NOTE(review): SplunkTcpHandler.__init__ and DatagramHandler.__init__
    are both invoked explicitly with the same host/port; if both set up
    transport state, the second call overwrites the first — confirm this
    double initialization is intentional for the class's MRO.
    """
    SplunkTcpHandler.__init__(self, host, port, signature)
    DatagramHandler.__init__(self, host, port)
log_format = "%(asctime)s %(filename)s:%(lineno)-4d %(levelname)s %(message)s"
log_format_serv = "%(filename)s:%(lineno)-4d %(levelname)s %(message)s"
formatter = logging.Formatter(log_format)
formatter_serv = logging.Formatter(log_format_serv)

# File ({today}.log): WARNING and above, timestamped.
file_handler = logging.FileHandler('{}.log'.format(datetime.date.today()))
file_handler.setLevel(logging.WARNING)
file_handler.setFormatter(formatter)

# Console: everything, timestamped.
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.DEBUG)
console_handler.setFormatter(formatter)

# Syslog server over UDP: ERROR and above, timestamp-free layout.
syslog_handler = DatagramHandler('127.0.0.1', 514)
syslog_handler.setLevel(logging.ERROR)
syslog_handler.setFormatter(formatter_serv)

logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
logger.addHandler(file_handler)
logger.addHandler(console_handler)
logger.addHandler(syslog_handler)


def my_fun(n):
    """Log the integers 0..n-1 at DEBUG, warning once when 50 is reached."""
    for i in range(0, n):
        logging.debug(i)
        if i == 50:
            logging.warning("The value of i is 50.")
def send(self, s):
    """Send payload *s*; anything at or above chunk_size is emitted as
    a sequence of GELF chunks."""
    if len(s) >= self.chunk_size:
        for chunk in ChunkedGELF(s, self.chunk_size):
            DatagramHandler.send(self, chunk)
    else:
        DatagramHandler.send(self, s)
def __init__(self, host, port):
    """Plain UDP handler: defer entirely to DatagramHandler, no extra state."""
    DatagramHandler.__init__(self, host, port)