def log_to_socket(level=logging.INFO, host=config.TCP_LOGGING_HOST, port=config.TCP_LOGGING_PORT):
    """Attach a TCP SocketHandler so all Lantz events reach a log server.

    Parameters
    ----------
    level : logging level for the lantz handler (Default value = logging.INFO)
    host : socket host (default config.TCP_LOGGING_HOST)
    port : socket port (default config.TCP_LOGGING_PORT)

    Returns
    -------
    type
        lantz logger
    """
    sock_handler = SocketHandler(host, port)
    sock_handler.setLevel(level)
    LOGGER.addHandler(sock_handler)
    # Make sure the logger itself does not filter out what the handler wants.
    if level < LOGGER.getEffectiveLevel():
        LOGGER.setLevel(level)
    return LOGGER
def __init__(self, url, exchange='logging.gelf', debugging_fields=True,
             extra_fields=True, fqdn=False, exchange_type='fanout',
             localname=None, facility=None, virtual_host='/'):
    """Build a GELF handler that publishes log records via RabbitMQ.

    ``url`` must be an ``amqp://user:pass@host:port/vhost`` URL; the parsed
    parts are collected in ``self.cn_args`` for the AMQP connection.
    NOTE(review): uses ``urllib.unquote`` — Python 2 only (``urllib.parse``
    on Python 3).
    """
    self.url = url
    parsed = urlparse(url)
    if parsed.scheme != 'amqp':
        raise ValueError('invalid URL scheme (expected "amqp"): %s' % url)
    host = parsed.hostname or 'localhost'
    port = _ifnone(parsed.port, 5672)
    # A non-empty path in the URL (minus the leading '/') overrides the
    # virtual_host argument.
    virtual_host = virtual_host if not urllib.unquote(parsed.path[1:]) else urllib.unquote(parsed.path[1:])
    self.cn_args = {
        'host': '%s:%s' % (host, port),
        'userid': _ifnone(parsed.username, 'guest'),
        'password': _ifnone(parsed.password, 'guest'),
        'virtual_host': virtual_host,
        'insist': False,
    }
    self.exchange = exchange
    self.debugging_fields = debugging_fields
    self.extra_fields = extra_fields
    self.fqdn = fqdn
    self.exchange_type = exchange_type
    self.localname = localname
    self.facility = facility
    self.virtual_host = virtual_host
    SocketHandler.__init__(self, host, port)
    # Keep the AMQP library's own log records out of this handler
    # (would otherwise recurse while publishing).
    self.addFilter(ExcludeFilter('amqplib'))
def __init__(self, host='localhost', port=5672, username='******', password='******',
             exchange='logstash', exchange_type='fanout', virtual_host='/',
             message_type='logstash', tags=None, durable=False, version=0,
             extra_fields=True, fqdn=False, facility=None, exchange_routing_key=''):
    """Configure an AMQP-backed Logstash handler.

    Stores connection/exchange settings on the instance, selects the Logstash
    formatter matching ``version`` (0 or 1), and initialises the underlying
    ``SocketHandler`` transport.
    """
    # Connection / exchange settings
    self.host = host
    self.port = port
    self.username = username
    self.password = password
    self.exchange = exchange
    self.exchange_type = exchange_type
    self.exchange_is_durable = durable
    self.virtual_host = virtual_host
    self.routing_key = exchange_routing_key
    SocketHandler.__init__(self, host, port)
    # Logstash message settings
    self.tags = tags or []
    if version == 1:
        formatter_cls = formatter.LogstashFormatterVersion1
    else:
        formatter_cls = formatter.LogstashFormatterVersion0
    self.formatter = formatter_cls(message_type, tags, fqdn)
    # Generic logging settings
    self.extra_fields = extra_fields
    self.fqdn = fqdn
    self.facility = facility
def __init__(self, url, exchange='logging.gelf', exchange_type='fanout', virtual_host='/'):
    """GELF-over-RabbitMQ handler.

    Parses an ``amqp://`` URL into the connection arguments kept in
    ``self.cn_args`` and sets up the socket transport.
    """
    self.url = url
    parsed = urlparse(url)
    if parsed.scheme != 'amqp':
        raise ValueError('invalid URL scheme (expected "amqp"): %s' % url)
    host = parsed.hostname or 'localhost'
    port = _ifnone(parsed.port, 5672)
    # A virtual host embedded in the URL path wins over the argument.
    path_vhost = unquote(parsed.path[1:])
    virtual_host = path_vhost or virtual_host
    self.cn_args = {
        'host': '%s:%s' % (host, port),
        'userid': _ifnone(parsed.username, 'guest'),
        'password': _ifnone(parsed.password, 'guest'),
        'virtual_host': virtual_host,
        'insist': False,
    }
    self.exchange = exchange
    self.exchange_type = exchange_type
    self.virtual_host = virtual_host
    SocketHandler.__init__(self, host, port)
    # Keep the AMQP library's own records out of this handler.
    self.addFilter(ExcludeFilter('amqp'))
def init_log(logfile="log.log", level="INFO", server_addr=None):
    """Configure the root logger once: screen + file, optionally a TCP server.

    :param logfile: path of the log file that receives all levels
    :param level: threshold for the on-screen (stream) handler only
    :param server_addr: optional ``(host, port)`` tuple for a SocketHandler
    :raises RuntimeError: if the root logger already has handlers
    """
    # BUG FIX: the original used `len(root.handlers) is 0`, which compares
    # object *identity* on an int (undefined behavior); use truthiness.
    if not root.handlers:
        # root records everything; each handler filters on its own level
        root.setLevel(0)
        fmt = "%(asctime)s %(name)s,line:%(lineno)d [%(levelname)s] %(message)s"
        fmter = Formatter(fmt=fmt)
        # display on screen, filtered by `level`
        s_handler = StreamHandler()
        s_handler.setLevel(level)
        s_handler.setFormatter(fmter)
        root.addHandler(s_handler)
        # write all levels to logfile
        f_handler = FileHandler(logfile)
        f_handler.setFormatter(fmter)
        root.addHandler(f_handler)
        # forward everything to a TCP log server when an address is given
        if server_addr is not None:
            t_handler = SocketHandler(*server_addr)
            t_handler.setFormatter(fmter)
            root.addHandler(t_handler)
    else:
        # message fixed: it previously referred to a non-existent init_debug()
        raise RuntimeError("init_log() can only be called once.")
def __init__(self, url, exchange='logging.gelf', debugging_fields=True,
             extra_fields=True, fqdn=False, exchange_type='fanout',
             localname=None, facility=None, virtual_host='/'):
    """GELF handler publishing to RabbitMQ; connection details come from an
    ``amqp://`` URL.

    NOTE(review): ``urllib.unquote`` makes this Python 2 only; on Python 3
    it lives at ``urllib.parse.unquote``.
    """
    self.url = url
    parsed = urlparse(url)
    if parsed.scheme != 'amqp':
        raise ValueError('invalid URL scheme (expected "amqp"): %s' % url)
    host = parsed.hostname or 'localhost'
    port = _ifnone(parsed.port, 5672)
    # The URL path (without the leading '/') overrides the virtual_host
    # argument when it is non-empty.
    virtual_host = virtual_host if not urllib.unquote(
        parsed.path[1:]) else urllib.unquote(parsed.path[1:])
    self.cn_args = {
        'host': '%s:%s' % (host, port),
        'userid': _ifnone(parsed.username, 'guest'),
        'password': _ifnone(parsed.password, 'guest'),
        'virtual_host': virtual_host,
        'insist': False,
    }
    self.exchange = exchange
    self.debugging_fields = debugging_fields
    self.extra_fields = extra_fields
    self.fqdn = fqdn
    self.exchange_type = exchange_type
    self.localname = localname
    self.facility = facility
    self.virtual_host = virtual_host
    SocketHandler.__init__(self, host, port)
    # Exclude the AMQP library's own records to avoid logging recursion.
    self.addFilter(ExcludeFilter('amqplib'))
def __init__(self, host, port=12201, chunk_size=WAN_CHUNK, debugging_fields=True,
             extra_fields=True, fqdn=False, localname=None, facility=None,
             level_names=False, compress=False):
    """GELF handler: forward formatting options to BaseGELFHandler, then
    initialise the socket transport.

    ``port`` is coerced with ``int()`` for the transport so string values
    (e.g. read from config files) are accepted.
    """
    BaseGELFHandler.__init__(self, host, port, chunk_size, debugging_fields,
                             extra_fields, fqdn, localname, facility,
                             level_names, compress)
    SocketHandler.__init__(self, host, int(port))
def initlog(logger_name, host, port, logLevel=logging.INFO):
    """Return a logger wired to a TCP SocketHandler, configured only once.

    The handler is attached only when ``logger_name`` has never been created
    before, so repeated calls do not stack duplicate handlers.
    """
    already_known = logger_name in logging.Logger.manager.loggerDict
    if not already_known:
        fresh = logging.getLogger(logger_name)
        sock_handler = SocketHandler(host, port)
        sock_handler.setLevel(logLevel)
        fresh.addHandler(sock_handler)
        fresh.setLevel(logLevel)
    return logging.getLogger(logger_name)
def __init__(self, host, port, prefix=''):
    """Socket handler that queues payloads and ships them from a daemon thread.

    :param prefix: format string; ``{hostname}`` is substituted with the
        local fully-qualified domain name.
    """
    SocketHandler.__init__(self, host, port)
    # Drop the connection on errors instead of retrying mid-record.
    self.closeOnError = 1
    self.prefix = prefix.format(hostname=socket.getfqdn())
    # Payloads are pushed onto this queue and drained by the `run` method;
    # the daemon flag lets the interpreter exit without joining the thread.
    self.queue = Queue()
    self.thread = Thread(target=self.run)
    self.thread.setDaemon(True)
    self.thread.start()
def initlog(logger_name, host, port, logLevel=logging.DEBUG):
    """Get (or lazily configure) a logger that forwards records over TCP.

    A SocketHandler is attached only on the first call for a given
    ``logger_name``; later calls return the already-configured logger.
    """
    if logger_name in logging.Logger.manager.loggerDict:
        return logging.getLogger(logger_name)
    new_logger = logging.getLogger(logger_name)
    tcp_handler = SocketHandler(host, port)
    tcp_handler.setLevel(logLevel)
    new_logger.addHandler(tcp_handler)
    new_logger.setLevel(logLevel)
    return new_logger
def new_log(name, hdfile=None, hdsock=None, open_hdstream=True, level=logging.INFO): """ 创建日志打印实例 @param name: 日志模块名 @param hdfile: 文件地址 @param hdsock: 网络地址, 'IP:PORT' @param open_hdstream: 是否把日志同时打印到控制台, 默认开启 @param level: 日志打印级别 """ # 初始化日志配置 log = logging.getLogger(name) log.setLevel(level) formatter = logging.Formatter("%(asctime)s [%(levelname).3s] %(message)s", "%Y-%m-%d %H:%M:%S") # 打印日志到文件 if hdfile: hdfile = get_logfile_name(hdfile) file_handler = logging.FileHandler(hdfile) file_handler.setFormatter(formatter) log.addHandler(file_handler) # 打印日志到网络 if hdsock: from logging.handlers import SocketHandler host, port = hdsock.split(":") socket_handler = SocketHandler(host, int(port)) socket_handler.setFormatter(formatter) log.addHandler(socket_handler) # 打印日志到控制台 if open_hdstream: formatter = logging.Formatter("%(asctime)s [%(levelname).3s] - [%(name).8s] %(message)s", "%Y-%m-%d %H:%M:%S") stream_handler = logging.StreamHandler() stream_handler.setFormatter(formatter) log.addHandler(stream_handler) def exception(mod=""): """ 记录详细代码异常日志 Example: 2014-12-29 18:56:35 [ERR] uid, ZeroDivisionError: integer division or modulo by zero, print 0/0, File "/Users/chris/develop/masdk/utils/logger.py", line 46, in <module> """ message = ','.join(traceback.format_exc().split('\n')[::-1][1:-1]) if mod: message = "%s, %s" % (mod, message) log.error(message) # 支持缩写方式调用 log.msg = log.info log.err = log.error log.war = log.warning log.cri = log.critical log.exception = exception log.exc = log.exception return log
def _create_cutelog_logger(name: str) -> logging.Logger:
    """Return a non-propagating DEBUG logger streaming to a local cutelog
    viewer (default port 19996)."""
    cute_logger = logging.getLogger(name)
    cute_logger.setLevel(logging.DEBUG)
    cute_logger.propagate = False
    handler = SocketHandler('127.0.0.1', 19996)
    fmt = logging.Formatter(
        "%(asctime)s - %(levelname)s - %(name)s - %(message)s")
    handler.setFormatter(fmt)
    cute_logger.addHandler(handler)
    return cute_logger
def __init__(self, host, port, **kwargs):
    """
    Logging handler that transforms each record into GELF (graylog
    extended log format) and sends it over TCP.

    :param host: GELF TCP input host
    :param port: GELF TCP input port
    :param kwargs: forwarded to BaseHandler (GELF formatting options)
    """
    SocketHandler.__init__(self, host, port)
    BaseHandler.__init__(self, **kwargs)
def __init__(self, host, port, use_tls=False, cert_reqs=ssl.CERT_NONE, ca_certs=None):
    """TCP log handler with optional TLS wrapping.

    :param use_tls: wrap the socket with SSL/TLS when True
    :param cert_reqs: certificate validation policy (default: none)
    :param ca_certs: CA bundle used when certificates are validated
    """
    SocketHandler.__init__(self, host, port)
    self.use_tls = use_tls
    self.cert_reqs = cert_reqs
    self.ca_certs = ca_certs
def __init__(self, url, exchange="logging.gelf", exchange_type="fanout", virtual_host="/", routing_key="", ssl=False, heartbeat=0, **kwargs): """Initialize the GELFRabbitHandler :param url: RabbitMQ URL (ex: amqp://guest:guest@localhost:5672/) :type url: str :param exchange: RabbitMQ exchange. A queue binding must be defined on the server to prevent GELF logs from being dropped. :type exchange: str :param exchange_type: RabbitMQ exchange type. :type exchange_type: str :param virtual_host: :type virtual_host: str :param routing_key: :type routing_key: str :param ssl: whether to add TLS to the connection :type ssl: bool """ self.url = url parsed = urlparse(url) if parsed.scheme != "amqp": raise ValueError('invalid URL scheme (expected "amqp"): %s' % url) host = parsed.hostname or "localhost" port = _ifnone(parsed.port, 5672) self.virtual_host = (virtual_host if not unquote(parsed.path[1:]) else unquote(parsed.path[1:])) self.cn_args = { "host": "%s:%s" % (host, port), "userid": _ifnone(parsed.username, "guest"), "password": _ifnone(parsed.password, "guest"), "virtual_host": self.virtual_host, "insist": False, "ssl": ssl, "heartbeat": heartbeat } self.exchange = exchange self.exchange_type = exchange_type self.routing_key = routing_key BaseGELFHandler.__init__(self, **kwargs) SocketHandler.__init__(self, host, port) self.addFilter(ExcludeFilter("amqp"))
def async_send_log_to_socket(host, port, msg):
    """Asynchronously ship one log line to a remote socket.

    :param host: target host
    :param port: target port
    :param msg: text payload, sent UTF-8 encoded
    :return: None
    """
    # NEVER log through current_app.logger in here -- it would recurse.
    print('async_send_log_to_socket: ', msg)
    sender = SocketHandler(host=host, port=port)
    sender.send(msg.encode('utf8'))
def log_to_socket(level=logging.INFO, host="localhost", port=DEFAULT_TCP_LOGGING_PORT):
    """Log all Lantz events to a socket with a specific host address and port.

    :param level: logging level for the lantz handler
    :param host: socket host (default 'localhost')
    :param port: socket port (default DEFAULT_TCP_LOGGING_PORT as defined in
                 the logging module)
    :return: lantz logger
    """
    tcp_handler = SocketHandler(host, port)
    tcp_handler.setLevel(level)
    LOGGER.addHandler(tcp_handler)
    # Lower the logger threshold if it would filter the handler's level out.
    if level < LOGGER.getEffectiveLevel():
        LOGGER.setLevel(level)
    return LOGGER
def init_logging(app):
    """Replace the Flask app's log handlers with a TCP socket handler.

    Validates that the configured log file's directory exists, clears any
    pre-installed handlers, then attaches a SocketHandler at the configured
    LOG_LEVEL.
    """
    log_file_path = app.config['log_file_path']
    log_dir = os.path.dirname(log_file_path)
    if not os.path.exists(log_dir):
        raise Exception('Failed to open log file: no such directory %s' % log_dir)
    # start from a clean slate
    del app.logger.handlers[:]
    tcp_handler = SocketHandler('localhost', DEFAULT_TCP_LOGGING_PORT)
    tcp_handler.setLevel(app.config['LOG_LEVEL'])
    tcp_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
    app.logger.addHandler(tcp_handler)
    app.logger.setLevel(app.config['LOG_LEVEL'])
def __init__(self, host='localhost', port=5672, username='******', password='******',
             exchange='logstash', exchange_type='fanout', virtual_host='/',
             message_type='logstash', tags=None, durable=False, passive=False,
             extra_fields=True, fqdn=False, facility=None, exchange_routing_key='',
             limit_stacktrace=0, limit_string_fields=0, limit_containers=0):
    """AMQP transport for Logstash-formatted records.

    Connection and exchange options are stored on the instance; the
    formatter enforces the optional stacktrace/string/container limits.
    """
    # connection / exchange configuration
    self.host = host
    self.port = port
    self.username = username
    self.password = password
    self.exchange = exchange
    self.exchange_type = exchange_type
    self.exchange_is_durable = durable
    self.declare_exchange_passively = passive
    self.virtual_host = virtual_host
    self.routing_key = exchange_routing_key
    SocketHandler.__init__(self, host, port)
    # logstash formatting
    self.tags = tags or []
    self.formatter = formatter.LogstashFormatter(
        message_type, tags, fqdn,
        limit_stacktrace=limit_stacktrace,
        limit_string_fields=limit_string_fields,
        limit_containers=limit_containers)
    # generic logging options
    self.extra_fields = extra_fields
    self.fqdn = fqdn
    self.facility = facility
def makeSocket(self):
    """Create the outbound socket, optionally wrapped in TLS.

    Uses ``self.sock_timeout`` as the connect timeout; when ``self.use_tls``
    is True the socket is wrapped with the configured ``cert_reqs`` and
    ``ca_certs``.
    NOTE(review): ``ssl.wrap_socket`` was deprecated in Python 3.7 and
    removed in 3.12; ``ssl.SSLContext.wrap_socket`` is the replacement.
    """
    sock = SocketHandler.makeSocket(self, timeout=self.sock_timeout)
    if self.use_tls is True:
        return ssl.wrap_socket(sock, cert_reqs=self.cert_reqs,
                               ca_certs=self.ca_certs)
    return sock
def makeSocket(self, timeout=1):
    """Create the socket; wrap it with the configured client key/cert when
    ``self.ssl`` is set.

    NOTE(review): ``ssl.wrap_socket`` is removed in Python 3.12+; an
    ``ssl.SSLContext`` with ``load_cert_chain`` is the modern equivalent.
    """
    s = SocketHandler.makeSocket(self, timeout)
    if self.ssl:
        return ssl.wrap_socket(s, keyfile=self.keyfile, certfile=self.certfile)
    return s
def log_to_socket(level=logging.INFO, host='localhost', port=DEFAULT_TCP_LOGGING_PORT):
    """Log all Lantz events to a socket with a specific host address and port.

    :param level: logging level for the lantz handler
    :param host: socket host (default 'localhost')
    :param port: socket port (default DEFAULT_TCP_LOGGING_PORT as defined in
                 the logging module)
    :return: lantz logger
    """
    net_handler = SocketHandler(host, port)
    net_handler.setLevel(level)
    LOGGER.addHandler(net_handler)
    effective = LOGGER.getEffectiveLevel()
    # Keep the logger at least as permissive as the new handler.
    if effective > level:
        LOGGER.setLevel(level)
    return LOGGER
def __init__(self, url, exchange='logging.gelf', exchange_type='fanout',
             virtual_host='/', routing_key='', **kwargs):
    """Initialize the GELFRabbitHandler.

    :param url: RabbitMQ URL (ex: amqp://guest:guest@localhost:5672/)
    :type url: str
    :param exchange: RabbitMQ exchange. A queue binding must be defined on
        the server to prevent GELF logs from being dropped.
    :type exchange: str
    :param exchange_type: RabbitMQ exchange type.
    :type exchange_type: str
    :param virtual_host: overridden by a non-empty path in ``url``.
    :type virtual_host: str
    :param routing_key: routing key used when publishing.
    :type routing_key: str
    """
    self.url = url
    parsed = urlparse(url)
    if parsed.scheme != 'amqp':
        raise ValueError('invalid URL scheme (expected "amqp"): %s' % url)
    host = parsed.hostname or 'localhost'
    port = _ifnone(parsed.port, 5672)
    # A virtual host embedded in the URL path wins over the argument.
    url_vhost = unquote(parsed.path[1:])
    self.virtual_host = url_vhost or virtual_host
    self.cn_args = {
        'host': '%s:%s' % (host, port),
        'userid': _ifnone(parsed.username, 'guest'),
        'password': _ifnone(parsed.password, 'guest'),
        'virtual_host': self.virtual_host,
        'insist': False,
    }
    self.exchange = exchange
    self.exchange_type = exchange_type
    self.routing_key = routing_key
    BaseGELFHandler.__init__(self, **kwargs)
    SocketHandler.__init__(self, host, port)
    # Keep the AMQP library's own records out of this handler.
    self.addFilter(ExcludeFilter('amqplib'))
def getLogger(name, level=logging.DEBUG):
    """Build a standalone Logger writing both to the log-server socket and
    to the console, with a shared timestamped format.

    Note: a fresh ``logging.Logger`` is constructed directly, deliberately
    bypassing the manager cache used by ``logging.getLogger``.
    """
    logger = logging.Logger(name)
    datefmt = "%Y-%m-%d %H:%M:%S"
    format_str = "[%(asctime)s]: %(levelname)s - %(message)s"
    shared_formatter = logging.Formatter(format_str, datefmt)
    # socket first, console second (same order records are dispatched in)
    for handler in (SocketHandler('localhost', log_settings.Instance.PORT),
                    logging.StreamHandler()):
        handler.setFormatter(shared_formatter)
        handler.setLevel(level)
        logger.addHandler(handler)
    return logger
def __init__(self, url, exchange='logging.gelf', exchange_type='fanout',
             virtual_host='/', routing_key='', **kwargs):
    """Initialize the GELFRabbitHandler.

    :param url: RabbitMQ URL (ex: amqp://guest:guest@localhost:5672/)
    :type url: str
    :param exchange: RabbitMQ exchange (default 'logging.gelf'). A queue
        binding must be defined on the server to prevent log messages from
        being dropped.
    :type exchange: str
    :param exchange_type: RabbitMQ exchange type (default 'fanout').
    :type exchange_type: str
    :param virtual_host: overridden by a non-empty path in ``url``.
    :type virtual_host: str
    :param routing_key: routing key used when publishing.
    :type routing_key: str
    """
    self.url = url
    parsed = urlparse(url)
    if parsed.scheme != 'amqp':
        raise ValueError('invalid URL scheme (expected "amqp"): %s' % url)
    host = parsed.hostname or 'localhost'
    port = _ifnone(parsed.port, 5672)
    # Prefer a virtual host embedded in the URL path over the argument.
    vhost_from_url = unquote(parsed.path[1:])
    self.virtual_host = vhost_from_url if vhost_from_url else virtual_host
    self.cn_args = {
        'host': '%s:%s' % (host, port),
        'userid': _ifnone(parsed.username, 'guest'),
        'password': _ifnone(parsed.password, 'guest'),
        'virtual_host': self.virtual_host,
        'insist': False,
    }
    self.exchange = exchange
    self.exchange_type = exchange_type
    self.routing_key = routing_key
    BaseGELFHandler.__init__(self, **kwargs)
    SocketHandler.__init__(self, host, port)
    # Do not re-log the AMQP library's own records.
    self.addFilter(ExcludeFilter('amqplib'))
def create_logger(self):
    """Set up the coordinator logger with file and cutelog socket outputs."""
    self.logger = logging.getLogger('coordinator')
    file_handler = logging.FileHandler('coordinator.log', mode='a')
    self.logger.addHandler(file_handler)
    self.logger.info("Initializing Coordinator")
    # level 1 -> effectively everything
    self.logger.setLevel(1)
    cutelog_handler = SocketHandler('0.0.0.0', 19996)
    self.logger.addHandler(cutelog_handler)
    self.logger.info("Created logger for coordinator")
def __init__(self, host, port=12201, **kwargs):
    """Initialize the GELFTCPHandler

    :param host: GELF TCP input host.
    :type host: str

    :param port: GELF TCP input port.
    :type port: int

    .. attention::
        GELF TCP does not support compression due to the use of the null
        byte (``\\0``) as frame delimiter.

        Thus, :class:`.handler.GELFTCPHandler` does not support setting
        ``compress`` to :obj:`True` and is locked to :obj:`False`.
    """
    # compress is forced off here; everything else is forwarded unchanged
    BaseGELFHandler.__init__(self, compress=False, **kwargs)
    SocketHandler.__init__(self, host, port)
def test_handle(self, record, response, server):
    """The TCP handler should unpickle a framed log record from the stream
    and dispatch it to ``_handle_log_record`` exactly once.

    ``record``, ``response`` and ``server`` are pytest fixtures; the network
    connection is replaced with a fake ``recv`` serving the pickled bytes.
    """
    sh = SocketHandler("localhost", "9999")
    # makePickle yields the length-prefixed pickle the server side expects
    stream = sh.makePickle(record)

    def recv(n):
        # Fake socket.recv: serve `stream` in n-byte slices, tracking the
        # read offset on the function object itself.
        # TODO: Can use nonlocal on Python 3
        idx = recv.idx
        s = stream[idx : idx + n]
        recv.idx += n
        return s

    recv.idx = 0
    with patch.object(_Handler, "_handle_log_record") as handle_log_record:
        handler = _Handler(response, "localhost:9999", server)
        with patch.object(handler, "connection", new=Mock(recv=recv)) as conn:
            handler.handle()
    handle_log_record.assert_called_with(record)
def __init__( self, host="localhost", port=5672, username="******", password="******", exchange="logstash", exchange_type="fanout", virtual_host="/", message_type="logstash", tags=None, durable=False, passive=False, version=0, extra_fields=True, fqdn=False, facility=None, exchange_routing_key="", ): # AMQP parameters self.host = host self.port = port self.username = username self.password = password self.exchange_type = exchange_type self.exchange = exchange self.exchange_is_durable = durable self.declare_exchange_passively = passive self.virtual_host = virtual_host self.routing_key = exchange_routing_key SocketHandler.__init__(self, host, port) # Extract Logstash paramaters self.tags = tags or [] fn = (formatter.LogstashFormatterVersion1 if version == 1 else formatter.LogstashFormatterVersion0) self.formatter = fn(message_type, tags, fqdn) # Standard logging parameters self.extra_fields = extra_fields self.fqdn = fqdn self.facility = facility
def setup(logger, zodb_root, sqlalchemy_uri, dblog=False, restore=False):
    """Create and configure the Flask app used by the Indico migration.

    Loads plugins, wires the database (optionally mirroring SQL queries to a
    local TCP log viewer), prepares empty DB tables unless restoring, and
    resolves the instance timezone from the ZODB root.

    :param logger: migration logger exposing ``fatal_error``
    :param zodb_root: opened ZODB root mapping
    :param sqlalchemy_uri: target database URI
    :param dblog: when True, send DB query logs to 127.0.0.1:9020
    :param restore: when True, skip the empty-database table preparation
    :return: ``(app, tz)`` tuple
    """
    app = IndicoFlask('indico_migrate')
    app.config['PLUGINENGINE_NAMESPACE'] = 'indico.plugins'
    app.config['SQLALCHEMY_DATABASE_URI'] = sqlalchemy_uri
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
    _monkeypatch_config()
    plugin_engine.init_app(app)
    if not plugin_engine.load_plugins(app):
        print(
            cformat('%[red!]Could not load some plugins: {}%[reset]').format(
                ', '.join(plugin_engine.get_failed_plugins(app))))
        sys.exit(1)
    db.init_app(app)
    if dblog:
        app.debug = True
        apply_db_loggers(app, force=True)
        db_logger = Logger.get('_db')
        db_logger.level = logging.DEBUG
        db_logger.propagate = False
        # DB queries go to a log-viewer process listening locally
        db_logger.addHandler(SocketHandler('127.0.0.1', 9020))
    # avoid "no handlers registered" warnings
    logging.root.addHandler(logging.NullHandler())
    import_all_models()
    configure_mappers()
    alembic_migrate.init_app(app, db, os.path.join(app.root_path, 'migrations'))
    try:
        tz = pytz.timezone(
            getattr(zodb_root['MaKaCInfo']['main'], '_timezone', 'UTC'))
    except KeyError:
        # no timezone info in the ZODB root -> fall back to UTC
        tz = pytz.utc
    with app.app_context():
        if not restore:
            all_tables = sum(get_all_tables(db).values(), [])
            if all_tables:
                if db_has_data():
                    logger.fatal_error(
                        'Your database is not empty!\n'
                        'If you want to reset it, please drop and recreate it first.'
                    )
            else:
                # the DB is empty, prepare DB tables
                # prevent alembic from messing with the logging config
                tmp = logging.config.fileConfig
                logging.config.fileConfig = lambda fn: None
                prepare_db(empty=True,
                           root_path=get_root_path('indico'),
                           verbose=False)
                logging.config.fileConfig = tmp
        _create_oauth_apps()
    return app, tz
def __init__(self, host, port=12201, chunk_size=WAN_CHUNK, debugging_fields=True,
             extra_fields=True, fqdn=False, localname=None, facility=None,
             level_names=False, tls=False, tls_server_name=None, tls_cafile=None,
             tls_capath=None, tls_cadata=None, tls_client_cert=None,
             tls_client_key=None, tls_client_password=None):
    """GELF handler over TCP with optional TLS transport.

    GELF formatting options go to ``BaseGELFHandler`` (compression is always
    off for the TCP transport); when ``tls`` is set, an ``SSLContext`` is
    built from the supplied CA and client-certificate material.
    """
    BaseGELFHandler.__init__(self, host, port, chunk_size, debugging_fields,
                             extra_fields, fqdn, localname, facility,
                             level_names, False)
    SocketHandler.__init__(self, host, int(port))
    self.tls = tls
    if not self.tls:
        return
    # remember the TLS material for introspection / reconnects
    self.tls_cafile = tls_cafile
    self.tls_capath = tls_capath
    self.tls_cadata = tls_cadata
    self.tls_client_cert = tls_client_cert
    self.tls_client_key = tls_client_key
    self.tls_client_password = tls_client_password
    self.ssl_context = ssl.create_default_context(
        purpose=ssl.Purpose.SERVER_AUTH,
        cafile=self.tls_cafile,
        capath=self.tls_capath,
        cadata=self.tls_cadata)
    self.tls_server_name = tls_server_name
    # Hostname verification only makes sense when a server name is pinned.
    self.ssl_context.check_hostname = self.tls_server_name is not None
    if self.tls_client_cert is not None:
        self.ssl_context.load_cert_chain(self.tls_client_cert,
                                         self.tls_client_key,
                                         self.tls_client_password)
def create_logger(self, logger_name=None):
    """Create a per-class logger writing to ``logs/<name>.log`` and to a
    local cutelog viewer on port 19996.

    :param logger_name: explicit name; defaults to the instance's class name
    """
    name = logger_name if logger_name is not None else self.__class__.__name__
    self.logger = logging.getLogger(name)
    self.logger.addHandler(
        logging.FileHandler('logs/%s.log' % name, mode='w'))
    # level 1 -> effectively everything
    self.logger.setLevel(1)
    self.logger.addHandler(SocketHandler('127.0.0.1', 19996))
    self.logger.info("Created logger: %s" % name)
def add_socket_handler(socket_host, socket_ports, level, logger):
    """Attach SocketHandler(s) to *logger* while avoiding duplicate delivery.

    :param {string} socket_host: remote ip/host to send logs to (falsy: no-op)
    :param {int | list<int>} socket_ports: port number, or list of port
        numbers, used together with socket_host
    :param {bool | string} level: logging level for the new handlers
    :param {logging.Logger} logger: the logger the SocketHandler(s) join

    For each port: any identical existing handler on *logger* is removed
    first; then, if an ancestor logger (or the root logger) already ships
    the same (host, port) at an equal-or-lower level, no new handler is
    added, so one record never reaches the remote end twice.
    NOTE(review): the `long` check makes this Python 2 only.
    """
    if not socket_host or not socket_ports:
        return
    socket_ports = [socket_ports] if isinstance(socket_ports, (int, long)) else socket_ports
    for socket_port in socket_ports:
        # Remove any previous handler for the same (host, port) pair, which
        # is simpler than mutating the old handler in place.
        logger.handlers[:] = [
            h for h in logger.handlers
            if not (type(h) is SocketHandler and h.host == socket_host and h.port == socket_port)
        ]
        # Walk dotted ancestors, stopping at the first one that already
        # covers this (host, port, level) target.
        parent_name = logger.name
        parent_handlers = []
        while parent_name.rfind('.') > 0:
            parent_name = parent_name[:parent_name.rfind('.')]
            parent_logger = getLogger(parent_name, add_parent_filter=False)
            parent_handlers.extend([
                h for h in parent_logger.handlers
                if type(h) is SocketHandler and h.host == socket_host
                and h.port == socket_port and h.level <= level
            ])
            if parent_handlers:
                break
        else:
            # no dotted ancestor matched; check the root logger as well
            parent_handlers.extend([
                h for h in logging.root.handlers
                if type(h) is SocketHandler and h.host == socket_host
                and h.port == socket_port and h.level <= level
            ])
        if not parent_handlers:
            # no ancestor ships this stream already -> add our own handler
            handler = SocketHandler(socket_host, socket_port)
            handler.setLevel(level)
            logger.addHandler(handler)
def __init__(self, url, exchange='logging.gelf', debugging_fields=True, extra_fields=True, fqdn=False):
    """GELF-over-RabbitMQ handler; connection settings come from an
    ``amqp://`` URL and are stored in ``self.cn_args``."""
    self.url = url
    parsed = urlparse(url)
    if parsed.scheme != 'amqp':
        raise ValueError('invalid URL scheme (expected "amqp"): %s' % url)
    host = parsed.hostname or 'localhost'
    port = _ifnone(parsed.port, 5672)
    self.cn_args = {
        'host': '%s:%s' % (host, port),
        'username': _ifnone(parsed.username, 'guest'),
        'password': _ifnone(parsed.password, 'guest'),
        'virtual_host': '/',
        'insist': False,
    }
    self.exchange = exchange
    self.debugging_fields = debugging_fields
    self.extra_fields = extra_fields
    self.fqdn = fqdn
    SocketHandler.__init__(self, host, port)
    # Keep the AMQP library's own records out of this handler.
    self.addFilter(ExcludeFilter('amqplib'))
def create_logger():
    """Build the smart_vrt logger with file, console and cutelog outputs.

    All three handlers share one format and the level configured under
    ``CONFIG["logging"]["file_level"]``; the log file name is timestamped.
    """
    stamp = datetime.now().strftime('%Y-%m-%d_%H-%M')
    log_filename = os.path.join(base_dir, "{}.log".format(stamp))
    logger = logging.getLogger("smart_vrt")
    configured_level = CONFIG["logging"]["file_level"]
    logger.setLevel(configured_level)
    shared_format = logging.Formatter(
        "%(asctime)s.%(msecs)03d - %(levelname)-8s - %(threadName)-10s %(lineno)3d: %(message)s"
    )
    all_handlers = (
        logging.FileHandler(log_filename),   # dated file, debug and up
        logging.StreamHandler(),             # console
        SocketHandler('127.0.0.1', 19996),   # cutelog viewer
    )
    for handler in all_handlers:
        handler.setLevel(configured_level)
        handler.setFormatter(shared_format)
        logger.addHandler(handler)
    return logger
def __init__(self, fname, is_detail, queue=None):
    """Build a logger that writes to the console plus one of: a remote log
    server, a queue-fed log process, or a timed-rotating local file.

    :param fname: logger name; also the file name in the rotating-file case
    :param is_detail: select the detailed format string from the config
    :param queue: optional queue feeding a separate log process
    :raises FileExistsError: if the configured log dir is missing or unwritable
    """
    Logger.queue = queue
    dir_ = config_dict["dir"]
    # BUG FIX: the original used `or`, so it raised only when the directory
    # was BOTH missing AND unwritable; either failure must raise (as the
    # error message itself says).
    if not (ospath.isdir(dir_) and osaccess(dir_, osW_OK)):
        raise FileExistsError(
            f"logger dir {dir_} is not exists or is not writable")
    serve_address = config_dict["address"]
    level = config_dict["level"]
    fmt = config_dict["dfmt"] \
        if is_detail else config_dict["fmt"]
    # build logger
    logger = logging.Logger(fname)
    logger.setLevel(level)
    formatter = logging.Formatter(fmt)
    # always echo to the console
    console = logging.StreamHandler(stdout)
    console.setFormatter(formatter)
    # choose the persistent destination
    if serve_address:
        # use remote log server
        handler = SocketHandler(*serve_address)
    elif Logger.queue:
        # use local log process
        handler = QueueHandler(Logger.queue)
    else:
        # simple timed-rotation file log
        handler = TimedRotatingFileHandler(
            fname, when=config_dict["when"],
            backupCount=config_dict["backup_count"])
    handler.setFormatter(formatter)
    logger.addHandler(console)
    logger.addHandler(handler)
    self.logger = logger
def init_logging(config):
    """Attach a TCP socket handler to the 'jb_async' logger and return it.

    :param config: mapping providing 'LOG_LEVEL'
    """
    logger = logging.getLogger('jb_async')
    tcp_handler = SocketHandler('localhost', DEFAULT_TCP_LOGGING_PORT)
    tcp_handler.setLevel(config['LOG_LEVEL'])
    tcp_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
    logger.addHandler(tcp_handler)
    logger.setLevel(config['LOG_LEVEL'])
    return logger
def makeSocket(self, timeout=1, **kwargs):
    """Create the socket with TCP keep-alive tuned, optionally TLS-wrapped.

    Keep-alive knobs come from ``kwargs``: ``keep_alive`` (default 1),
    ``after_idle_sec`` (1), ``interval_sec`` (3), ``max_fails`` (5).
    NOTE(review): the logging framework invokes makeSocket() without kwargs,
    so in practice the defaults are always used here.
    NOTE(review): ``ssl.wrap_socket`` is removed in Python 3.12+.
    """
    sock = SocketHandler.makeSocket(self, timeout=timeout)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, kwargs.get('keep_alive', 1))
    # The fine-grained keep-alive options are not exposed on every platform.
    if platform.system() in ('Linux', 'Windows'):
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, kwargs.get('after_idle_sec', 1))
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, kwargs.get('interval_sec', 3))
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, kwargs.get('max_fails', 5))
    if self.use_tls is True:
        return ssl.wrap_socket(sock, cert_reqs=self.cert_reqs, ca_certs=self.ca_certs)
    return sock
def run(self):
    """Consumer loop: pop queued payloads forever and push them down the
    socket.

    Runs on the daemon thread started in the handler's constructor; blocks
    on the queue while idle, so it costs nothing when no logs arrive.
    """
    # `print` statement converted to the function form: identical output on
    # Python 2 for a single argument, and valid syntax on Python 3.
    print('Start logger thread')
    while True:
        payload = self.queue.get()
        SocketHandler.send(self, payload)
def __init__(self, host, port=5959, message_type='logstash', fqdn=False, version=1):
    """TCP Logstash handler; the formatter version is chosen by ``version``
    (1 selects the v1 wire format, anything else falls back to v0)."""
    SocketHandler.__init__(self, host, port)
    formatter_cls = (formatter.LogstashFormatterVersion1 if version == 1
                     else formatter.LogstashFormatterVersion0)
    self.formatter = formatter_cls(message_type, [], fqdn)
def createSocket(self):
    """Open the connection as usual, then replay the stored header bytes so
    the receiver can identify this stream after every (re)connect."""
    SocketHandler.createSocket(self)
    # self.sock stays None when the connect attempt failed; only send the
    # header on a live connection.
    if self.sock:
        self.send(self.hdr)
def __init__(self, source, path, host, port):
    """Socket handler that prepares a serialized identification header.

    :param source: stream identity, stored in the Header's ``name`` field
    :param path: target path, kept on the instance for use elsewhere
    :param host: log server host
    :param port: log server port
    """
    self.path = path
    SocketHandler.__init__(self, host, port)
    # Build the length-prefixed header once; presumably replayed on each
    # (re)connect by createSocket — confirm against the enclosing class.
    header = Header()
    header.name = str(source)
    self.hdr = self._pack16(header.SerializeToString())
def makeSocket(self, timeout=1):
    """Create the socket; wrap it with the configured client key/cert and CA
    bundle when ``self.ssl`` is set.

    NOTE(review): ``ssl.wrap_socket`` is removed in Python 3.12+; an
    ``ssl.SSLContext`` with ``load_cert_chain``/``load_verify_locations``
    is the modern equivalent.
    """
    s = SocketHandler.makeSocket(self, timeout)
    if self.ssl:
        return ssl.wrap_socket(s, keyfile=self.keyfile,
                               certfile=self.certfile,
                               ca_certs=self.ca_certs)
    return s
import logging, logging.handlers
from logging.handlers import SocketHandler
import time

# Smoke-test script: send records from the 'sms' logger hierarchy to a
# local TCP log server on port 9030.

logLevel = logging.INFO

logger_sms = logging.getLogger('sms')
logger = logging.getLogger('sms.sgip')
# `print` statements converted to the function form: identical output on
# Python 2 for a single argument, and valid syntax on Python 3.
print(logger_sms.parent.name)
print(logger.parent.name)

handler = SocketHandler('localhost', 9030)
datefmt = "%Y-%m-%d %H:%M:%S"
format_str = "[%(asctime)s]: %(levelname)s %(message)s"
formatter = logging.Formatter(format_str, datefmt)
handler.setFormatter(formatter)

# NOTE: the handler is attached to both loggers, so a record emitted on
# 'sms.sgip' is delivered twice (once directly, once via propagation).
logger_sms.addHandler(handler)
logger.addHandler(handler)
logger.setLevel(logLevel)

logger.debug('test')
logger.info('test2------------------')
def __init__(self, host, port, keyfile=None, certfile=None, ca_certs=None, ssl=True):
    """TCP handler with optional SSL client credentials.

    :param keyfile: private key matching the client certificate
    :param certfile: client certificate file
    :param ca_certs: CA bundle used for server verification
    :param ssl: enable TLS wrapping (default True)
    """
    SocketHandler.__init__(self, host, port)
    self.ssl = ssl
    self.keyfile = keyfile
    self.certfile = certfile
    self.ca_certs = ca_certs
def makeSocket(self, timeout=1):
    """Create the transport socket with TCP keep-alive enabled, so dead
    peers are eventually detected on otherwise idle connections."""
    s = SocketHandler.makeSocket(self, timeout)
    s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
    return s
def __init__(self, host, port, fname):
    """Socket handler that also remembers a file name.

    :param fname: stored as ``self.filename``; not read here, presumably
        consumed by other methods of the enclosing class — confirm there.
    """
    self.filename = fname
    SocketHandler.__init__(self, host, port)
def __init__(self, *args, **kwargs):
    """SocketHandler variant with reconnect back-off disabled.

    Zeroing retryStart/retryMax/retryFactor makes the base class's retry
    delay compute to zero, so a fresh connection is attempted on every
    emit instead of exponentially backing off.
    """
    SocketHandler.__init__(self, *args, **kwargs)
    self.retryStart = 0
    self.retryMax = 0
    self.retryFactor = 0
def __init__(self, host, port): SocketHandler.__init__(self, host, port) # # Attemt to create the socket and let flow up the exception self.sock = self.makeSocket()