def get_logger(args, config, uuid=None, name=__name__):
    """Set up per-run logging: optional console stream plus per-level rotating files.

    :param args: parsed CLI namespace; reads ``args.debug`` and ``args.verbose``.
    :param config: mapping with a ``['logging']['path']`` log-directory entry.
    :param uuid: correlation id injected into every record.  A fresh
        ``uuid4().hex`` is generated per call when omitted.  (The previous
        ``uuid=uuid4().hex`` default was evaluated once at import time, so
        every call that relied on the default shared the same id.)
    :param name: logger name.
    :returns: ``(Logger adapter, uuid)`` tuple.
    """
    if uuid is None:
        uuid = uuid4().hex
    fmt = logging.Formatter(
        '{asctime} - {module}:{lineno} - {levelname} - {uuid} - {msg}',
        style='{')
    level = logging.DEBUG if args.debug else logging.INFO
    logger = logging.getLogger(name)
    logger.setLevel(level)
    if args.verbose:
        stream = logging.StreamHandler()
        stream.setLevel(level)
        stream.setFormatter(fmt)
        logger.addHandler(stream)
    # One file per severity band; LevelFilter restricts records to that band.
    file_levels = (
        ('debug.log', (logging.DEBUG, )),
        ('main.log', (logging.INFO, )),
        ('error.log', (logging.WARNING, logging.ERROR, logging.CRITICAL)),
    )
    filepath = config['logging'].get('path', './logs/')
    # exist_ok avoids the check-then-create race when several processes start.
    os.makedirs(filepath, exist_ok=True)
    for filename, levels in file_levels:
        # os.path.join copes with configured paths lacking a trailing slash.
        fileh = TimedRotatingFileHandler(os.path.join(filepath, filename))
        fileh.setFormatter(fmt)
        fileh.setLevel(level)
        fileh.addFilter(LevelFilter(levels))
        logger.addHandler(fileh)
    return Logger(logger, {'uuid': uuid}), uuid
def setup_logger(name=__name__, cfg_path='.'):
    """Configure the root logger with a daily-rotating placement log file."""
    if not os.path.isdir(cfg_path):
        os.makedirs(cfg_path)
    log_file = os.path.join(cfg_path, 'placement.log')
    handler = TimedRotatingFileHandler(log_file,
                                       when='D',
                                       interval=1,
                                       backupCount=7)
    handler.setLevel('DEBUG')
    handler.setFormatter(logging.Formatter(
        "[%(levelname)-4s: %(asctime)s, "
        "%(filename)s:%(lineno)d] %(message)s",
        datefmt='%m-%d %H:%M:%S'))
    root = logging.getLogger('')
    root.setLevel('DEBUG')
    # Unless PLACEMENT_NOFILTER is set, only keep 'bundleplacer' records.
    if os.environ.get('PLACEMENT_NOFILTER', None) is None:
        handler.addFilter(logging.Filter(name='bundleplacer'))
    root.addHandler(handler)
    return root
def setup_logger(name=__name__, cfg_path="."):
    """Set up root logging into a rotating placement.log under cfg_path."""
    if not os.path.isdir(cfg_path):
        os.makedirs(cfg_path)
    handler = TimedRotatingFileHandler(
        os.path.join(cfg_path, "placement.log"),
        when="D", interval=1, backupCount=7)
    handler.setLevel("DEBUG")
    fmt = logging.Formatter(
        "[%(levelname)-4s: %(asctime)s, "
        "%(filename)s:%(lineno)d] %(message)s",
        datefmt="%m-%d %H:%M:%S")
    handler.setFormatter(fmt)
    # Filtering can be disabled by exporting PLACEMENT_NOFILTER.
    if os.environ.get("PLACEMENT_NOFILTER", None) is None:
        handler.addFilter(logging.Filter(name="placement"))
    root_logger = logging.getLogger("")
    root_logger.setLevel("DEBUG")
    root_logger.addHandler(handler)
    return root_logger
def configure_logging(debug=False):
    """Reset and configure the module-level ``logger`` with a console handler
    and a midnight-rotating file handler for Bazarr.

    :param debug: when True, log at DEBUG and raise third-party logger
        verbosity; otherwise log at INFO and quiet noisy libraries.
    """
    if not debug:
        log_level = "INFO"
    else:
        log_level = "DEBUG"
    # Drop previously attached handlers so repeated calls don't duplicate output.
    logger.handlers = []
    logger.setLevel(log_level)
    # Console logging
    # Full tracebacks on the console only in debug mode; NoExceptionFormatter
    # presumably suppresses them otherwise — confirm in its definition.
    ch = logging.StreamHandler()
    cf = (
        debug and logging.Formatter or NoExceptionFormatter
    )('%(asctime)-15s - %(name)-32s (%(thread)x) : %(levelname)s (%(module)s:%(lineno)d) - %(message)s'
      )
    ch.setFormatter(cf)
    ch.setLevel(log_level)
    # ch.addFilter(MyFilter())
    logger.addHandler(ch)
    # File Logging
    global fh
    fh = TimedRotatingFileHandler(os.path.join(args.config_dir, 'log/bazarr.log'),
                                  when="midnight",
                                  interval=1,
                                  backupCount=7)
    f = OneLineExceptionFormatter(
        '%(asctime)s|%(levelname)-8s|%(name)-32s|%(message)s|',
        '%d/%m/%Y %H:%M:%S')
    fh.setFormatter(f)
    # NOTE(review): filter names suggest these scrub sensitive values (API
    # keys, public IPs) from the on-disk log — confirm in their definitions.
    fh.addFilter(BlacklistFilter())
    fh.addFilter(PublicIPFilter())
    if debug:
        logging.getLogger("apscheduler").setLevel(logging.DEBUG)
        logging.getLogger("subliminal").setLevel(logging.DEBUG)
        logging.getLogger("subliminal_patch").setLevel(logging.DEBUG)
        logging.getLogger("subzero").setLevel(logging.DEBUG)
        logging.getLogger("git").setLevel(logging.DEBUG)
        logging.getLogger("apprise").setLevel(logging.DEBUG)
        # Record environment details once at startup for easier bug reports.
        logging.debug('Bazarr version: %s', os.environ["BAZARR_VERSION"])
        logging.debug('Bazarr branch: %s', settings.general.branch)
        logging.debug('Operating system: %s', platform.platform())
        logging.debug('Python version: %s', platform.python_version())
    else:
        # Quiet noisy third-party loggers in normal operation.
        logging.getLogger("apscheduler").setLevel(logging.WARNING)
        logging.getLogger("subliminal").setLevel(logging.CRITICAL)
        logging.getLogger("subliminal_patch").setLevel(logging.CRITICAL)
        logging.getLogger("subzero").setLevel(logging.ERROR)
        logging.getLogger("enzyme").setLevel(logging.CRITICAL)
        logging.getLogger("guessit").setLevel(logging.WARNING)
        logging.getLogger("rebulk").setLevel(logging.WARNING)
        logging.getLogger("stevedore.extension").setLevel(logging.CRITICAL)
        logging.getLogger("geventwebsocket.handler").setLevel(logging.WARNING)
    fh.setLevel(log_level)
    logger.addHandler(fh)
def setup_logging(debugging=False):
    """Attach crash and query rotating handlers to the root logger."""
    # Crash log: ERROR+, rotates every second, file opened lazily.
    os.makedirs(CRASH_DIR, exist_ok=True)
    crash_handler = logging.handlers.TimedRotatingFileHandler(
        filename=os.path.join(CRASH_DIR, 'crash'),
        when='s', interval=1, delay=True)
    crash_handler.setLevel(logging.ERROR)
    # Query log: INFO+, rotates at midnight, restricted by query_filter.
    os.makedirs(QUERY_DIR, exist_ok=True)
    query_handler = TimedRotatingFileHandler(
        filename=os.path.join(QUERY_DIR, 'queries'), when='midnight')
    query_handler.setLevel(logging.INFO)
    query_handler.addFilter(query_filter)
    query_handler.setFormatter(
        logging.Formatter("%(asctime)s: %(filename)s: %(message)s"))
    root = logging.getLogger()
    root.addHandler(crash_handler)
    root.addHandler(query_handler)
    if not debugging:
        root.setLevel(logging.INFO)
        return root
    # Debug mode adds a verbose console handler as well.
    console = logging.StreamHandler()
    console.setLevel(logging.DEBUG)
    console.setFormatter(logging.Formatter(
        "%(levelname)s:%(name)s [%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s"))
    root.addHandler(console)
    root.setLevel(logging.DEBUG)
    return root
def get_logger(verbose=False, debug=False, uuid=None, name=__name__, filepath='./logs/'):
    """Set up logging with per-level rotating files and an optional console stream.

    :param verbose: also echo records to a stream handler when True.
    :param debug: kept for interface compatibility; the level is always
        DEBUG here (NOTE(review): this parameter is currently unused).
    :param uuid: correlation id returned alongside the adapter.  A fresh
        ``uuid4().hex`` is generated per call when omitted.  (The previous
        ``uuid=uuid4().hex`` default was evaluated once at import time, so
        all default calls shared one id.)
    :param name: logger name.
    :param filepath: directory receiving the log files.
    :returns: ``(Logger adapter, uuid)`` tuple.
    """
    if uuid is None:
        uuid = uuid4().hex
    fmt = logging.Formatter(
        '{asctime} - {levelname} - {module}:{funcName}:{lineno} - {processName}:{threadName} - {msg}',
        style='{')
    level = logging.DEBUG
    logger = logging.getLogger(name)
    logger.setLevel(level)
    if verbose:
        stream = logging.StreamHandler()
        stream.setLevel(level)
        stream.setFormatter(fmt)
        logger.addHandler(stream)
    # One file per severity band; LevelFilter keeps each file to its band.
    file_levels = (
        ('debug.log', (logging.DEBUG, )),
        ('main.log', (logging.INFO, )),
        ('error.log', (logging.WARNING, logging.ERROR, logging.CRITICAL)),
    )
    # exist_ok avoids the check-then-create race between processes.
    os.makedirs(filepath, exist_ok=True)
    for filename, levels in file_levels:
        # os.path.join copes with paths lacking a trailing slash.
        fileh = TimedRotatingFileHandler(os.path.join(filepath, filename))
        fileh.setFormatter(fmt)
        fileh.setLevel(level)
        fileh.addFilter(LevelFilter(levels))
        logger.addHandler(fileh)
    return Logger(logger, {'uuid': uuid}), uuid
def setup_logger(name=__name__, cfg_path='.'):
    """Route root logging into a daily-rotating placement.log file."""
    if not os.path.isdir(cfg_path):
        os.makedirs(cfg_path)
    logfile = os.path.join(cfg_path, 'placement.log')
    file_handler = TimedRotatingFileHandler(
        logfile, when='D', interval=1, backupCount=7)
    file_handler.setLevel('DEBUG')
    file_handler.setFormatter(logging.Formatter(
        "[%(levelname)-4s: %(asctime)s, "
        "%(filename)s:%(lineno)d] %(message)s",
        datefmt='%m-%d %H:%M:%S'))
    # Keep only 'bundleplacer' records unless filtering is disabled via env.
    if os.environ.get('PLACEMENT_NOFILTER', None) is None:
        file_handler.addFilter(logging.Filter(name='bundleplacer'))
    root = logging.getLogger('')
    root.setLevel('DEBUG')
    root.addHandler(file_handler)
    return root
def setup_logging(log_file):
    """Configure the 'showroom' logger: DEBUG to a rotating file, INFO to console.

    :param log_file: path of the midnight-rotating log file.
    """
    # TODO: more advanced filters, logging info like when rooms go live to console
    # https://docs.python.org/3/library/logging.config.html#logging-config-dictschema
    showroom_filter = logging.Filter(name="showroom")
    rollover_at = dt_time(tzinfo=TOKYO_TZ)
    file_handler = TimedRotatingFileHandler(
        log_file, encoding='utf8', when='midnight', atTime=rollover_at)
    file_handler.setFormatter(logging.Formatter(
        fmt='%(asctime)s %(name)-12s %(levelname)-8s %(threadName)s:\n%(message)s',
        datefmt='%m-%d %H:%M:%S'))  # leave this in local time?
    file_handler.addFilter(showroom_filter)
    file_handler.setLevel(logging.DEBUG)
    console_handler = logging.StreamHandler()
    console_fmt = logging.Formatter(fmt='%(asctime)s %(message)s', datefmt=HHMM_FMT)
    # Console timestamps are converted by tokyotime.
    console_fmt.converter = tokyotime
    console_handler.setFormatter(console_fmt)
    console_handler.setLevel(logging.INFO)
    console_handler.addFilter(showroom_filter)
    logger = logging.getLogger('showroom')
    logger.setLevel(logging.DEBUG)
    logger.propagate = False
    # Guard against double-adding handlers if this ever runs twice.
    for handler in (file_handler, console_handler):
        if handler not in logger.handlers:
            logger.addHandler(handler)
def setupLogging(app):
    """Register the custom ACCESS level and attach access/app rotating handlers."""
    logging.addLevelName(ACCESS_LEVEL, "ACCESS")
    logging.Logger.access = access
    # Rotation parameters are shared by both handlers.
    rotation = dict(when=app.config["LOG_WHEN"],
                    interval=app.config["LOG_INTERVAL"],
                    backupCount=app.config["LOG_BACKUP_COUNT"])
    access_handler = TimedRotatingFileHandler(app.config["LOG_FILE_ACCESS"], **rotation)
    access_handler.setFormatter(logging.Formatter(app.config["LOG_ACCESS_FORMAT"]))
    access_handler.addFilter(FilterAccessLogs())
    app.logger.addHandler(access_handler)
    app_handler = TimedRotatingFileHandler(app.config["LOG_FILE_APPLICATION"], **rotation)
    app_handler.setFormatter(logging.Formatter(app.config["LOG_APP_FORMAT"]))
    app_handler.addFilter(FilterAppLogs())
    app.logger.addHandler(app_handler)
    app.logger.setLevel(logging.DEBUG if app.config["DEBUG"] else logging.INFO)
def getFileHandler():
    """Build the midnight-rotating file handler with colored formatting."""
    handler = TimedRotatingFileHandler(LOG_FILE, when='midnight')
    handler.setFormatter(coloredlogs.ColoredFormatter(
        log_format,
        field_styles=field_styles,
        level_styles=level_styles))
    # Adds the program-name field coloredlogs formats expect.
    handler.addFilter(coloredlogs.ProgramNameFilter())
    return handler
def setupLogging(app):
    """Register the ACCESS level and wire one rotating handler per log category."""
    logging.addLevelName(ACCESS_LEVEL, "ACCESS")
    logging.Logger.access = access
    categories = (
        ("LOG_FILE_ACCESS", "LOG_ACCESS_FORMAT", FilterAccessLogs),
        ("LOG_FILE_APPLICATION", "LOG_APP_FORMAT", FilterAppLogs),
    )
    for file_key, fmt_key, filter_cls in categories:
        handler = TimedRotatingFileHandler(
            app.config[file_key],
            when=app.config["LOG_WHEN"],
            interval=app.config["LOG_INTERVAL"],
            backupCount=app.config["LOG_BACKUP_COUNT"])
        handler.setFormatter(logging.Formatter(app.config[fmt_key]))
        handler.addFilter(filter_cls())
        app.logger.addHandler(handler)
    if app.config["DEBUG"]:
        app.logger.setLevel(logging.DEBUG)
    else:
        app.logger.setLevel(logging.INFO)
def _createUserLogger(user):
    """
    Create a special logger for this user that will log all actions
    when logged in.
    """
    global FILE_HANDLER_CACHE
    # do not create user logger if LOG_USERS is disabled
    if not app.config['LOG_USERS']:
        return
    # We want to see everything, so set it to DEBUG
    logLevel = logging.DEBUG
    logFileName = "%s.log" % user
    # make a file logger that rotates every day
    from logging.handlers import TimedRotatingFileHandler
    file_handler = TimedRotatingFileHandler(os.path.join(app.config['LOG_DIR'], logFileName),
                                            when='D',
                                            interval=1,
                                            backupCount=14)
    file_handler.setLevel(logLevel)
    # NOTE(review): the formatter is created but setFormatter below is
    # commented out, so records use the default format — confirm intended.
    formatter = logging.Formatter('%(asctime)s %(levelname)s] - %(message)s')
    #file_handler.setFormatter(formatter)
    # add filter that only applies to this user
    file_handler.addFilter(UserLoggingFilter(user))
    # tag the handler so cleanup code can match it back to its user
    file_handler.user = user
    # add handler to global app logger
    app.logger.addHandler(file_handler)
    # track all user handlers for later removal/cleanup
    FILE_HANDLER_CACHE[user] = file_handler
def setup_logger(name=__name__):
    """setup logging

    Overridding the default log level(**debug**) can be done via an
    environment variable `UCI_LOGLEVEL`
    (CRITICAL, ERROR, WARNING, INFO, DEBUG).

    .. note::

        This filters only cloudinstall logging info. Set your environment
        var to `UCI_NOFILTER` to see debugging log statements from imported
        libraries (ie macumba)

    .. code::

        # Running cloud-status from cli
        $ UCI_LOGLEVEL=INFO openstack-status

        # Disable log filtering
        $ UCI_NOFILTER=1 openstack-status

    :params str name: logger name
    :returns: a log object
    """
    # expanduser is robust when $HOME is unset (os.getenv('HOME') would
    # return None and crash os.path.join).
    HOME = os.path.expanduser('~')
    CONFIG_DIR = '.cloud-install'
    CONFIG_PATH = os.path.join(HOME, CONFIG_DIR)
    if not os.path.isdir(CONFIG_PATH):
        os.makedirs(CONFIG_PATH)
    LOGFILE = os.path.join(CONFIG_PATH, 'commands.log')
    commandslog = TimedRotatingFileHandler(LOGFILE,
                                           when='D',
                                           interval=1,
                                           backupCount=7)
    commandslog.setFormatter(logging.Formatter(
        '%(levelname)s \N{BULLET} %(asctime)s '
        '[LINE:%(lineno)d, FUNC:%(funcName)s] '
        '\N{BULLET} %(name)s \N{BULLET} '
        '%(message)s',
        datefmt='%m-%d %H:%M:%S'))
    logger = logging.getLogger('')
    env = os.environ.get('UCI_LOGLEVEL', 'DEBUG')
    no_filter = os.environ.get('UCI_NOFILTER', None)
    if not no_filter:
        # Only keep 'cloudinstall' records unless filtering is disabled.
        f = logging.Filter(name='cloudinstall')
        commandslog.addFilter(f)
    logger.setLevel(env)
    logger.addHandler(commandslog)
    return logger
def setup_logger(name=__name__):
    """Set up root logging into ~/.ki-up/ki-up-<name>.log.

    The level defaults to DEBUG and can be overridden via the
    ``UCI_LOGLEVEL`` environment variable (CRITICAL, ERROR, WARNING,
    INFO, DEBUG).  Records are filtered to the 'ki-up' namespace unless
    ``UCI_NOFILTER`` is set.

    :params str name: logger name
    :returns: a log object
    """
    config_path = os.path.join(os.getenv('HOME'), '.ki-up')
    if not os.path.isdir(config_path):
        os.makedirs(config_path)
    logfile = os.path.join(config_path, ('ki-up-{}.log'.format(name)))
    level = os.environ.get('UCI_LOGLEVEL', 'DEBUG')
    handler = TimedRotatingFileHandler(logfile,
                                       when='D',
                                       interval=1,
                                       backupCount=7)
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter(
        "[%(levelname)-4s: %(asctime)s, "
        "%(filename)s:%(lineno)d] %(message)s",
        datefmt='%m-%d %H:%M:%S'))
    if os.environ.get('UCI_NOFILTER', None) is None:
        handler.addFilter(logging.Filter(name='ki-up'))
    root = logging.getLogger('')
    root.setLevel(level)
    root.addHandler(handler)
    return root
def get_file_handler(self):
    """Return a midnight-rotating DEBUG handler for this system's log file."""
    handler = TimedRotatingFileHandler(f"Logs/{self.system_name}.log",
                                       when="midnight", interval=1)
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter(self.form))
    # Timestamped suffix applied to rotated files.
    handler.suffix = "%d-%m-%Y-%H-%M-%S"
    handler.addFilter(SystemLogFilter())
    return handler
def logger_init(log_file_path, log_level):
    """Create a weekly-rotating (W6 = Sunday) file handler at log_level.

    :param log_file_path: destination log file.
    :param log_level: level for the handler and its LogLevelFilter.
    :returns: the configured TimedRotatingFileHandler.
    """
    handler = TimedRotatingFileHandler(
        filename=log_file_path,
        when="W6",
        interval=1,
    )
    handler.setLevel(log_level)
    handler.addFilter(LogLevelFilter(level=log_level))
    fmt = logging.Formatter(
        '%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s')
    handler.setFormatter(fmt)
    return handler
def add_file_handler(*loggers: logging.Logger) -> None:
    """Attach one shared daily-rotating file handler to every given logger."""
    log_dir = os.path.join(BASE_PATH, 'logs')
    Common.make_dirs(log_dir)
    # File name is today's date, e.g. logs/2024-01-31.log.
    today = time.strftime('%Y-%m-%d', time.localtime(time.time()))
    handler = TimedRotatingFileHandler(
        os.path.join(log_dir, today + '.log'),
        'D', 1, 7, None, False, False)
    handler.setFormatter(logging.Formatter(
        '%(request_id)s - %(asctime)s - %(levelname)s - %(filename)s - %(funcName)s - %(lineno)s - %(message)s'))
    # Supplies the %(request_id)s field used by the format above.
    handler.addFilter(RequestIdFilter())
    handler.setLevel(logging.INFO)
    for target in loggers:
        target.addHandler(handler)
def build(self, filePath, formatter):
    """Attach a common (DEBUG+) and an error (WARNING+) daily-rotating handler."""
    specs = (
        ("%s.log" % filePath, DEBUG, INFO),
        ("%s.err.log" % filePath, WARNING, FATAL),
    )
    for file_name, handler_level, filter_level in specs:
        handler = TimedRotatingFileHandler(file_name, when='d')
        handler.setLevel(handler_level)
        handler.setFormatter(formatter)
        handler.addFilter(UpLevelFilter(filter_level))
        self.addHandler(handler)
def get_file_log_handler(filename):
    """Return a UTC daily-rotating DEBUG file handler keeping 30 backups."""
    handler = TimedRotatingFileHandler(filename,
                                       delay=False,
                                       encoding='utf-8',
                                       interval=1,
                                       utc=True,
                                       when='D',
                                       backupCount=30)
    handler.setLevel(logging.DEBUG)
    # Supplies the %(relative_path)s field used by the format below.
    handler.addFilter(PackagePathFilter())
    # noinspection SpellCheckingInspection
    fmt = logging.Formatter(
        '[%(asctime)s] %(levelname)s %(relative_path)s:%(lineno)s %(message)s')
    handler.setFormatter(fmt)
    return handler
def loginit(logfile, debug=False):
    """Attach a daily-rotating file handler to the root logger.

    :param logfile: destination log file.
    :param debug: DEBUG level with source location in the format when True.
    """
    if debug:
        loglevel = logging.DEBUG
        logfmt = '%(name)s %(asctime)s [%(process)d] [%(levelname)s] %(message)s [in %(pathname)s:%(lineno)d]'
    else:
        loglevel = logging.INFO
        logfmt = '%(name)s %(asctime)s [%(process)d] [%(levelname)s] %(message)s'
    handler = TimedRotatingFileHandler(logfile, when='D')
    handler.setFormatter(logging.Formatter(logfmt, r"%Y-%m-%d %H:%M:%S"))
    # Drops gunicorn's records (per the original author's note).
    handler.addFilter(ContextFilter())
    root = logging.getLogger()
    root.addHandler(handler)
    root.setLevel(loglevel)
def configure_logging(debug=False):
    """Reset and configure the module-level ``logger`` with console and
    midnight-rotating file handlers for Bazarr.

    :param debug: DEBUG level + verbose third-party loggers when True,
        otherwise INFO with noisy libraries quieted.
    """
    if not debug:
        log_level = "INFO"
    else:
        log_level = "DEBUG"
    # Drop previously attached handlers so repeated calls don't duplicate output.
    logger.handlers = []
    logger.setLevel(log_level)
    # Console logging
    # Full tracebacks only in debug mode; NoExceptionFormatter presumably
    # suppresses them otherwise — confirm in its definition.
    ch = logging.StreamHandler()
    cf = (debug and logging.Formatter or NoExceptionFormatter)(
        '%(asctime)-15s - %(name)-32s (%(thread)x) : %(levelname)s (%(module)s:%(lineno)d) - %(message)s')
    ch.setFormatter(cf)
    ch.setLevel(log_level)
    # ch.addFilter(MyFilter())
    logger.addHandler(ch)
    # File Logging
    global fh
    fh = TimedRotatingFileHandler(os.path.join(config_dir, 'log/bazarr.log'), when="midnight",
                                  interval=1, backupCount=7)
    f = OneLineExceptionFormatter('%(asctime)s|%(levelname)-8s|%(name)-32s|%(message)s|',
                                  '%d/%m/%Y %H:%M:%S')
    fh.setFormatter(f)
    # NOTE(review): filter names suggest sensitive values (API keys, public
    # IPs) are scrubbed from the on-disk log — confirm in their definitions.
    fh.addFilter(BlacklistFilter())
    fh.addFilter(PublicIPFilter())
    if debug:
        logging.getLogger("apscheduler").setLevel(logging.DEBUG)
        logging.getLogger("subliminal").setLevel(logging.DEBUG)
        logging.getLogger("git").setLevel(logging.DEBUG)
        logging.getLogger("apprise").setLevel(logging.DEBUG)
    else:
        # Quiet noisy third-party loggers in normal operation.
        logging.getLogger("apscheduler").setLevel(logging.WARNING)
        logging.getLogger("subliminal").setLevel(logging.CRITICAL)
        logging.getLogger("enzyme").setLevel(logging.CRITICAL)
        logging.getLogger("guessit").setLevel(logging.WARNING)
        logging.getLogger("rebulk").setLevel(logging.WARNING)
        logging.getLogger("stevedore.extension").setLevel(logging.CRITICAL)
    fh.setLevel(log_level)
    logger.addHandler(fh)
def init_logger(app):
    """Attach request-id-aware logging to a Flask app.

    Non-debug: a timed-rotating file handler when LOG_DIR is configured
    (stream handler otherwise), shared by the app, werkzeug, and root
    loggers.  Debug: coloredlogs installed on all three loggers instead.
    """
    if not app.debug:
        if app.config.get('LOG_DIR'):
            from logging.handlers import TimedRotatingFileHandler
            # https://docs.python.org/3.6/library/logging.handlers.html#timedrotatingfilehandler
            handler = TimedRotatingFileHandler(
                os.path.join(app.config['LOG_DIR'], 'TelecomMonitorWebTool.app.log'),
                'midnight'  # positional `when` argument
            )
        else:
            handler = logging.StreamHandler()
        handler.setLevel(logging.DEBUG)
        handler.setFormatter(
            logging.Formatter('[%(asctime)s] %(process)d %(request_id)s %(levelname)s - %(message)s'))
        # Injects the %(request_id)s field used by the format above.
        handler.addFilter(RequestIDLogFilter())
        app.logger.addHandler(handler)
        app.logger.setLevel(logging.INFO)
        # Share the same handler with werkzeug and the root logger.
        logging.getLogger('werkzeug').addHandler(handler)
        logging.getLogger().addHandler(handler)
        # app.logger.debug('test debug output')
        # app.logger.info('test info output')
        # app.logger.warning('test warning output')
        # app.logger.error('test error output')
    else:
        fmt = '%(asctime)s %(hostname)s %(name)s[%(process)d] %(request_id)s %(levelname)s %(message)s'
        coloredlogs.install(
            logger=app.logger,
            level='DEBUG',
            fmt=fmt,
        )
        coloredlogs.install(
            logger=logging.getLogger('werkzeug'),
            level='DEBUG',
            fmt=fmt,
        )
        # Stop werkzeug records propagating to the root logger (avoids
        # duplicate output alongside the root coloredlogs install below).
        logging.getLogger('werkzeug').propagate = False
        coloredlogs.install(
            level='DEBUG',
            fmt=fmt,
        )
        # Every handler needs the filter so %(request_id)s always resolves.
        for h in app.logger.handlers + logging.getLogger('werkzeug').handlers + logging.getLogger().handlers:
            h.addFilter(RequestIDLogFilter())
def init_app():
    """Wire the root logger with a bare console handler and a detailed
    daily-rotating info file; returns True on completion."""
    detailed_fmt = logging.Formatter(
        '%(asctime)s - %(module)s %(filename)s %(funcName)s:%(lineno)s - %(name)s -%(message)s')
    logging.root.setLevel(logging.INFO)
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(logging.Formatter('%(message)s'))
    console_handler.addFilter(logging.Filter())
    logging.root.addHandler(console_handler)
    info_handler = TimedRotatingFileHandler(
        filename=log_info['LOG_PATH_INFO'],
        backupCount=log_info['LOG_FILE_BACKUP_COUNT'],
        when='D',
        interval=1,
        encoding='utf-8')
    info_handler.setFormatter(detailed_fmt)
    info_handler.setLevel(logging.INFO)
    info_handler.addFilter(logging.Filter())
    logging.root.addHandler(info_handler)
    return True
def set_logger_params(app):
    """Attach debug and/or info midnight-rotating handlers per app LOGGER config.

    A DEBUG handler (with DebugFilter) is added when DEBUG_PATH is set, and
    an INFO handler when PATH is set; both use the configured FORMAT.
    Uses ``is not None`` identity checks instead of ``!= None`` (PEP 8).
    """
    if app.config['LOGGER']['DEBUG_PATH'] is not None:
        debug_handler = TimedRotatingFileHandler(
            app.config['LOGGER']['DEBUG_PATH'], when='midnight', interval=1)
        f = DebugFilter()
        debug_handler.addFilter(f)
        debug_handler.setLevel(logging.DEBUG)
        debug_handler.setFormatter(
            logging.Formatter(app.config['LOGGER']['FORMAT']))
        app.logger.addHandler(debug_handler)
    if app.config['LOGGER']['PATH'] is not None:
        handler = TimedRotatingFileHandler(app.config['LOGGER']['PATH'],
                                           when='midnight', interval=1)
        handler.setLevel(logging.INFO)
        handler.setFormatter(logging.Formatter(app.config['LOGGER']['FORMAT']))
        app.logger.addHandler(handler)
def build(self, file_path, formatter):
    """Attach common (DEBUG+) and error (WARNING+) midnight-rotating handlers."""
    main_handler = TimedRotatingFileHandler("%s.log" % file_path, when='MIDNIGHT')
    main_handler.setFormatter(formatter)
    main_handler.setLevel(DEBUG)
    main_handler.addFilter(UpLevelFilter(INFO))
    self.addHandler(main_handler)
    err_handler = TimedRotatingFileHandler("%s.err.log" % file_path, when='MIDNIGHT')
    err_handler.setFormatter(formatter)
    err_handler.setLevel(WARNING)
    err_handler.addFilter(UpLevelFilter(FATAL))
    self.addHandler(err_handler)
def setup_client_logging(bot):
    """Wire logging for the IRC client: a rotating channel log file plus a
    filtered stdout handler.

    :param bot: passed to ChannelLogFormatter for rendering channel messages.
    """
    # Setup irc.client logger
    client_logger = logging.getLogger('irc.client')
    client_logger.setLevel(logging.DEBUG)
    client_logger.propagate = False
    # Setup channel logs
    # NOTE(review): this fetches the same 'irc.client' logger as above, so
    # the channel handler is attached to the client logger — confirm intended.
    channel_logger = logging.getLogger('irc.client')
    channel_handler = TRHandler("log/moolog/moobot.log", when='midnight')
    channel_handler.addFilter(LowLevelFilter())
    channel_handler.addFilter(ChannelLogFilter())
    channel_handler.setFormatter(ChannelLogFormatter(bot=bot,
                                                     fmt="%(asctime)s %(message)s",
                                                     datefmt="%Y-%m-%d %H:%M:%S"))
    channel_logger.addHandler(channel_handler)
    # add main handler
    client_handler = logging.StreamHandler(sys.stdout)
    client_handler.addFilter(LowLevelFilter())
    client_handler.addFilter(PingPongFilter())
    client_handler.addFilter(PrivMsgFilter())
    client_handler.setFormatter(ServerMsgFormatter("CLIENT %(message)s"))
    client_logger.addHandler(client_handler)
def main(argv):
    """Toy driver: root logging with an INFO console handler and a rapidly
    rotating per-function debug file, then run the Economy simulation.

    NOTE: the trailing loop never terminates; stop with Ctrl-C.
    """
    # initialize the logger
    fmt = "%(asctime)-15s %(name)-5s %(levelname)-8s %(message)s"
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG)
    # Make the default stream handler at INFO level
    rh = logging.StreamHandler()
    rh.setLevel(logging.INFO)
    rh.setFormatter(logging.Formatter(fmt))
    root_logger.addHandler(rh)
    # Add a new handler for DEBUG messages from some specific functions. Just
    # for the hell of it, route into a logfile, set to rotate every 10 seconds
    debug_h = TimedRotatingFileHandler(
        "/tmp/toy-whatever.log",
        when='S',
        interval=10,
    )
    debug_h.setLevel(logging.DEBUG)
    debug_f = logging.Formatter(
        "%(asctime)-15s %(name)-5s %(levelname)-8s[[cool cool cool]] %(message)s"
    )
    debug_h.setFormatter(debug_f)
    # Add a filter for events from specific functions
    debug_h.addFilter(
        FuncNameWhitelistFilter([
            "make",
            "mine_iron",
            #"kill_cow"
        ]))
    root_logger.addHandler(debug_h)
    # Add a custom filter: we want a way to blacklist/whitelist functions
    G = Economy(write_dot="/tmp/blamo.dot")
    nodes = list(G.G.nodes())
    # Drive the simulation forever with random production steps.
    while True:
        G.make(random.choice(nodes))
        time.sleep(random.random())
def setup_logger(name=__name__):
    """Configure the root logger to write logs/debug.log, rotating daily."""
    log_dir = "logs"
    if not os.path.isdir(log_dir):
        os.makedirs(log_dir)
    handler = TimedRotatingFileHandler(os.path.join(log_dir, "debug.log"),
                                       when='D',
                                       interval=1,
                                       backupCount=7)
    handler.setLevel('DEBUG')
    handler.setFormatter(logging.Formatter(
        "%(asctime)s "
        "%(name)s:%(lineno)d %(message)s",
        datefmt='%m/%d %H:%M'))
    # Only keep records from the 'probert' logger namespace.
    handler.addFilter(logging.Filter(name='probert'))
    root = logging.getLogger('')
    root.setLevel('DEBUG')
    root.addHandler(handler)
    return root
def add_timed_rotating_handler(self, log_path=None, level=None, when='D',
                               interval=1, backup_count=5, formatter=None,
                               filters=None):
    """Attach a TimedRotatingFileHandler and return self for chaining.

    Falls back to self.log_path / self.level / self.formatter when the
    corresponding argument is omitted (or falsy).
    """
    handler = TimedRotatingFileHandler(log_path or self.log_path,
                                       when=when,
                                       interval=interval,
                                       backupCount=backup_count)
    handler.setLevel(level or self.level)
    handler.setFormatter(formatter or self.formatter)
    for item in (filters or []):
        handler.addFilter(item)
    self.addHandler(handler)
    return self
def __init__(self, level=logging.DEBUG, maxBytes=10 * 1024 * 1024):
    """Attach per-level midnight-rotating file handlers to the shared logger.

    :param level: level for the shared logger and the debug file handler.
    :param maxBytes: NOTE(review): unused — rotation here is time-based
        (when='MIDNIGHT'), not size-based; confirm whether this should
        drive a RotatingFileHandler instead.
    """
    # Set the shared logger's level
    TimeLogger.logger.setLevel(level)
    # Build dated file names and one rotating handler per severity
    debug_filename = time.strftime("%Y-%m-%d") + '_debug' + '.log'
    debug_filename = os.path.join(filePath, debug_filename)
    debug_file = TimedRotatingFileHandler(debug_filename,
                                          when='MIDNIGHT',
                                          backupCount=10,
                                          encoding='utf-8')
    info_filename = time.strftime("%Y-%m-%d") + '_info' + '.log'
    info_filename = os.path.join(filePath, info_filename)
    info_file = TimedRotatingFileHandler(info_filename,
                                         when='MIDNIGHT',
                                         backupCount=10,
                                         encoding='utf-8')
    info_file.setLevel(level=logging.INFO)
    error_filename = time.strftime("%Y-%m-%d") + '_error' + '.log'
    error_filename = os.path.join(filePath, error_filename)
    error_file = TimedRotatingFileHandler(error_filename,
                                          when='MIDNIGHT',
                                          backupCount=10,
                                          encoding='utf-8')
    # Per-line prefix format; a more detailed alternative kept for reference:
    # '%(asctime)s %(filename)s [line:%(lineno)d] %(levelname)s thread-%(thread)d: %(message)s'
    formatter = logging.Formatter(
        '%(asctime)s %(levelname)s thread-%(thread)d %(message)s')
    debug_file.setFormatter(formatter)
    info_file.setFormatter(formatter)
    error_file.setFormatter(formatter)
    # Add a level filter to each file so it only records its own level
    debug_filter = LoggerFilter(level=logging.DEBUG)
    info_filter = LoggerFilter(level=logging.INFO)
    error_filter = LoggerFilter(level=logging.ERROR)
    debug_file.addFilter(debug_filter)
    info_file.addFilter(info_filter)
    error_file.addFilter(error_filter)
    # Attach all file handlers to the shared logger
    TimeLogger.logger.addHandler(debug_file)
    TimeLogger.logger.addHandler(info_file)
    TimeLogger.logger.addHandler(error_file)
def setup_package_logger(workflow_id=None, console_loglevel='INFO',
                         log_path=None, file_loglevel='INFO',
                         backup_count=24):
    """Configure the package logger once; later calls are no-ops.

    Optionally adds a console handler and an hourly-rotating file handler,
    each with its own WorkflowIdFilter supplying %(workflow_id)s.
    """
    logger = logging.getLogger(PACKAGE_LOGGER_NAME)
    logger.setLevel('DEBUG')
    if logger.handlers:
        # Already configured — don't stack duplicate handlers.
        logger.debug('"%s" already setup - skipping logger...',
                     PACKAGE_LOGGER_NAME)
        return logger
    fmt = logging.Formatter(
        '%(asctime)s - %(name)s - %(workflow_id)s - %(levelname)s - %(message)s'
    )
    if console_loglevel:
        stream = logging.StreamHandler()
        stream.setLevel(console_loglevel)
        stream.setFormatter(fmt)
        stream.addFilter(WorkflowIdFilter(workflow_id, name='workflow_id'))
        logger.addHandler(stream)
    if log_path:
        rotating = TimedRotatingFileHandler(log_path, when='h',
                                            backupCount=backup_count)
        rotating.setLevel(file_loglevel)
        rotating.setFormatter(fmt)
        rotating.addFilter(WorkflowIdFilter(workflow_id, name='workflow_id'))
        logger.addHandler(rotating)
    return logger
def setup_logging(log_file):
    """Set up the non-propagating 'showroom' logger: DEBUG file + INFO console."""
    # TODO: more advanced filters, logging info like when rooms go live to console
    # https://docs.python.org/3/library/logging.config.html#logging-config-dictschema
    name_filter = logging.Filter(name="showroom")
    file_handler = TimedRotatingFileHandler(
        log_file,
        encoding='utf8',
        when='midnight',
        atTime=dt_time(tzinfo=TOKYO_TZ))
    # leave this in local time?
    file_handler.setFormatter(logging.Formatter(
        fmt='%(asctime)s %(name)-12s %(levelname)-8s %(threadName)s:\n%(message)s',
        datefmt='%m-%d %H:%M:%S'))
    file_handler.addFilter(name_filter)
    file_handler.setLevel(logging.DEBUG)
    console_handler = logging.StreamHandler()
    console_fmt = logging.Formatter(fmt='%(asctime)s %(message)s',
                                    datefmt=HHMM_FMT)
    # Console timestamps rendered through the tokyotime converter.
    console_fmt.converter = tokyotime
    console_handler.setFormatter(console_fmt)
    console_handler.setLevel(logging.INFO)
    console_handler.addFilter(name_filter)
    showroom_logger = logging.getLogger('showroom')
    showroom_logger.setLevel(logging.DEBUG)
    showroom_logger.propagate = False
    # Don't double-add handlers if this is ever called twice.
    for h in (file_handler, console_handler):
        if h not in showroom_logger.handlers:
            showroom_logger.addHandler(h)
def _createUserLogger(user):
    """
    Create a special logger for this user that will log all actions
    when logged in.
    """
    global FILE_HANDLER_CACHE
    # do not create user logger if LOG_USERS is disabled
    if not app.config['LOG_USERS']:
        return
    # We want to see everything, so set it to DEBUG
    logLevel = logging.DEBUG
    logFileName = "%s.log" % user
    # make a file logger that rotates every day
    from logging.handlers import TimedRotatingFileHandler
    file_handler = TimedRotatingFileHandler(os.path.join(
        app.config['LOG_DIR'], logFileName),
                                            when='D',
                                            interval=1,
                                            backupCount=14)
    file_handler.setLevel(logLevel)
    # NOTE(review): formatter is created but setFormatter below is commented
    # out, so records keep the default format — confirm intended.
    formatter = logging.Formatter('%(asctime)s %(levelname)s] - %(message)s')
    #file_handler.setFormatter(formatter)
    # add filter that only applies to this user
    file_handler.addFilter(UserLoggingFilter(user))
    # tag the handler so cleanup code can match it back to its user
    file_handler.user = user
    # add handler to global app logger
    app.logger.addHandler(file_handler)
    # track all user handlers for later removal/cleanup
    FILE_HANDLER_CACHE[user] = file_handler
from api.image_api import image_blue
from db.base_model import db

# Flask application with per-request-id logging.
app = Flask(__name__)
RequestID(app)

# Daily-rotating application log, 30 backups retained.
LOG_PATH = 'log/app.log'
HANDLER = TimedRotatingFileHandler(LOG_PATH, when="D", interval=1, backupCount=30)
FORMATTER = logging.Formatter(
    "[%(asctime)s][%(request_id)s] [%(pathname)s:%(lineno)d] %(levelname)s - %(message)s"
)
HANDLER.setFormatter(FORMATTER)
# Injects the %(request_id)s field referenced by the format string above.
HANDLER.addFilter(RequestIDLogFilter())
app.logger.addHandler(HANDLER)
app.logger.setLevel(logging.DEBUG)

# URI format: protocol://user:password@host:port(optional)/database
app.config[
    "SQLALCHEMY_DATABASE_URI"] = "mysql://*****:*****@127.0.0.1/dockermanager"
# app.config["SQLALCHEMY_DATABASE_URI"] = "mysql://*****:*****@127.0.0.1/dockermanager"
# Modification tracking hurts performance and will be removed in a future
# SQLAlchemy version; disabled here only to silence the console warning.
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.config["SQLALCHEMY_ECHO"] = True
db.init_app(app)
app.register_blueprint(project_blue)
app.register_blueprint(docker_blue)
app.register_blueprint(image_blue)
def get_file_handler():
    """Return a midnight-rotating file handler writing to LOG_FILE."""
    handler = TimedRotatingFileHandler(LOG_FILE, when="midnight")
    handler.setFormatter(FORMATTER)
    handler.addFilter(ContextFilter())
    return handler
def configure_logging(debug=False):
    """Configure Bazarr's console and rotating-file logging.

    Resets the module-level ``logger``'s handlers, attaches a console
    handler and a midnight-rotating file handler (kept in the module
    global ``fh``), and adjusts third-party logger verbosity.

    :param bool debug: when True log at DEBUG, include full tracebacks on
        the console, and make third-party libraries verbose; otherwise log
        at INFO and quiet them down.
    """
    warnings.simplefilter('ignore', category=ResourceWarning)
    log_level = "DEBUG" if debug else "INFO"

    logger.handlers = []
    logger.setLevel(log_level)

    # Console logging: full tracebacks in debug mode, suppressed otherwise.
    # (Replaces the legacy `cond and a or b` idiom with a real conditional.)
    ch = logging.StreamHandler()
    formatter_cls = logging.Formatter if debug else NoExceptionFormatter
    cf = formatter_cls(
        '%(asctime)-15s - %(name)-32s (%(thread)x) : %(levelname)s (%(module)s:%(lineno)d) - %(message)s'
    )
    ch.setFormatter(cf)
    ch.setLevel(log_level)
    logger.addHandler(ch)

    # File logging: midnight rotation, 7 backups, sensitive data filtered.
    global fh
    fh = TimedRotatingFileHandler(os.path.join(args.config_dir, 'log/bazarr.log'),
                                  when="midnight",
                                  interval=1,
                                  backupCount=7,
                                  delay=True,
                                  encoding='utf-8')
    f = OneLineExceptionFormatter('%(asctime)s|%(levelname)-8s|%(name)-32s|%(message)s|',
                                  '%d/%m/%Y %H:%M:%S')
    fh.setFormatter(f)
    fh.addFilter(BlacklistFilter())
    fh.addFilter(PublicIPFilter())
    fh.setLevel(log_level)
    logger.addHandler(fh)

    # Third-party logger levels, data-driven instead of one setLevel per line.
    if debug:
        third_party_levels = {
            "sqlite3worker": logging.ERROR,
            "apscheduler": logging.DEBUG,
            "subliminal": logging.DEBUG,
            "subliminal_patch": logging.DEBUG,
            "subzero": logging.DEBUG,
            "git": logging.DEBUG,
            "apprise": logging.DEBUG,
            "engineio.server": logging.DEBUG,
            "socketio.server": logging.DEBUG,
            "ffsubsync.subtitle_parser": logging.DEBUG,
            "ffsubsync.speech_transformers": logging.DEBUG,
            "ffsubsync.ffsubsync": logging.DEBUG,
            "srt": logging.DEBUG,
        }
    else:
        third_party_levels = {
            "sqlite3worker": logging.CRITICAL,
            "apscheduler": logging.WARNING,
            "apprise": logging.WARNING,
            "subliminal": logging.CRITICAL,
            "subliminal_patch": logging.CRITICAL,
            "subzero": logging.ERROR,
            "engineio.server": logging.ERROR,
            "socketio.server": logging.ERROR,
            "ffsubsync.subtitle_parser": logging.ERROR,
            "ffsubsync.speech_transformers": logging.ERROR,
            "ffsubsync.ffsubsync": logging.ERROR,
            "srt": logging.ERROR,
            "waitress": logging.CRITICAL,
            "knowit": logging.CRITICAL,
            "enzyme": logging.CRITICAL,
            "guessit": logging.WARNING,
            "rebulk": logging.WARNING,
            "stevedore.extension": logging.CRITICAL,
        }
    for name, level in third_party_levels.items():
        logging.getLogger(name).setLevel(level)

    if debug:
        logging.debug('Bazarr version: %s', os.environ["BAZARR_VERSION"])
        logging.debug('Bazarr branch: %s', settings.general.branch)
        logging.debug('Operating system: %s', platform.platform())
        logging.debug('Python version: %s', platform.python_version())
def create_logger(loggername: str = 'logger', levelname: str = 'DEBUG'):
    """Build a logger with console, combined-file and per-level file handlers.

    Handlers attached:

    * a console ``StreamHandler`` at INFO printing just the message,
    * a combined daily file under ``path/'LOG'`` at the requested level,
    * one daily-rotating file per entry of the module-level ``levels``
      mapping, each filtered to records of exactly that level.

    :param loggername: name passed to ``logging.getLogger``.
    :param levelname: key into the module-level ``levels`` mapping giving
        the minimum level for the logger and the combined file.
    :returns: the configured ``logging.Logger``.
    """
    logger = logging.getLogger(loggername)
    logger.setLevel(levels[levelname])

    logger_format = logging.Formatter(
        "[%(asctime)s][%(levelname)s][%(filename)s][%(funcName)s][%(lineno)03s]: %(message)s")
    console_format = logging.Formatter("%(message)s")

    # Console: INFO and above, message text only.
    handler_console = logging.StreamHandler()
    handler_console.setFormatter(console_format)
    handler_console.setLevel(logging.INFO)

    # Combined per-day file under LOG/.
    now = time.strftime("%Y%m%d")
    common_filename = path / 'LOG' / f'{now}.log'
    handler_common = logging.FileHandler(common_filename, mode='a+', encoding='utf-8')
    handler_common.setLevel(levels[levelname])
    handler_common.setFormatter(logger_format)

    # One rotating file per severity, each filtered to exactly that level.
    for key in levels:
        filename = path / key / 'logger.log'
        handler = TimedRotatingFileHandler(filename, encoding='utf-8',
                                           when='D', interval=1, backupCount=7)
        handler.suffix = "%Y-%m-%d.log"
        handler.setFormatter(logger_format)
        handler.setLevel(levels[key])
        flt = logging.Filter()
        # BUG FIX: bind the level value now via a default argument.  The
        # original `lambda record: record.levelno == levels[key]` captured
        # the loop variable late, so after the loop EVERY filter compared
        # against the last level in `levels`.
        flt.filter = lambda record, lvl=levels[key]: record.levelno == lvl
        handler.addFilter(flt)
        logger.addHandler(handler)

    logger.addHandler(handler_console)
    logger.addHandler(handler_common)
    return logger
def setup_logger(name=__name__, headless=False):
    """setup logging

    Overriding the default log level (**debug**) can be done via an
    environment variable `UCI_LOGLEVEL`

    Available levels:

    * CRITICAL
    * ERROR
    * WARNING
    * INFO
    * DEBUG

    .. note::

        This filters only cloudinstall logging info. Set your environment
        var to `UCI_NOFILTER` to see debugging log statements from imported
        libraries (ie macumba)

    .. code::

        # Running cloud-status from cli
        $ UCI_LOGLEVEL=INFO openstack-status

        # Disable log filtering
        $ UCI_NOFILTER=1 openstack-status

    :params str name: logger name
    :returns: a log object
    """
    # Fall back to expanduser when $HOME is unset: os.getenv would return
    # None and os.path.join(None, ...) would raise TypeError.
    HOME = os.getenv('HOME') or os.path.expanduser('~')
    CONFIG_DIR = '.cloud-install'
    CONFIG_PATH = os.path.join(HOME, CONFIG_DIR)
    os.makedirs(CONFIG_PATH, exist_ok=True)
    LOGFILE = os.path.join(CONFIG_PATH, 'commands.log')

    env = os.environ.get('UCI_LOGLEVEL', 'DEBUG')

    # Daily-rotating file handler, 7 backups.
    commandslog = TimedRotatingFileHandler(LOGFILE,
                                           when='D',
                                           interval=1,
                                           backupCount=7)
    commandslog.setLevel(env)
    commandslog.setFormatter(logging.Formatter(
        "[%(levelname)-4s: %(asctime)s, "
        "%(filename)s:%(lineno)d] %(message)s",
        datefmt='%m-%d %H:%M:%S'))

    # Console output only in headless mode; kept in one variable so the
    # later filter/attach steps need a single None check instead of the
    # original's repeated `if headless:` branches.
    consolelog = None
    if headless:
        consolelog = logging.StreamHandler()
        consolelog.setLevel(logging.INFO)
        consolelog.setFormatter(logging.Formatter(
            '[%(levelname)-4s: %(asctime)s] %(message)s',
            datefmt='%m-%d %H:%M:%S'))

    logger = logging.getLogger('')
    logger.setLevel(env)

    # Restrict output to cloudinstall's own records unless UCI_NOFILTER set.
    if os.environ.get('UCI_NOFILTER', None) is None:
        f = logging.Filter(name='cloudinstall')
        commandslog.addFilter(f)
        if consolelog is not None:
            consolelog.addFilter(f)

    logger.addHandler(commandslog)
    if consolelog is not None:
        logger.addHandler(consolelog)

    return logger