def _create_logger(log_dir_path, logger_name, log_file_enabled, log_params):
    """Create and configure a named logger with console and optional file output.

    :param log_dir_path: directory where the rotating log file is created
        (only used when ``log_file_enabled`` is true).
    :param logger_name: name passed to ``logging.getLogger``; also used as the
        log file's base name.
    :param log_file_enabled: when true, attach a ``RotatingFileHandler``.
    :param log_params: dict with keys ``'level'``, ``'max_bytes'`` and
        ``'backup_count'``.
    :return: the configured ``logging.Logger``.

    NOTE(review): each call attaches fresh handlers; calling twice with the
    same name duplicates output.
    """
    logger = logging.getLogger(logger_name)

    # Setup format
    formatter = Formatter(fmt='%(asctime)s %(levelname)7s: %(message)s',
                          datefmt='%Y/%m/%d %p %I:%M:%S',)

    # Enable console output.
    # Use setLevel()/setFormatter() rather than assigning the attributes
    # directly: setLevel() also accepts level *names* such as 'INFO' and
    # converts them to their numeric value.
    console = logging.StreamHandler()
    console.setLevel(log_params['level'])
    console.setFormatter(formatter)
    logger.addHandler(console)

    # Enable file output with rotation
    if log_file_enabled:
        log_file_path = os.path.join(log_dir_path, logger_name + ".log")
        file_handler = RotatingFileHandler(
            filename=log_file_path,
            mode='a',
            maxBytes=log_params['max_bytes'],
            backupCount=log_params['backup_count']
        )
        file_handler.setLevel(log_params['level'])
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)

    # Root logger is forced to DEBUG so handler levels alone decide filtering.
    logging.getLogger().setLevel(logging.DEBUG)
    return logger
def Register():
    """Set up one rotating log file per configured log name, drain queued log
    records into them via a cherrypy Monitor, then start the application."""
    from logging.handlers import RotatingFileHandler
    from logging import Formatter

    handlers = {}
    record_format = Formatter("%(asctime)s [%(process)s:%(thread)s] ** %(levelname)s ** %(message)s")

    # Pull settings in the same order the config helpers expect.
    log_names = getConfigValue('lognames')
    queue_size = getIntConfigValue('qsize')
    max_bytes = getIntConfigValue('rotation_bytes')
    backup_count = getIntConfigValue('rotation_count')

    base_dir = os.path.dirname(__file__)
    for log_name in log_names.split(","):
        file_handler = RotatingFileHandler(os.path.join(base_dir, log_name),
                                           maxBytes=max_bytes,
                                           backupCount=backup_count)
        file_handler.setFormatter(record_format)
        handlers[log_name] = file_handler

    def write():
        # Drain the shared queue, routing each record to the handler
        # registered under its logger name, until the queue is empty.
        while True:
            try:
                record = cherrypy.engine.log_cache.get_nowait()
                handlers[record.name].emit(record)
            except Empty:
                return

    from cherrypy.process import plugins
    monitor = plugins.Monitor(cherrypy.engine, write, 3)
    monitor.subscribe()

    cherrypy.engine.log_cache = Queue(maxsize=queue_size)
    cherrypy.config.update(os.path.join(base_dir, "config.txt"))
    app = cherrypy.quickstart(Root())
def get_logger(name, log_dir=None, level=logging.INFO, split_error=False, fmt=default_fmt,
               use_tqdm=False):
    """Return a configured logger; repeated calls with the same name reuse it.

    :param name: logger name passed to ``logging.getLogger``.
    :param log_dir: when given, also log to a rotating file under this dir.
    :param level: level accepted by ``parse_level`` (name or number).
    :param split_error: when true, route errors to a separate rotating file.
    :param fmt: format string for all handlers.
    :param use_tqdm: emit console output via ``tqdm.write`` so progress bars
        are not broken by log lines.
    """
    logger = logging.getLogger(name)
    # Re-use the already-configured instance to avoid duplicated handlers.
    if getattr(logger, 'initialized', False):
        return logger
    logger.initialized = True
    logger.setLevel(parse_level(level))
    del logger.handlers[:]

    if log_dir:
        log_dir = Path(log_dir).expanduser()
        log_dir.mkdir(parents=True, exist_ok=True)
        log_file = log_dir / default_log
        file_handler = RotatingFileHandler(str(log_file), maxBytes=log_max_bytes,
                                           backupCount=log_backup_count)
        file_handler.setFormatter(logging.Formatter(fmt))
        logger.addHandler(file_handler)
        if split_error:
            # Main file keeps INFO-and-below; errors go to their own file.
            file_handler.addFilter(InfoFilter())
            error_log_file = log_dir / default_error_log
            # str() for consistency with the main handler (Path support in
            # RotatingFileHandler only arrived in newer Pythons).
            error_file_handler = RotatingFileHandler(
                str(error_log_file), maxBytes=log_max_bytes,
                backupCount=log_backup_count)
            error_file_handler.setFormatter(logging.Formatter(fmt))
            error_file_handler.addFilter(ErrorFilter())
            logger.addHandler(error_file_handler)

    if use_tqdm:
        from tqdm import tqdm

        class TqdmHandler(logging.StreamHandler):
            def emit(self, record):
                msg = self.format(record)
                tqdm.write(msg)

        stream_handler = TqdmHandler()
    else:
        stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(ColorFormatter(fmt))
    logger.addHandler(stream_handler)
    return logger
def create_file_log_handler(file_path, level=logging.DEBUG, max_bytes=5 * 1000 * 1024,
                            backup_count=10, formatter=None):
    """Build a ``RotatingFileHandler`` for *file_path*.

    :param file_path: destination log file.
    :param level: minimum level the handler emits (default DEBUG).
    :param max_bytes: rotation threshold (default ~5 MB).
    :param backup_count: number of rotated files kept (default 10).
    :param formatter: optional formatter; falls back to the module default.
    :return: the configured handler. ``delay=True`` defers opening the file
        until the first record is emitted.
    """
    handler = RotatingFileHandler(
        filename=file_path,
        maxBytes=max_bytes,
        backupCount=backup_count,
        delay=True,
    )
    handler.setLevel(level)
    handler.setFormatter(formatter if formatter else _default_file_formatter)
    return handler
def gen_logger(name, level=INFO):
    """Create a logger that writes both to a rotating file and to stdout.

    :param name: logger name.
    :param level: logging level (name or numeric value).
    :return: the configured ``logging.Logger``.

    NOTE(review): relies on the module-level globals ``log_path`` and
    ``maxBytes``; with no backupCount the file is truncated on rollover.
    Each call attaches fresh handlers, so repeated calls duplicate output.
    """
    # Initialize
    logger = logging.getLogger(name)
    # setLevel() (not a bare attribute assignment) so that level *names*
    # such as 'INFO' are converted to their numeric value.
    logger.setLevel(level)

    # Output format
    fmt = '%(asctime)s %(name)s %(levelname)s %(funcName)s: %(message)s'
    formatter = logging.Formatter(fmt)

    # Handlers
    file_handler = RotatingFileHandler(log_path, maxBytes=maxBytes)
    file_handler.setFormatter(formatter)
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setFormatter(formatter)

    # Attach handlers
    logger.addHandler(file_handler)
    logger.addHandler(console_handler)
    return logger
def get_logger(name, log_dir=None, level=logging.INFO, split_error=False, fmt=default_fmt):
    """Return a configured logger; later calls with the same name reuse it.

    Attaches a tqdm-aware color console handler and, when ``log_dir`` is
    given, a rotating file handler (plus a separate error file when
    ``split_error`` is true).
    """
    logger = logging.getLogger(name)
    if getattr(logger, 'initialized', False):
        # Already configured once — hand the same instance back untouched.
        return logger
    logger.initialized = True
    logger.setLevel(parse_level(level))
    del logger.handlers[:]

    if log_dir:
        log_dir = Path(log_dir)
        log_dir.mkdir(parents=True, exist_ok=True)
        main_handler = RotatingFileHandler(log_dir / default_log,
                                           maxBytes=log_max_bytes,
                                           backupCount=log_backup_count)
        main_handler.setFormatter(logging.Formatter(fmt))
        logger.addHandler(main_handler)
        if split_error:
            # Main file keeps non-error records; errors get their own file.
            main_handler.addFilter(InfoFilter())
            err_handler = RotatingFileHandler(log_dir / default_error_log,
                                              maxBytes=log_max_bytes,
                                              backupCount=log_backup_count)
            err_handler.setFormatter(logging.Formatter(fmt))
            err_handler.addFilter(ErrorFilter())
            logger.addHandler(err_handler)

    console = TqdmHandler()
    console.setFormatter(ColorFormatter(fmt))
    logger.addHandler(console)
    return logger
def setup_logging():
    """Attach a rotating file handler and a console handler to the root logger.

    Reads the log directory from the module-level ``config['logsFolder']``
    and creates it if missing; root level is set to DEBUG.
    """
    formatter = logging.Formatter("%(asctime)s [%(levelname)s] - %(message)s")
    root = logging.getLogger()
    root.setLevel(logging.DEBUG)

    logs_folder = config['logsFolder']
    os.makedirs(logs_folder, exist_ok=True)

    # File first, then console — same order as before.
    for handler in (
        RotatingFileHandler(os.path.join(logs_folder, 'zippero-server.log'),
                            maxBytes=1024 * 1024 * 10, backupCount=3),
        logging.StreamHandler(),
    ):
        handler.setFormatter(formatter)
        root.addHandler(handler)
from wsgi.plot import Plot
from wsgi.processor import Processor

# Flask bootstrap: choose the config class based on the OpenShift env marker.
app = Flask(__name__)
if os.environ.get('OPENSHIFT_APP_NAME') is not None:
    app.logger.info('Using pro')
    app.config.from_object('wsgi.configuration.ProductionConfig')
else:
    app.config.from_object('wsgi.configuration.DevelopmentConfig')

# Rotating log file (~1 MB per file, 10 backups); only WARNING and above.
log_file_handler = RotatingFileHandler(filename=app.config['LOGGING_DEST'] + 'ghost.log',
                                       maxBytes=1024 * 1000, backupCount=10)
log_file_handler.setLevel(logging.WARNING)
app.logger.addHandler(log_file_handler)
log_file_handler.formatter = logging.Formatter(fmt='%(asctime)s - %(levelname)s - %(message)s')

app.debug = app.config['DEBUG']
app.logger.debug(app.config.items())
app.logger.info('Setup complete')

# MongoDB connection; authentication is only performed for production.
client = pymongo.MongoClient(app.config['MONGODB_HOST'], int(app.config['MONGODB_PORT']))
db = client[app.config['DB_NAME']]
if app.config['PRODUCTION']:
    db.authenticate(app.config['DB_USER'], app.config['DB_PASSWORD'])
app.logger.info('DB setup complete')

processor = Processor(db)


@app.route('/summary/<int:year>/<int:day>', methods=['GET'])
def summary(year, day):
    # NOTE(review): body appears truncated in this chunk — no return visible.
    result = processor.summary(year, day)
        # NOTE(review): this chunk starts mid-expression — the enclosing
        # try/route definition is outside the visible range.
        embedding, s1, s2), status=200, content_type='application/json')
    except KeyError:
        content = {'message': 'INVALID PARAMS'}
        return Response(content, status=400, content_type='application/json')


@app.after_request
def after_request(response):
    # NOTE(review): logger.info is called with several positional arguments;
    # logging treats the extras as %-format args for the first string, so
    # these calls likely mis-format or raise during emit — presumably one
    # formatted string was intended. Verify before relying on these logs.
    if request.method == 'GET':
        app.logger.info(request.method, request.path, request.args)
    elif request.method == 'POST':
        app.logger.info(request.method, request.path)
    return response


if __name__ == '__main__':
    # Timestamped rotating log file shared by the app and werkzeug loggers.
    formatter = logging.Formatter("[%(levelname)s - %(message)s]")
    logFileName = 'logs/{}.log'.format(
        datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y_%m_%d_%H_%M'))
    handler = RotatingFileHandler(logFileName, maxBytes=10000000)
    handler.formatter = formatter
    handler.setLevel(logging.INFO)
    app.logger.addHandler(handler)
    log = logging.getLogger('werkzeug')
    log.setLevel(logging.DEBUG)
    log.addHandler(handler)
    app.run(threaded=True)
    # NOTE(review): fragment — the enclosing function definition is outside
    # this chunk; it unpickles an incoming record and queues it.
    # SECURITY: pickle.loads executes arbitrary code if the payload source is
    # untrusted — confirm the sender is trusted before accepting this.
    import pickle
    record = pickle.loads(kwargs["record"])
    cherrypy.engine.log_cache.put(record)
    return "Done"


if __name__ == "__main__":
    from logging.handlers import RotatingFileHandler
    from logging import Formatter
    # Single rotating file for all received records (100 KB x 10 backups).
    handler = RotatingFileHandler("httplogger.txt", maxBytes=100 * 1024, backupCount=10)
    handler.formatter = Formatter("%(asctime)s\t[%(process)s:%(thread)s]\t%(name)s\t%(levelname)s\t%(message)s")

    def write():
        # Drain the shared queue, emitting each record, until it is empty.
        while True:
            try:
                handler.emit(cherrypy.engine.log_cache.get_nowait())
            except Empty:
                return

    from cherrypy.process import plugins
    # Periodically (every 15 s) flush queued records to the file handler.
    cherrypy.engine.log_writer = plugins.Monitor(cherrypy.engine, write, 15)
# Flask application wiring: blueprints, error handlers, logging, cache, DB.
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI

# Blueprint registration
for blueprint in (pages, api, rs):
    app.register_blueprint(blueprint)

# HTTP error handler registration
for status_code, error_view in ((400, bad_request),
                                (401, unauthorized),
                                (404, not_found),
                                (500, internal_server_error),
                                (501, not_implemented)):
    app.register_error_handler(status_code, error_view)

# Logging handler initialization
handler = RotatingFileHandler(APPLICATION_LOG_FILE, maxBytes=10000, backupCount=2)
handler.setLevel(logging.INFO)
handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))

# Logger initialization
log = Logger('Application-log')
log.addHandler(handler)
log.info('Application logger initialized')

# Cache registration with Flask app
cache.init_app(app)
log.info('Cache initialized')

# Database registration with Flask app
db.app = app
""" import logging from logging.handlers import RotatingFileHandler import yaml from dug_seis.processing.processing import processing import numpy as np logger = logging.getLogger() logger.setLevel(logging.DEBUG) formatter = logging.Formatter('%(asctime)s %(levelname)-7s %(message)s') verbose = False ch = logging.StreamHandler() ch.setLevel(logging.DEBUG if verbose else logging.INFO) ch.setFormatter(formatter) logger.addHandler(ch) log = 'dug-seis.log' fh = RotatingFileHandler(log) fh.setLevel(logging.DEBUG if verbose else logging.INFO) fh.formatter = formatter logger.addHandler(fh) logger.info('DUG-Seis started') f = open('dug-seis.yaml') param = yaml.load(f) param['General']['sensor_coords'] = np.reshape( param['General']['sensor_coords'], [param['General']['sensor_count'], 3]) processing(param)
class NullFormater():
    """Pass-through "formatter": returns the LogRecord object unchanged.

    NOTE(review): handlers normally expect format() to return a str;
    presumably downstream code serializes the raw record itself — verify.
    """

    def format(self, record):
        return record


# Per-process rotating log file (10 MB x 9 backups, UTF-8), enabled only
# when a logging filename is configured.
logging_filename = ConfigGlobals.LoggingFilename

file_handler = None
if logging_filename:
    # PID suffix keeps concurrent processes from clobbering one file.
    logging_filename = f'{logging_filename}_{os.getpid()}.log'
    file_handler = RotatingFileHandler(logging_filename, maxBytes=10 * 1024 * 1024,
                                       backupCount=9, encoding='utf-8')
    file_handler.formatter = NullFormater()


def dump_to_logfile(arg, *args, **kwargs):
    # Best-effort: silently a no-op when file logging is disabled.
    if file_handler:
        file_handler.handle(arg)


class Log:
    """
    Declare a new log category.
    @param log_name: category name
    @param context: log context (None if there is no definite context)
    @param color_pattern: color pattern used to highlight entries in the log
    @param action: extra action to perform when logging
    @param specific_source: specific source of the message