def logSetup(self):
    """Configure and return the application logger.

    Sets up two DEBUG-level loggers: "fast-api-logger" (rotating
    server.log + stdout) and "fast-api-server-logger" (rotating
    application.log). Returns the "fast-api-logger" instance.
    """
    import os  # local import so this fix is self-contained

    ProjectUtils.makePath(LOG_FILE_PATH)
    logger = logging.getLogger("fast-api-logger")
    logger2 = logging.getLogger("fast-api-server-logger")
    logger2.setLevel(logging.DEBUG)
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        fmt='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
        datefmt='%m-%d-%y %H:%M:%S')
    # FIX: the original used LOG_FILE_PATH + r"\\server.log", which appended
    # TWO literal backslashes (raw string) and was Windows-only.
    # os.path.join builds the path portably with a single separator.
    fh = TimedRotatingFileHandler(
        os.path.join(LOG_FILE_PATH, "server.log"),
        when='D', interval=1, backupCount=45)
    # Rotated files become "<date-without-dots>_server.log".
    fh.namer = lambda name: name.replace("server.log", "").replace(
        ".", "") + "_server.log"
    fh.setFormatter(formatter)
    logger.addHandler(fh)
    fh2 = TimedRotatingFileHandler(
        os.path.join(LOG_FILE_PATH, "application.log"),
        when='D', interval=1, backupCount=45)
    # Rotated files become "<date-without-dots>_application.log".
    fh2.namer = lambda name: name.replace("application.log", "").replace(
        ".", "") + "_application.log"
    fh2.setFormatter(formatter)
    # Console output intentionally uses the default (unformatted) layout,
    # as in the original.
    logger.addHandler(logging.StreamHandler(sys.stdout))
    logger2.addHandler(fh2)
    return logger
def add_file_handler(filename):
    """Attach a midnight-rotating file handler to the module logger.

    filename: path of the active log file; the module-level file_namer /
        file_rotator callables control how rotated files are named/moved.
    """
    rotating = TimedRotatingFileHandler(filename=filename, when='midnight')
    rotating.setFormatter(logging.Formatter(fmt=log_format))
    rotating.namer = file_namer
    rotating.rotator = file_rotator
    log.addHandler(rotating)
    log.info('Writing log to %s', filename)
def __init__(self, path=""):
    # Initialize file logging only when a log path is supplied; an empty
    # path just records self.__path = "".
    # NOTE(review): in the empty-path branch self.__logger is never set,
    # so later logger use would raise AttributeError — confirm callers
    # guard on path / self.__path.
    if path:
        self.__path = path
        # Make sure the log file's parent directory exists.
        pathos = Path(path)
        pathos.parent.mkdir(parents=True, exist_ok=True)
        self.__logger = logging.getLogger("ePiframe")
        self.__logger.setLevel(logging.INFO)
        # Rotate at midnight, keeping 6 backups (gzip-compressed below).
        handler = TimedRotatingFileHandler(
            path, when="midnight", interval=1, backupCount=6)

        def namer(name):
            # Rotated files get a .gz extension to match the gzip rotator.
            return name + ".gz"

        def rotator(source, dest):
            # Compress the rolled-over log into `dest`, then delete the
            # uncompressed original.
            with open(source, "rb") as sf:
                data = sf.read()
                with gzip.open(dest, "wb") as df:
                    df.write(data)
            os.remove(source)

        handler.rotator = rotator
        handler.namer = namer
        self.__logger.addHandler(handler)
    else:
        self.__path = ""
def init(base_name, dir_name='', log_level='INFO'):
    """Configure the root logger: daily-rotating file plus colored console.

    base_name: log file name; dir_name: optional directory (created if
    missing); log_level: level name string such as 'INFO' or 'DEBUG'.
    """
    root = logging.getLogger()
    root.setLevel(logging.getLevelName(log_level))
    if dir_name:
        os.makedirs(dir_name, exist_ok=True)
    full_name = dir_name + '/' + base_name if dir_name else base_name
    fmt = '%(asctime)s.%(msecs)03d %(levelname)9s [%(threadName)s] ' \
          '%(filename)s:%(lineno)s - %(funcName)s : %(message)s'
    datefmt = '%Y-%m-%d %H:%M:%S'
    handler = TimedRotatingFileHandler(full_name, when='D', interval=1,
                                       backupCount=10)
    handler.setFormatter(logging.Formatter(fmt=fmt, datefmt=datefmt))
    # Rotated files get an extra ".log" appended to the default name.
    handler.namer = lambda default_name: default_name + ".log"
    root.addHandler(handler)
    coloredlogs.install(level=logging.getLevelName(log_level),
                        fmt=fmt, datefmt=datefmt)
def get_file_handler():
    """Build the midnight-rotating handler for LOG_FILE (10 backups, UTF-8)."""
    handler = TimedRotatingFileHandler(
        LOG_FILE, when='midnight', backupCount=10, encoding='utf-8')
    # Rotated files are stamped with an 8-digit date; extMatch lets the
    # handler recognize (and prune) those files when deleting old backups.
    handler.suffix = '%Y%m%d'
    handler.extMatch = re.compile(r"^\d{8}$")
    handler.namer = namer
    handler.setFormatter(FORMATTER)
    return handler
def logToFile(filename=None):
    """Attach a midnight-rotating file handler (7 backups) to LOGGER.

    filename: target log file; defaults to the Cayenne system log path.
    """
    target = filename if filename else '/var/log/myDevices/cayenne.log'
    handler = TimedRotatingFileHandler(target, when="midnight",
                                       interval=1, backupCount=7)
    handler.setFormatter(LOG_FORMATTER)
    # Module-level rotator/namer control compression and renaming of
    # rolled-over files.
    handler.rotator = rotator
    handler.namer = namer
    LOGGER.addHandler(handler)
def getFileLogger(name: str, fPath: str, backupCount: int, numRollingHrs: int) -> LoggerAdapter:
    """Return a LoggerAdapter over an INFO logger writing to a rotating file.

    The file at `fPath` rolls every `numRollingHrs` hours, keeping
    `backupCount` backups; rolled files are renamed from *.log to *.zip
    and handed to the module-level `rotator`.
    """
    base_logger = logging.getLogger(name)
    base_logger.setLevel(logging.INFO)
    file_handler = TimedRotatingFileHandler(
        fPath, backupCount=backupCount, when='h', interval=numRollingHrs)
    file_handler.namer = lambda rolled: rolled.replace(".log", "") + ".zip"
    file_handler.rotator = rotator
    # Message-only output: metadata is left to the formatter's callers.
    file_handler.setFormatter(logging.Formatter("%(message)s"))
    base_logger.addHandler(file_handler)
    return logging.LoggerAdapter(base_logger, extra={})
def setup_logger(log_namer, base_log_filename):
    """Return an INFO logger writing to a daily-rotating file.

    log_namer: callable installed as the handler's `namer`, controlling
        how rotated files are renamed.
    base_log_filename: path of the active log file.

    References:
    https://stackoverflow.com/questions/30079242/python-logging-handler-not-appending-formatted-string-to-log
    https://docs.python.org/3.6/howto/logging.html#handlers
    """
    result_logger = logging.getLogger(__name__)
    # "d" = rotate daily; keep up to a year of backups.
    rotating = TimedRotatingFileHandler(
        base_log_filename, when="d", interval=1, backupCount=365)
    rotating.setFormatter(
        logging.Formatter(fmt='%(asctime)s - %(levelname)s - %(message)s',
                          datefmt='%Y-%m-%d %a %H:%M:%S'))
    rotating.namer = log_namer
    result_logger.setLevel(logging.INFO)
    result_logger.addHandler(rotating)
    return result_logger
def pure_api_record_logger(name='pure_api_record', dirname=dirname, type='pure-api-record-type'):
    """Return an INFO logger for Pure API records with daily rotation.

    Writes to <dirname>/<type>.log, keeping 365 backups; output is
    formatted by PureApiRecordFormatter and rotated via the module-level
    rotator/namer callables.
    """
    log_path = dirname + '/' + type + '.log'
    record_logger = logging.getLogger('experts_etl.' + name)
    record_logger.setLevel(logging.INFO)
    # when='S' with an 86400-second interval is equivalent to daily rotation.
    file_handler = TimedRotatingFileHandler(
        log_path,
        when='S',
        interval=86400,  # seconds/day
        backupCount=365)
    file_handler.setFormatter(PureApiRecordFormatter())
    file_handler.rotator = rotator
    file_handler.namer = namer
    record_logger.addHandler(file_handler)
    return record_logger
def command_line_interface():
    """Parse CLI arguments and return the scraper configuration dict.

    Requires either --url or --config (a YAML file). Optionally sets up a
    rotating log file (--logfile) and DEBUG-level logging (--debug).

    Returns: dict with a 'scraped_site' mapping and a 'service' flag.
    Raises: RuntimeError when neither --url nor --config is given.
    """
    def str2bool(value):
        # FIX: the original used type=bool, but argparse passes the raw
        # string to the type callable, and bool('False') is True — so
        # "--debug False" silently ENABLED debug. Parse common spellings
        # explicitly; anything else is False.
        return str(value).strip().lower() in ('1', 'true', 'yes', 'y', 'on')

    parser = argparse.ArgumentParser(description='Scrape remote weblisting')
    parser.add_argument('--url', type=str, help='url to scrape')
    parser.add_argument('--output', default='./')
    parser.add_argument('--config', type=str)
    parser.add_argument('--service', type=str2bool, default=False)
    parser.add_argument('--logfile', type=str, default=None)
    parser.add_argument('--debug', type=str2bool, default=False)
    args = parser.parse_args()

    FORMAT = '%(levelname)-8s | %(asctime)-15s | %(pathname)-15s +%(lineno)-4d | %(message)s'
    logFormatter = logging.Formatter(FORMAT)
    logging.basicConfig(format=FORMAT)
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    if args.logfile is not None:
        # Rotate every 5 days, keeping 5 backups; module-level
        # log_rotator/log_namer control rollover naming.
        logHandler = TimedRotatingFileHandler(args.logfile, when="d",
                                              interval=5, backupCount=5)
        logHandler.rotator = log_rotator
        logHandler.namer = log_namer
        logHandler.setFormatter(logFormatter)
        logHandler.setLevel(logging.INFO)
        logger.addHandler(logHandler)
    # args.debug is now a real bool, so the old `== "True"` fallback that
    # papered over the type=bool bug is no longer needed.
    if args.debug:
        logger.setLevel(logging.DEBUG)
    if (args.url is None) and (args.config is None):
        raise RuntimeError('Requires either --url or --config ')
    if args.config is not None:
        with open(args.config) as f:
            config = yaml.safe_load(f)
    else:
        config = {'scraped_site': {'url': args.url,
                                   'download_location': args.output}, }
    config['service'] = args.service
    logging.info(f'Running with config {config}')
    return config
def experts_etl_logger(name='experts_etl', dirname=dirname):
    """Return the INFO-level experts_etl logger with a daily-rotating file.

    Writes to <dirname>/<name>.log (365 backups); entries are formatted
    by ExpertsEtlFormatter and rotated via the module-level rotator/namer.
    """
    log_path = dirname + '/' + name + '.log'
    etl_logger = logging.getLogger('experts_etl.' + name)
    etl_logger.setLevel(logging.INFO)
    # when='S' with an 86400-second interval is equivalent to daily rotation.
    file_handler = TimedRotatingFileHandler(
        log_path,
        when='S',
        interval=86400,  # seconds/day
        backupCount=365)
    file_handler.setFormatter(
        ExpertsEtlFormatter(
            '%(timestamp)s %(levelname)s %(name)s %(message)s %(pathname)s %(funcName)s %(lineno)i'
        ))
    file_handler.rotator = rotator
    file_handler.namer = namer
    etl_logger.addHandler(file_handler)
    return etl_logger
import logging
from logging.handlers import TimedRotatingFileHandler
import os

# Module-level logger for the chat server.
logger = logging.getLogger('chat.server')
logger.setLevel(logging.INFO)


def get_filename(filename):
    # Namer callback for the rotating handler: derives a name from the
    # rotated file's directory and its date-bearing extension.
    log_directory = os.path.split(filename)[0]
    date = os.path.splitext(filename)[1][1:]
    filename = os.path.join(log_directory, date)
    # NOTE(review): the joined `filename` above is never used below, and
    # when the target exists this function falls through and returns None
    # — a namer returning None breaks rotation. The '(unknown).log'
    # literals look garbled/templated; confirm against the original source.
    if not os.path.exists(f'(unknown).log'):
        return f'(unknown).log'


# Daily rotation of realtime.log, keeping 10 backups; rotated files carry
# a 'client_<date>' suffix before being renamed by get_filename.
logger_handler = TimedRotatingFileHandler('realtime.log', when='d',
                                          interval=1, backupCount=10)
logger_handler.suffix = 'client_%Y-%m-%d'
logger_handler.namer = get_filename
logger_handler.setFormatter(
    logging.Formatter(
        '%(asctime)s - %(levelname)s - %(module)s - %(message)s'))
logger.addHandler(logger_handler)
logger.info('Логирование включено!')
def setup_logging(config=None, debug_mode=False):
    """
    Configure logging based on config data and debug mode status.

    config: mapping with a "log_mode" entry listing mode keywords
        ("full", "v1", "console", "color", "files", "file", ...).
        When None/empty, no handlers are attached.
    debug_mode: when True, log at DEBUG instead of INFO.
    Returns the configured "DRPLogger" logger.
    """
    log_path = current_path + '/logs'
    log_ext = '.log'
    # FIX: config defaults to None, but the original indexed it
    # unconditionally (config["log_mode"]) and crashed with a TypeError
    # when called without a config. Treat a missing config as "no modes".
    log_modes = config["log_mode"] if config else []
    log_prefix = log_path + '/output'
    single_log_path = log_prefix + log_ext
    log_format = "%(asctime)s [%(levelname)s] %(message)s"
    log_date_style = "%m/%d/%Y %I:%M:%S %p"
    log_format_style = "%Y-%m-%d_%H-%M-%S"
    start_timestamp = datetime.now().strftime(log_format_style)

    # Adjust log level depending on mode
    log_level = logging.DEBUG if debug_mode else logging.INFO

    # Setup Main Logger and Formatting
    root_logger = logging.getLogger("DRPLogger")
    root_logger.setLevel(log_level)
    log_formatter = logging.Formatter(log_format, log_date_style)
    color_formatter = log_formatter

    # Setup handlers depending on config options
    if "full" in log_modes or "v1" in log_modes or "console" in log_modes:
        console_handler = logging.StreamHandler(stream=sys.stdout)
        if ("full" in log_modes or "color" in log_modes
                ) and "no-color" not in log_modes and "plain" not in log_modes:
            # Import Optional Colored Formatter, if able
            try:
                from colored_log import ColoredFormatter
                color_formatter = ColoredFormatter(log_format, log_date_style)
                console_handler.setFormatter(color_formatter)
            except (ModuleNotFoundError, TypeError):
                # colored_log unavailable: handler keeps the default format.
                pass
        else:
            console_handler.setFormatter(log_formatter)
        console_handler.setLevel(log_level)
        root_logger.addHandler(console_handler)

    if "full" in log_modes or "multiple_files" in log_modes or "files" in log_modes:
        if not os.path.exists(log_path):
            os.makedirs(log_path)
        staged_handler = TimedRotatingFileHandler(single_log_path,
                                                  when="midnight", interval=1)
        # The suffix is a fixed startup timestamp (not a strftime pattern
        # with rotation-time substitution), so the rollover below stamps
        # the PREVIOUS run's log with this run's start time.
        staged_handler.suffix = start_timestamp
        staged_handler.namer = lambda name: name.replace(log_ext, "") + log_ext
        if os.path.isfile(single_log_path):
            # log already exists, roll over!
            staged_handler.doRollover()
        staged_handler.setFormatter(log_formatter)
        staged_handler.setLevel(log_level)
        root_logger.addHandler(staged_handler)
    elif "v1" in log_modes or "single_file" in log_modes or "file" in log_modes:
        # Single, truncated-on-start log file (no rotation).
        file_handler = logging.FileHandler(single_log_path, mode='w')
        file_handler.setFormatter(log_formatter)
        file_handler.setLevel(log_level)
        root_logger.addHandler(file_handler)
    return root_logger
# NOTE(review): this `else:` pairs with an `if` outside the visible chunk
# (presumably the branch taken when the GUI is started normally) — the
# condition cannot be confirmed from here.
else:
    workdir = get_absolute_path(args.workdir)
    os.makedirs(workdir, exist_ok=True)

    # Setup rotating file logging
    def log_file_namer(filename):
        # Rename rotated logs "dqmgui.log.<suffix>" -> "dqmgui_<suffix>.log".
        # The [11:] slice drops the literal "dqmgui.log." prefix (11 chars)
        # from the final path component.
        parts = filename.split('/')
        parts[-1] = f'dqmgui_{parts[-1][11:]}.log'
        return '/'.join(parts)

    if not args.stderr:
        # Log to <workdir>/logs/dqmgui.log, rotating at midnight.
        log_path = os.path.join(workdir, 'logs', 'dqmgui.log')
        os.makedirs(os.path.dirname(log_path), exist_ok=True)
        handler = TimedRotatingFileHandler(log_path, when='midnight', interval=1)
        handler.namer = log_file_namer
    else:
        # --stderr: plain stream logging instead of a file.
        handler = logging.StreamHandler()
    logger = logging.getLogger()
    formatter = logging.Formatter(
        '%(asctime)s - %(levelname)s - %(name)s - %(message)s')
    handler.setFormatter(formatter)
    logger.setLevel(logging.INFO)
    logger.addHandler(handler)
    # Block until services are initialized, then start serving.
    asyncio.get_event_loop().run_until_complete(
        initialize_services(workdir, args.in_memory, args.files, args.renderers))
    config_and_start_webserver(args.port)
def get_file_handler():
    """Build the weekly-rotating handler for LOG_FILE.

    Rotates on Monday ('W0'); delay=True defers opening the file until
    the first record is emitted. Rotated files are date-stamped and then
    renamed by the module-level get_filename callable.
    """
    handler = TimedRotatingFileHandler(LOG_FILE, when='W0', delay=True)
    handler.namer = get_filename
    handler.suffix = '%Y-%m-%d'
    handler.setFormatter(FORMATTER)
    return handler
# NOTE(review): the entries below are the tail of a level->handler dispatch
# table (`data_messages`) whose beginning — and the enclosing function's
# signature defining `level`, `message`, `print_need`,
# `message_without_new_lines` — lies outside this chunk. Kept byte-identical;
# indentation inferred.
        'func': LOG.error,
        'msg_prefix': 'Error: '
    },
    logging.CRITICAL: {
        'func': LOG.critical,
        'msg_prefix': 'Critical error: '
    }
}
if print_need:
    # Echo the prefixed message to stdout when requested.
    print(data_messages[level]['msg_prefix'] + message)
# Log via the level-appropriate LOG method (message presumably has its
# newlines stripped by the caller scope — confirm upstream).
data_messages[level]['func'](message_without_new_lines)

# Module-level CLI logger: daily rotation at LOGS_PATH, 5 backups, UTF-8.
LOG = logging.getLogger('cli')
FORMATTER = \
    logging.Formatter('%(asctime)s - %(levelname)s - %(name)s - %(message)s ')
ROTATION_LOGGING_HANDLER = \
    TimedRotatingFileHandler(
        LOGS_PATH, when='D', interval=1, backupCount=5, encoding='utf-8')
ROTATION_LOGGING_HANDLER.setFormatter(FORMATTER)
ROTATION_LOGGING_HANDLER.setLevel(logging.DEBUG)
# change_filename (defined elsewhere in the file) renames rotated files.
ROTATION_LOGGING_HANDLER.namer = change_filename
LOG.addHandler(ROTATION_LOGGING_HANDLER)
LOG.setLevel(logging.DEBUG)