def get_handlers(self, log_name):
    """Build the pair of handlers for *log_name*.

    Returns a two-element list: [colorized stderr handler, daily-rotating
    file handler], both formatted via the configured template.
    """
    conf_dir = LOGGER_CONF["path"]
    conf_fmt = LOGGER_CONF["format"]
    # logger_size = int(LOGGER_CONF["size"])
    conf_level = LOGGER_CONF["level"].upper()
    if not os.path.exists(conf_dir):
        os.makedirs(conf_dir)

    def format_record(record, handler):
        # Render one record through the configured format template.
        return conf_fmt.format(
            date=record.time,                             # record timestamp
            level=record.level_name,                      # severity name
            filename=os.path.split(record.filename)[-1],  # bare file name
            func_name=record.func_name,                   # emitting function
            lineno=record.lineno,                         # emitting line
            msg=record.message,                           # log message
            channel=record.channel,                       # logger channel
            pid=self._pid,
            ppid=self._ppid,
        )

    # Console output.
    stderr_handler = ColorizedStderrHandler(bubble=True, level=conf_level)
    stderr_handler.formatter = format_record

    # File output, rotated daily.
    file_handler = TimedRotatingFileHandler(
        os.path.join(conf_dir, '{}.log'.format(log_name)),
        date_format='%Y-%m-%d',
        rollover_format='{basename}_{timestamp}{ext}',
        bubble=True,
        level=conf_level,
        encoding='utf-8',
    )
    file_handler.formatter = format_record

    logbook.set_datetime_format("local")
    return [stderr_handler, file_handler]
def logger(logger_name, file=None):
    """Return a logbook Logger writing to stdout and, optionally, to a file."""
    logbook.set_datetime_format("local")
    StreamHandler(sys.stdout).push_application()
    if file is not None:
        # Also persist records to <file>/log.log; bubble=False makes this
        # handler terminal for the records it accepts.
        FileHandler(os.path.join(file, 'log.log'), bubble=False).push_application()
    return Logger(logger_name)
def __init__(self, name='app', filename=app.config['LOG_NAME']):
    # Logger channel defaults to 'app'.
    # NOTE(review): the default *filename* is read from app.config at class
    # definition time, not per call — confirm this is intended.
    self.handler = logbook.FileHandler(filename, encoding='utf-8')
    logbook.set_datetime_format("local")  # timestamps in local time, not UTC
    self.logger = logbook.Logger(name)
    self.handler.push_application()  # route all subsequent records to the file
def config_logbook_pp(log_dir: str, infile_name: str, log_to_file: bool=True,
                      file_level: str='INFO', log_type: str="pp",
                      stream_level: str='DEBUG') -> None:
    """
    Configure logging via logbook for assorted preprocessing scripts.
    """
    # NOTE(review): this function is a generator (it yields inside a with
    # block) — presumably driven via contextlib.contextmanager or similar so
    # the handlers stay active only for the caller's scope; confirm.
    logbook.set_datetime_format("local")  # local-time timestamps
    logname = "{}_{}.log".format(infile_name, log_type)
    log_path = norman_path(log_dir, logname)
    fileutils.mkdir_p(log_dir)  # ensure the log directory exists
    if log_to_file:
        # Daily rotation, one week of backups.
        file_handler = TimedRotatingFileHandler(log_path, level=file_level, backup_count=7)
    else:
        file_handler = NullHandler()  # discard file-bound records
    stream_handler = StreamHandler(stream=stdout, level=stream_level, bubble=True)
    with file_handler, stream_handler:
        yield
def configure_logging(log_level=None, log_file=None, simplified_console_logs=False):
    """
    This should be called once as early as possible in app startup to configure logging handlers and formatting.

    :param log_level: The level at which to record log messages (DEBUG|INFO|NOTICE|WARNING|ERROR|CRITICAL)
    :type log_level: str
    :param log_file: The file to write logs to, or None to disable logging to a file
    :type log_file: str | None
    :param simplified_console_logs: Whether or not to use the simplified logging format and coloring
    :type simplified_console_logs: bool
    """
    # Set datetimes in log messages to be local timezone instead of UTC
    logbook.set_datetime_format('local')

    # Redirect standard lib logging to capture third-party logs in our log files (e.g., tornado, requests)
    logging.root.setLevel(
        logging.WARNING
    )  # don't include DEBUG/INFO/NOTICE-level logs from third parties
    logbook.compat.redirect_logging(set_root_logger_level=False)

    # Add a NullHandler to suppress all log messages lower than our desired log_level. (Otherwise they go to stderr.)
    NullHandler().push_application()

    log_level = log_level or Configuration['log_level']
    format_string, log_colors = _LOG_FORMAT_STRING, _LOG_COLORS
    if simplified_console_logs:
        format_string, log_colors = _SIMPLIFIED_LOG_FORMAT_STRING, _SIMPLIFIED_LOG_COLORS

    # handler for stdout
    log_handler = _ColorizingStreamHandler(
        stream=sys.stdout,
        level=log_level,
        format_string=format_string,
        log_colors=log_colors,
        bubble=True,
    )
    log_handler.push_application()

    # handler for log file
    if log_file:
        fs.create_dir(os.path.dirname(log_file))
        previous_log_file_exists = os.path.exists(log_file)
        event_handler = _ColorizingRotatingFileHandler(
            filename=log_file,
            level=log_level,
            format_string=_LOG_FORMAT_STRING,
            log_colors=_LOG_COLORS,
            bubble=True,
            max_size=Configuration['max_log_file_size'],
            backup_count=Configuration['max_log_file_backups'],
        )
        event_handler.push_application()
        if previous_log_file_exists:
            # Force application to create a new log file on startup.
            event_handler.perform_rollover(increment_logfile_counter=False)
        else:
            event_handler.log_application_summary()
def test_tz_aware(activation_strategy, logger):
    """
    tests logbook.set_datetime_format() with a time zone aware time factory
    """
    class utc(tzinfo):
        # Minimal fixed-offset (zero) tzinfo implementation for UTC.
        def tzname(self, dt):
            return 'UTC'
        def utcoffset(self, dt):
            return timedelta(seconds=0)
        def dst(self, dt):
            return timedelta(seconds=0)
    utc = utc()

    def utc_tz():
        # Custom time factory producing tz-aware datetimes.
        return datetime.now(tz=utc)

    FORMAT_STRING = '{record.time:%H:%M:%S.%f%z} {record.message}'
    handler = logbook.TestHandler(format_string=FORMAT_STRING)
    with activation_strategy(handler):
        logbook.set_datetime_format(utc_tz)
        try:
            logger.warn('this is a warning.')
            record = handler.records[0]
        finally:
            # put back the default time factory
            logbook.set_datetime_format('utc')
    # The record timestamp must carry the tzinfo supplied by the factory.
    assert record.time.tzinfo is not None
def initLogger(filename, fileLogFlag=True, stdOutFlag=False):
    """Create a logbook Logger named *filename* with optional file/stderr handlers."""
    log_dir = os.path.join('log')
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)
    logbook.set_datetime_format('local')

    new_logger = logbook.Logger(filename)
    new_logger.handlers = []
    if fileLogFlag:
        # Daily-rotating file handler writing to log/log.log.
        file_handler = logbook.TimedRotatingFileHandler(
            os.path.join(log_dir, '%s.log' % 'log'),
            date_format='%Y-%m-%d', bubble=True, encoding='utf-8')
        file_handler.formatter = logFormate
        new_logger.handlers.append(file_handler)
    if stdOutFlag:
        # Colorized console (stderr) handler.
        stderr_handler = logbook.more.ColorizedStderrHandler(bubble=True)
        stderr_handler.formatter = logFormate
        new_logger.handlers.append(stderr_handler)
    return new_logger
def __init__(self, log_folder, log_name=None, max_size=100, backup_count=10): """ log_folder: 日志文件夹 log_name: 日志文件名称 max_size: 单个日志文件的大小,单位 MB backup_count: 总备份数量,默认为 5 log_path: 日志文件全路径 注意:所有 handler 中的 bubble 表示记录是否给下个 handler 用。 """ # 设置日志信息时间的格式 set_datetime_format('local') self.log_folder = log_folder self.log_name = str(log_name) if log_name else 'pms' self.log_path = self.__file_path() # 检查存放日志的文件夹是否存在,不存在则创建 self.__check_path() self.log_ = Logger(self.log_name.split('.')[0]) self.log_.handlers.append( RotatingFileHandler(filename=self.log_path, mode='a', level='INFO', max_size=max_size * 1024**2, backup_count=backup_count, bubble=True)) self.log_.handlers.append(ColorizedStderrHandler(bubble=False))
def __init__(self, location='temp', home_path=os.getcwd()):
    """Set up file logging and load the CX-ASAP config file.

    If run in a pipeline, *home_path* can be pushed through; otherwise the
    current working directory is used. Ideally this would be a parameter in
    sys_conf.yaml, but the logbook must exist before the .yaml can be read.

    NOTE: the *home_path* default is evaluated once at class-definition time.
    """
    logbook.FileHandler(home_path + '/error_output.txt', 'a').push_application()
    self.logger = logbook.Logger(self.__class__.__name__)
    logbook.set_datetime_format("local")
    self.logger.info('Class Initialised!')
    # Locate conf.yaml via the known CX-ASAP directory tree relative to this module.
    self.conf_path = pathlib.Path(
        os.path.abspath(__file__)).parent.parent.parent / 'conf.yaml'
    with open(self.conf_path, 'r') as f:
        try:
            # BUG FIX: supply an explicit Loader — bare yaml.load() is
            # deprecated and unsafe; the config file needs only safe tags.
            self.cfg = yaml.load(f, Loader=yaml.SafeLoader)
        except yaml.YAMLError as error:
            # BUG FIX: was `yaml.YAMLERROR`, which does not exist and would
            # raise AttributeError instead of handling the parse failure.
            self.logger.critical(
                f'Failed to open config file with {error}')
            exit()
    if location == 'temp':
        os.chdir(self.cfg['analysis_path'])
def _create_log_handler(config, add_hostname=False):
    """Build a CloseableNestedSetup of logbook handlers from *config*.

    Always includes a NullHandler (base of the stack) and a stderr stream
    handler; adds INFO/DEBUG/commands file handlers when a log directory is
    configured, and a mail handler when an email address is configured.
    """
    logbook.set_datetime_format("local")
    handlers = [logbook.NullHandler()]
    format_str = " ".join(["[{record.time:%Y-%m-%d %H:%M}]",
                           "{record.extra[source]}:" if add_hostname else "",
                           "{record.message}"])
    log_dir = get_log_dir(config)
    if log_dir:
        if not os.path.exists(log_dir):
            utils.safe_makedir(log_dir)
            # Wait to propagate, Otherwise see logging errors on distributed filesystems.
            time.sleep(5)
        # Main INFO log, full DEBUG log, and a commands-only log.
        handlers.append(logbook.FileHandler(os.path.join(log_dir, "%s.log" % LOG_NAME),
                                            format_string=format_str, level="INFO",
                                            filter=_not_cl))
        handlers.append(logbook.FileHandler(os.path.join(log_dir, "%s-debug.log" % LOG_NAME),
                                            format_string=format_str, level="DEBUG", bubble=True,
                                            filter=_not_cl))
        handlers.append(logbook.FileHandler(os.path.join(log_dir, "%s-commands.log" % LOG_NAME),
                                            format_string=format_str, level="DEBUG",
                                            filter=_is_cl))
    email = config.get("email", config.get("resources", {}).get("log", {}).get("email"))
    if email:
        email_str = u'''Subject: [bcbio-nextgen] {record.extra[run]} \n\n {record.message}'''
        handlers.append(logbook.MailHandler(email, [email],
                                            format_string=email_str,
                                            level='INFO', bubble = True))
    handlers.append(logbook.StreamHandler(sys.stderr, format_string=format_str,
                                          bubble=True, filter=_not_cl))
    return CloseableNestedSetup(handlers)
def lb_fs_monitor(base_dir: str, log_dir: str, logging_lvl: str='INFO',
                  log_type: str="fs_monitor") -> None:
    """
    Configure logging via logbook for assorted File System Monitoring processes.

    *logging_lvl* applies to both the rotating file handler and the stdout
    stream handler.
    """
    record_fmt = ("[{record.time:%Y-%m-%d %H:%M:%S}] - "
                  "{record.level} - "
                  "{record.func_name} - "
                  "{record.message}")
    logbook.set_datetime_format("local")
    veribuild_dir(log_dir)
    log_name = "{}_{}.log".format(split(base_dir)[-1], log_type)
    full_path = join_n_norm(log_dir, log_name)
    # Daily-rotating file handler, three weeks of backups; pushed first.
    logbook.TimedRotatingFileHandler(full_path, level=logging_lvl,
                                     format_string=record_fmt,
                                     backup_count=21).push_application()
    # Console handler on stdout.
    logbook.StreamHandler(stdout, level=logging_lvl, format_string=record_fmt,
                          bubble=True).push_application()
def get_logger(name='appium', file_log=file_stream, level=''):
    """Factory: a Logger with colorized-stderr plus hourly-rotating file output."""
    logbook.set_datetime_format('local')
    ColorizedStderrHandler(bubble=False, level=level).push_thread()
    file_handler = logbook.TimedRotatingFileHandler(
        os.path.join(LOG_DIR, '%s.log' % name),
        date_format='%Y-%m-%d-%H', bubble=True, encoding='utf-8')
    file_handler.push_thread()
    return logbook.Logger(name)
def lb_pp(log_dir: str, in_file: str, logging_level: str, log_type: str="pp") -> None:
    """
    Configure logging via logbook for assorted preprocessing scripts.

    *logging_level* sets the level for logs written to file; the stdout
    StreamHandler logs all records (set to DEBUG).
    """
    logbook.set_datetime_format("local")
    veribuild_dir(log_dir)
    target = join_n_norm(log_dir, "{}".format("{}_{}.log".format(in_file, log_type)))
    record_fmt = ("[{record.time:%Y-%m-%d %H:%M:%S}] - "
                  "level:{record.level} - "
                  "filename:{record.filename} - "
                  "line:{record.lineno} - "
                  "function:{record.func_name} - "
                  "message:{record.message}")
    # File handler rotates daily, keeping three weeks of backups.
    file_handler = logbook.TimedRotatingFileHandler(target, level=logging_level,
                                                    format_string=record_fmt,
                                                    backup_count=21)
    console = logbook.StreamHandler(stdout, format_string=record_fmt, bubble=True)
    file_handler.push_application()
    console.push_application()
def init_logger():
    """
    get logger Factory function
    """
    logbook.set_datetime_format("local")  # format record times in local time
    # Rebind handlers on the module-level logger: file first, stderr second.
    run_log.handlers = []
    run_log.handlers.append(log_file)
    run_log.handlers.append(log_std)
    return run_log
def get_logger(name=APP_NAME, file_log=False):
    """Return a logbook Logger; optionally also log to a daily-rotating file."""
    logbook.set_datetime_format('local')
    # Console output (colorized, on stderr).
    ColorizedStderrHandler(bubble=False).push_application()
    if file_log:
        rotating = logbook.TimedRotatingFileHandler(LOG_PATH, date_format='%Y%m%d',
                                                    bubble=True)
        rotating.push_application()
    return logbook.Logger(name)
def init(self):
    """Load config.ini, build stream/file log handlers, and connect the DB handler."""
    self.filename, file_extension = os.path.splitext(os.path.basename(__file__))
    config_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), '../config', 'config.ini')
    log_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), '../logs', '%s.log' % self.filename)

    # load config
    self.config = configparser.ConfigParser(strict=False, allow_no_value=True)
    self.config.read(config_file)

    # init logger
    logbook.set_datetime_format("local")  # local-time timestamps
    self.logger = logbook.Logger(name=self.filename)
    format_string = '%s %s' % ('[{record.time:%Y-%m-%d %H:%M:%S.%f%z}] {record.level_name}',
                               '{record.channel}:{record.lineno}: {record.message}')
    if self.config.has_option('handler_stream_handler', 'verbose'):
        # Verbose mode: log to stdout in addition to the rotating file.
        loghandler = logbook.StreamHandler(sys.stdout,
                                           level=self.config.get('Logger', 'level'),
                                           bubble=True, format_string=format_string)
        self.logger.handlers.append(loghandler)
        loghandler = logbook.TimedRotatingFileHandler(log_file,
                                                      level=self.config.get('Logger', 'level'),
                                                      date_format='%Y%m%d', backup_count=5,
                                                      bubble=True, format_string=format_string)
        self.logger.handlers.append(loghandler)
    else:
        # Quiet mode: rotating file only.
        loghandler = logbook.TimedRotatingFileHandler(log_file,
                                                      level=self.config.get('Logger', 'level'),
                                                      date_format='%Y%m%d', backup_count=5,
                                                      bubble=True, format_string=format_string)
        self.logger.handlers.append(loghandler)
    self.db = NewsparserDatabaseHandler.instantiate_from_configparser(self.config, self.logger)
def main():
    """Entry point: configure logging, build the twitter client, and run the
    Firebase streaming loop until interrupted."""
    logbook.set_datetime_format("local")
    logbook.compat.redirect_logging()  # funnel stdlib logging through logbook
    # DevSettings when HNFP_DEBUG is set, ProdSettings otherwise.
    settings = ProdSettings() if not env('HNFP_DEBUG') else DevSettings()
    settings.setup(env)
    # Twitter OAuth credentials come from the environment.
    config = {
        'token': env('HNFP_TOKEN'),
        'token_secret': env('HNFP_TOKEN_SECRET'),
        'consumer_key': env('HNFP_CONSUMER_KEY'),
        'consumer_secret': env('HNFP_CONSUMER_SECRET'),
    }
    klass = settings.get_twitter()
    twitter = klass(**config)
    hackernews_frontpage = HackerNewsFrontPage(env('HNFP_DB_FILE'), twitter)
    firebase = FirebaseStreamingEvents(hackernews_frontpage)
    # Keep restarting the stream on errors; exit cleanly on Ctrl-C.
    while True:
        try:
            firebase.run()
        except KeyboardInterrupt:
            _logger.info("Shutdown signal received, stopping..")
            return
        except Exception as e:
            _logger.exception("Exception: " + str(e))
            settings.report_exception(e)
def _create_log_handler(config, add_hostname=False, direct_hostname=False, write_toterm=True):
    """Build a CloseableNestedSetup of logbook handlers from *config*.

    add_hostname: prefix records with {record.extra[source]}.
    direct_hostname: prefix records with this machine's hostname.
    write_toterm: also mirror command output to stdout and records to stderr.
    """
    logbook.set_datetime_format("utc")  # NOTE: UTC here, unlike sibling setups
    handlers = [logbook.NullHandler()]
    format_str = "".join([
        "[{record.time:%Y-%m-%dT%H:%MZ}] " if config.get("include_time", True) else "",
        "{record.extra[source]}: " if add_hostname else "",
        "%s: " % (socket.gethostname)() if direct_hostname else "",
        "{record.message}"
    ])
    log_dir = get_log_dir(config)
    if log_dir:
        if not os.path.exists(log_dir):
            utils.safe_makedir(log_dir)
            # Wait to propagate, Otherwise see logging errors on distributed filesystems.
            time.sleep(5)
        # Main INFO log, full DEBUG log, and a commands-only log.
        handlers.append(
            logbook.FileHandler(os.path.join(log_dir, "%s.log" % LOG_NAME),
                                format_string=format_str, level="INFO",
                                filter=_not_cl))
        handlers.append(
            logbook.FileHandler(os.path.join(log_dir, "%s-debug.log" % LOG_NAME),
                                format_string=format_str, level="DEBUG", bubble=True,
                                filter=_not_cl))
        handlers.append(
            logbook.FileHandler(os.path.join(log_dir, "%s-commands.log" % LOG_NAME),
                                format_string=format_str, level="DEBUG",
                                filter=_is_cl))
    if write_toterm:
        handlers.append(
            logbook.StreamHandler(sys.stdout, format_string="{record.message}",
                                  level="DEBUG", filter=_is_stdout))
        handlers.append(
            logbook.StreamHandler(sys.stderr, format_string=format_str,
                                  bubble=True, filter=_not_cl))
    email = config.get("email", config.get("resources", {}).get("log", {}).get("email"))
    if email:
        email_str = u'''Subject: [bcbio-nextgen] {record.extra[run]} \n\n {record.message}'''
        handlers.append(
            logbook.MailHandler(email, [email],
                                format_string=email_str,
                                level='INFO', bubble=True))
    return CloseableNestedSetup(handlers)
def init_logging():
    """Install null, colorized-stderr, and rotating-file logbook handlers."""
    import logbook
    import logbook.more
    logbook.set_datetime_format('local')
    # Base of the stack: swallow anything the other handlers do not take.
    logbook.NullHandler(level=0).push_application()
    logbook.more.ColorizedStderrHandler(level='INFO').push_application()
    logbook.RotatingFileHandler(
        config_path('roland.log'), level='INFO', bubble=True).push_application()
def init_logger():
    """Point the module-level user_log at the shared stdout/file handlers."""
    logbook.set_datetime_format("local")  # local-time timestamps
    # Replace any existing handlers: stdout handler first, then file handler.
    user_log.handlers = []
    user_log.handlers.append(user_std_handler)
    user_log.handlers.append(user_file_handler)
def get_logger(name='SysLog', file_log=file_stream, level=''):
    """Factory: a Logger with colorized-stderr plus hourly-rotating file output."""
    logbook.set_datetime_format('local')
    ColorizedStderrHandler(bubble=False, level=level).push_thread()
    rotating = logbook.TimedRotatingFileHandler(
        os.path.join(LOG_DIR, '%s.log' % name),
        date_format='%Y-%m-%d-%H', bubble=True, encoding='utf-8')
    rotating.push_thread()
    return logbook.Logger(name)
def init_logger():
    """Build the 'user_log' Logger with the stdout/file handlers from print_handler()."""
    fresh_log = Logger("user_log")
    std_handler, f_handler = print_handler()
    logbook.set_datetime_format("local")
    # Replace any existing handlers with exactly these two.
    fresh_log.handlers = []
    fresh_log.handlers.append(std_handler)
    fresh_log.handlers.append(f_handler)
    return fresh_log
def set_logger():
    """Initialise the module-global g_logger with the file handler only, then
    install the module's logging helper functions."""
    global g_logger
    init_logger()
    logbook.set_datetime_format("local")
    # File output only; the stdout handler is deliberately left disabled.
    g_logger.handlers = []
    # g_logger.handlers.append(user_std_handler)
    g_logger.handlers.append(user_file_handler)
    import_log_funcs()  # expose log functions at module level
def get_logger(name='jiekou', file_log=file_stream, level=''):
    """Factory: a Logger with colorized-stderr plus hourly-rotating file output."""
    logbook.set_datetime_format('local')
    ColorizedStderrHandler(bubble=False, level=level).push_thread()
    rotating = logbook.TimedRotatingFileHandler(
        os.path.join(log_path, '%s.log' % name),
        date_format='%Y-%m-%d-%H', bubble=True, encoding='utf-8')
    rotating.push_thread()
    return logbook.Logger(name)
def get_logger(name="interface_Test", file_log=file_stream, level=''):
    """
    get logger Factory function
    """
    logbook.set_datetime_format("local")  # local-time timestamps
    ColorizedStderrHandler(bubble=False, level=level).push_thread()
    # NOTE(review): the file name is hard-coded to "Log.log" ("%s.log" % "Log"),
    # so *name* only affects the logger channel, not the file — confirm intended.
    logbook.TimedRotatingFileHandler(
        os.path.join(LOG_DIR, "%s.log" %"Log"),
        date_format="%Y-%m-%d-%H", bubble=True, encoding="utf-8").push_thread()
    return logbook.Logger(name)
def get_logger(name='LOGBOOK', log_path='', file_log=False):
    """Return a Logger; always log to stderr, and to a daily file when file_log."""
    logbook.set_datetime_format('local')
    ColorizedStderrHandler(bubble=True).push_application()
    # Fall back to ./log when no explicit directory was given.
    target_dir = log_path if log_path else os.path.join('log')
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
    if file_log:
        TimedRotatingFileHandler(os.path.join(target_dir, '%s.log' % name.lower()),
                                 date_format='%Y-%m-%d', bubble=True).push_application()
    return Logger(name)
def get_logger(name='system', level=''):
    """
    get logger Factory function
    """
    logbook.set_datetime_format('local')  # local-time timestamps
    ColorizedStderrHandler(bubble=False, level=level).push_thread()
    # NOTE(review): both handlers use bubble=False; the stdout handler (pushed
    # last) handles records first and stops propagation, so the stderr handler
    # may never receive anything — confirm this is intended.
    logbook.StreamHandler(sys.stdout, bubble=False, encoding='utf-8', level=level).push_thread()
    return logbook.Logger(name)
def get_logger(name='log', level='INFO'):
    """Return a logbook Logger that logs to the console only."""
    logbook.set_datetime_format('local')
    # Console handler (colorized stderr).
    ColorizedStderrHandler(bubble=False, level=level).push_thread()
    # File output is currently disabled; re-enable with a
    # TimedRotatingFileHandler if persistent logs are needed.
    return logbook.Logger(name)
def configure_logging(log_level=None, log_file=None, simplified_console_logs=False):
    """
    This should be called once as early as possible in app startup to configure logging handlers and formatting.

    :param log_level: The level at which to record log messages (DEBUG|INFO|NOTICE|WARNING|ERROR|CRITICAL)
    :type log_level: str
    :param log_file: The file to write logs to, or None to disable logging to a file
    :type log_file: str | None
    :param simplified_console_logs: Whether or not to use the simplified logging format and coloring
    :type simplified_console_logs: bool
    """
    # Set datetimes in log messages to be local timezone instead of UTC
    logbook.set_datetime_format('local')

    # Redirect standard lib logging to capture third-party logs in our log files (e.g., tornado, requests)
    logging.root.setLevel(logging.WARNING)  # don't include DEBUG/INFO/NOTICE-level logs from third parties
    logbook.compat.redirect_logging(set_root_logger_level=False)

    # Add a NullHandler to suppress all log messages lower than our desired log_level. (Otherwise they go to stderr.)
    NullHandler().push_application()

    log_level = log_level or Configuration['log_level']
    format_string, log_colors = _LOG_FORMAT_STRING, _LOG_COLORS
    if simplified_console_logs:
        format_string, log_colors = _SIMPLIFIED_LOG_FORMAT_STRING, _SIMPLIFIED_LOG_COLORS

    # handler for stdout
    log_handler = _ColorizingStreamHandler(
        stream=sys.stdout,
        level=log_level,
        format_string=format_string,
        log_colors=log_colors,
        bubble=True,
    )
    log_handler.push_application()

    # handler for log file
    if log_file:
        fs.create_dir(os.path.dirname(log_file))
        previous_log_file_exists = os.path.exists(log_file)
        event_handler = _ColorizingRotatingFileHandler(
            filename=log_file,
            level=log_level,
            format_string=_LOG_FORMAT_STRING,
            log_colors=_LOG_COLORS,
            bubble=True,
            max_size=Configuration['max_log_file_size'],
            backup_count=Configuration['max_log_file_backups'],
        )
        event_handler.push_application()
        if previous_log_file_exists:
            # Force application to create a new log file on startup.
            event_handler.perform_rollover(increment_logfile_counter=False)
        else:
            event_handler.log_application_summary()
def rotating_logger(name: str, level=INFO, folder: str = default_path('test_logs')) -> Logger:
    """Logger writing to <folder>/<name>.log, rotated daily, 60 backups kept."""
    set_datetime_format('local')
    handler = TimedRotatingFileHandler(f'{folder}/{name}.log',
                                       date_format='%Y-%m-%d',
                                       bubble=True,
                                       level=level,
                                       backup_count=60)
    handler.push_application()
    return Logger(name)
def metadata(path, email, update):
    """Download assembly_summary.txt and BioSample metadata."""
    logbook.set_datetime_format("local")
    # Log under <path>/.logs/metadata.log, keeping ten rotated backups.
    handler = logbook.TimedRotatingFileHandler(os.path.join(
        path, ".logs", "metadata.log"), backup_count=10)
    handler.push_application()
    genbank = Genbank(path)
    metadata = genbank.metadata(email=email, update=update)
    genbank.species_metadata(metadata)
def get_logger(name='接口', level=''):
    """Logger with colorized stderr output and an hourly-rotating file under *path*."""
    logbook.set_datetime_format('local')  # local-time timestamps
    ColorizedStderrHandler(bubble=False, level=level).push_thread()
    log_target = os.path.join(path, '%s.log' % name)
    logbook.TimedRotatingFileHandler(log_target, date_format='%Y-%m-%d-%H',
                                     bubble=True, encoding='utf-8').push_thread()
    return logbook.Logger(name)
def __init__(
        self,
        log_level=LogLevel.INFO,
        format_str='[{record.time:%Y-%m-%d %H:%M:%S}] - {record.channel} - {record.level_name} '
                   '- {record.message}'):
    """Create the 'WindAdapter' logger with stdout and file handlers, then
    apply *log_level* via self.set_level."""
    self.logger = Logger('WindAdapter')
    set_datetime_format('local')  # local-time timestamps
    StreamHandler(sys.stdout, format_string=format_str).push_application()
    # Persist everything to WindAdapter.log in the working directory.
    FileHandler('WindAdapter.log', bubble=True, format_string=format_str).push_application()
    self.set_level(log_level)
def logger(name, stream_level=DEBUG, file_level=DEBUG):
    """Return a Logger writing to stdout and to a timestamped file under logs/.

    :param name: logger channel and log-file prefix
    :param stream_level: minimum level for the stdout handler
    :param file_level: minimum level for the file handler
    """
    set_datetime_format('local')
    StreamHandler(sys.stdout, level=stream_level, bubble=True).push_application()
    # FIX: removed the unused `filename` local (it was derived from __file__
    # via a non-portable '/' split and never read).
    FileHandler(
        f'logs/{name}_{datetime.today().strftime("%Y-%m-%d_%H-%M")}.log',
        bubble=True, level=file_level, delay=True).push_application()
    return Logger(name)
def handle_logging(path):
    """Install a rotating file handler at *path*; no-op when path is falsy."""
    if not path:
        return
    logbook.set_datetime_format("local")
    logbook.compat.redirect_logging()  # capture stdlib logging too
    record_format = ("{record.time:%Y-%m-%d %H:%M:%S.%f} : {record.level_name}"
                     " : {record.channel} : {record.message}")
    logbook.RotatingFileHandler(path, level='INFO',
                                format_string=record_format).push_application()
def init_logger():
    """Write error info log file as default.

    Return the instance of logbook logger.
    """
    # Name the logger after the *calling* file, not this module.
    filename = inspect.getframeinfo(inspect.currentframe().f_back).filename
    logger = Logger(filename)
    logbook.set_datetime_format("local")
    logger.handlers = []
    logger.handlers.append(log_file)  # module-level file handler
    logger.handlers.append(log_std)   # module-level console handler
    return logger
def cli(ctx, path):
    """Assess the integrity of your genomes through automated analysis of
    species-based statistics and metadata.
    """
    # Only run the default QC when no subcommand was given.
    if ctx.invoked_subcommand is None:
        logbook.set_datetime_format("local")
        # Log under <path>/.logs/qc.log, keeping ten rotated backups.
        handler = logbook.TimedRotatingFileHandler(os.path.join(
            path, ".logs", "qc.log"), backup_count=10)
        handler.push_application()
        genbank = Genbank(path)
        genbank.qc()
def qc(self):
    """Run QC for every species, logging each run to <species>/.logs/qc.log."""
    self.prune()
    # FIX: hoisted out of the loop — the datetime format is process-global,
    # re-setting it per species was redundant.
    logbook.set_datetime_format("local")
    for species in self.species():
        handler = logbook.TimedRotatingFileHandler(Path(
            species.path, ".logs", "qc.log"), backup_count=10)
        # FIX: previously each handler was pushed and never popped, so every
        # species' records were also written to all earlier species' log
        # files. applicationbound() pops the handler when this species ends.
        with handler.applicationbound():
            try:
                species.qc()
            except Exception:
                self.log.exception(f"qc command failed for {species.name}")
def test_invalid_time_factory():
    """
    tests logbook.set_datetime_format() with an invalid time factory callable
    """
    def invalid_factory():
        # Returns a bool instead of a datetime — must be rejected.
        return False
    with pytest.raises(ValueError) as e:
        try:
            logbook.set_datetime_format(invalid_factory)
        finally:
            # put back the default time factory
            logbook.set_datetime_format('utc')
    assert 'Invalid callable value' in str(e.value)
def decorator(*args, **kwargs):
    """Wrapper: run *f* under gevent-aware logbook file logging, logging the
    call and its return value, unless console-script logging is bypassed."""
    from infi.logging.wrappers import script_logging_context
    from logbook.concurrency import enable_gevent
    from infi.traceback import traceback_context
    from os import getpid, getuid
    from datetime import datetime
    from docopt import DocoptExit
    from sys import stderr
    import logbook
    if _bypass_console_script_logging:
        # Caller asked to skip logging setup; invoke directly.
        return f(*args, **kwargs)
    enable_gevent()
    # Per-script log file under /tmp, capped at 20 MiB.
    filename = '/tmp/{}.log'.format(name if name else f.__name__)
    with script_logging_context(logfile_path=filename, logfile_max_size=20 * 1024 * 1024), traceback_context(), exception_handling_context():
        logbook.set_datetime_format("local")
        logger.info("Calling {}".format(f.__name__))
        result = f(*args, **kwargs)
        logger.info("Call to {} returned {}".format(f.__name__, result))
        return result
def lb_streaming(logging_level: str) -> None:
    """Configure a stdout-only logbook StreamHandler at *logging_level*."""
    logbook.set_datetime_format("local")
    record_fmt = ("[{record.time:%Y-%m-%d %H:%M:%S}] - "
                  "{record.level} - "
                  "{record.func_name} - "
                  "{record.message}")
    console = logbook.StreamHandler(stdout, format_string=record_fmt,
                                    bubble=True, level=logging_level)
    console.push_application()
def test_timedate_format(activation_strategy, logger):
    """
    tests the logbook.set_datetime_format() function
    """
    FORMAT_STRING = '{record.time:%H:%M:%S.%f} {record.message}'
    handler = logbook.TestHandler(format_string=FORMAT_STRING)
    with activation_strategy(handler):
        logbook.set_datetime_format('utc')
        try:
            # Capture one record timestamped in UTC and one in local time.
            logger.warn('This is a warning.')
            time_utc = handler.records[0].time
            logbook.set_datetime_format('local')
            logger.warn('This is a warning.')
            time_local = handler.records[1].time
        finally:
            # put back the default time factory
            logbook.set_datetime_format('utc')
    # get the expected difference between local and utc time
    t1 = datetime.now()
    t2 = datetime.utcnow()
    tz_minutes_diff = get_total_delta_seconds(t1 - t2)/60.0
    if abs(tz_minutes_diff) < 1:
        pytest.skip("Cannot test utc/localtime differences if they vary by less than one minute...")
    # get the difference between LogRecord local and utc times
    logbook_minutes_diff = get_total_delta_seconds(time_local - time_utc)/60.0
    assert abs(logbook_minutes_diff) > 1, ('Localtime does not differ from UTC by more than 1 minute (Local: %s, UTC: %s)' % (time_local, time_utc))
    # The observed offset must match the machine's utc offset within 1%.
    ratio = logbook_minutes_diff / tz_minutes_diff
    assert ratio > 0.99
    assert ratio < 1.01
"""
.. moduleauthor:: Li, Wang <*****@*****.**>
"""

import os
from string import Template

import pandas as pd
import logbook
logbook.set_datetime_format('local')  # local-time timestamps for all records
logger = logbook.Logger('assets')

from orca.mongo.barra import BarraFetcher
barra_fetcher = BarraFetcher('short')

def generate_path(path_pattern, date):
    # Expand YYYYMMDD-style placeholders from an 8-character date string.
    return Template(path_pattern).substitute(YYYYMMDD=date, YYYYMM=date[:6], YYYY=date[:4], MM=date[4:6], DD=date[6:8])

def prep_assets_lance(account, date, output):
    # Read the first three columns of the Barra assets file for *account*/*date*
    # and write them out as sid/bid/alpha CSV at the expanded *output* path.
    path = os.path.join('/home/liulc/trade_'+account, 'barra', date[:4], date[4:6], date[6:8], 'assets.'+date)
    df = pd.read_csv(path, header=0, dtype={0: str}).iloc[:, :3]
    df.columns = ['sid', 'bid', 'alpha']
    output = generate_path(output, date)
    if not os.path.exists(os.path.dirname(output)):
        os.makedirs(os.path.dirname(output))
    df.to_csv(output, index=False, float_format='%.6f')
    logger.info('Generated file: {}', output)

def prep_assets_alpha(alpha, date, output):
    # NOTE(review): this definition is truncated in this view.
    df = pd.DataFrame({'alpha': alpha.ix[date]})
    df['sid'] = df.index
# -*- coding: utf-8 -*- """ local logger """ import logbook from conf import Conf conf = Conf() logbook.set_datetime_format(conf.datetime_format) log = logbook.RotatingFileHandler(conf.logfile, max_size = conf.max_size, \ backup_count = conf.backup_count,\ level = conf.level, \ bubble=False ) #print dir(log) log.format_string = "[{record.time:%Y-%m-%d %H:%M:%S.%f}][{record.thread},{record.module},{record.func_name},{record.lineno}] {record.level_name}: {record.channel}: {record.message}" log.default_format_string = "[{record.time:%Y-%m-%d %H:%M:%S.%f}][{record.thread},{record.module},{record.func_name},{record.lineno}] {record.level_name}: {record.channel}: {record.message}" log.push_application() def get_logger(logger_name): """ get logger """
def get_logger(name):
    """Return a stdout-backed logbook Logger named after *name*'s basename."""
    logbook.set_datetime_format('local')
    stdout_handler = StreamHandler(sys.stdout)
    stdout_handler.push_application()
    channel = os.path.basename(name)
    return Logger(channel)
def _set_formatting(self, handler, fmt):
    """Apply local-time display (if configured) and an optional format string."""
    use_local = config.root.log.localtime
    if use_local:
        logbook.set_datetime_format("local")
    if fmt is not None:
        handler.format_string = fmt
# Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from datetime import datetime import traceback import logbook import better_exceptions from logbook import Logger from logbook.more import ColorizedStderrHandler from .py2 import to_utf8, from_utf8 logbook.set_datetime_format("local") # patch warn logbook.base._level_names[logbook.base.WARNING] = 'WARN' # better_exceptions hot patch def format_exception(exc, value, tb): formatted, colored_source = better_exceptions.format_traceback(tb) if not str(value) and exc is AssertionError: value.args = (colored_source,) title = traceback.format_exception_only(exc, value) title = from_utf8(title[0].strip()) full_trace = u'Traceback (most recent call last):\n{}{}\n'.format(formatted, title)
import time

import logbook
from logbook import TimedRotatingFileHandler
from apscheduler.scheduler import Scheduler

from config import Config

cfg = Config()
conf = cfg.get_conf()
# Timestamp format and log destination come from the [logging] config section.
logbook.set_datetime_format(conf["logging"]["datetime_format"])
log = logbook.TimedRotatingFileHandler(conf["logging"]["logfile"], \
                                       date_format=conf["logging"]["date_format"])
log.push_application()

from notifier import Notifier

logger = logbook.Logger("app")

def heartbeat():
    # Periodic liveness marker.
    logger.debug("heartbeat")

def main():
def get_logger(name, debug=True):
    """Logger named after *name*'s basename; stdout when debug, else silent."""
    logbook.set_datetime_format('local')
    if debug:
        sink = StreamHandler(sys.stdout)
    else:
        sink = NullHandler()
    sink.push_application()
    return Logger(os.path.basename(name))