def _create_log_handler(config, add_hostname=False):
    """Build the full logbook handler stack for a pipeline run.

    Returns a CloseableNestedSetup containing, in push order: a NullHandler
    (so nothing falls through), per-run file handlers (INFO, DEBUG and a
    commands-only log) when a log directory is configured, an optional mail
    handler, and a stderr stream handler.

    :param config: run configuration dict; consulted for the log directory
        (via ``get_log_dir``) and an email address under ``email`` or
        ``resources.log.email``.
    :param add_hostname: when True, include ``{record.extra[source]}`` in the
        line format (used for distributed runs).
    """
    handlers = [logbook.NullHandler()]
    # NOTE(review): when add_hostname is False the middle element is the empty
    # string, so the join produces a double space before the message — kept
    # as-is since existing log parsers may rely on the exact format.
    format_str = " ".join(["[{record.time:%Y-%m-%d %H:%M}]",
                           "{record.extra[source]}:" if add_hostname else "",
                           "{record.message}"])
    log_dir = get_log_dir(config)
    if log_dir:
        if not os.path.exists(log_dir):
            utils.safe_makedir(log_dir)
            # Wait to propagate, Otherwise see logging errors on distributed filesystems.
            time.sleep(5)
        # INFO-level log of non-command records.
        handlers.append(logbook.FileHandler(os.path.join(log_dir, "%s.log" % LOG_NAME),
                                            format_string=format_str, level="INFO",
                                            filter=_not_cl))
        # Full DEBUG log; bubbles so records also reach the INFO handler above.
        handlers.append(logbook.FileHandler(os.path.join(log_dir, "%s-debug.log" % LOG_NAME),
                                            format_string=format_str, level="DEBUG",
                                            bubble=True, filter=_not_cl))
        # Command-line invocations only (records matching _is_cl).
        handlers.append(logbook.FileHandler(os.path.join(log_dir, "%s-commands.log" % LOG_NAME),
                                            format_string=format_str, level="DEBUG",
                                            filter=_is_cl))
    # Email can come from the top level or nested under resources.log.
    email = config.get("email", config.get("resources", {}).get("log", {}).get("email"))
    if email:
        email_str = u'''Subject: [bcbio-nextgen] {record.extra[run]} \n\n {record.message}'''
        handlers.append(logbook.MailHandler(email, [email],
                                            format_string=email_str,
                                            level='INFO', bubble = True))
    # Always mirror non-command records to stderr.
    handlers.append(logbook.StreamHandler(sys.stderr, format_string=format_str,
                                          bubble=True, filter=_not_cl))
    return CloseableNestedSetup(handlers)
def setup_logger(self):
    """Return a logbook NestedSetup with one bubbling file handler.

    The handler writes to ``self.config.log_filename`` using a verbose
    per-record format (timestamp, level, channel, message, source location
    and kwargs).
    """
    record_format = (
        '[{record.time:%Y-%m-%d %H:%M:%S.%f%z}] {record.level_name} '
        '{record.channel} : {record.message} '
        '(in {record.filename}:{record.lineno}), args: {record.kwargs})'
    )
    file_handler = logbook.FileHandler(self.config.log_filename,
                                       format_string=record_format,
                                       bubble=True)
    return logbook.NestedSetup([file_handler])
def setup_logbook(logfile, logfile_kwargs=None):
    """Return a basic `logbook` setup which logs to `stderr` and to file."""
    if logfile_kwargs is None:
        logfile_kwargs = {}
    # Fill in defaults without clobbering caller-supplied values (the passed
    # dict is intentionally mutated in place, matching prior behavior).
    file_defaults = {
        'level': 'DEBUG',
        'mode': 'w',
        'bubble': True,
        'format_string': (
            '--------------------------------------------------------------------------\n'
            '[{record.time} {record.level_name:<8s} {record.channel:>10s}]'
            ' {record.filename:s}:{record.lineno:d}\n{record.message:s}'),
    }
    for option, default in file_defaults.items():
        logfile_kwargs.setdefault(option, default)
    stderr_handler = logbook.more.ColorizedStderrHandler(
        level='INFO', bubble=False,
        format_string='[{record.level_name:<8s} {record.channel:s}] {record.message:s}')
    return logbook.NestedSetup([
        logbook.NullHandler(),
        stderr_handler,
        logbook.FileHandler(logfile, **logfile_kwargs),
    ])
def init_logging(filename: str = None):
    """Initialize application-wide logbook logging at TRACE level.

    :param filename: log file path; when ``None``, log to stdout instead.

    BUG FIX: the original always constructed ``logbook.FileHandler(filename)``
    even when ``filename`` was None, despite its own startup message claiming
    "stdout mode". Now a StreamHandler on stdout is used in that case, matching
    the message.
    """
    import sys
    level = logbook.TRACE
    if filename:
        logbook.FileHandler(filename, level=level).push_application()
    else:
        logbook.StreamHandler(sys.stdout, level=level).push_application()
    msg = 'Logging initialized, level: {}, mode: {}'.format(
        level, "stdout mode" if not filename else 'file mode: ' + filename)
    logger = logbook.Logger('Startup')
    logger.notice(msg)
def getLogger(loggerName='mylogger.log', streamVerb='DEBUG', logFile='log'):
    """Build a logbook Logger with a stdout handler and optional file handler.

    :param loggerName: channel name for the returned Logger.
    :param streamVerb: level name for the stdout handler (upper-cased).
    :param logFile: DEBUG-level log file path; falsy disables file logging.
    :return: configured ``logbook.Logger``.
    """
    logger = logbook.Logger(name=loggerName)
    # Stdout handler; format_string is set once in the constructor
    # (the original redundantly re-assigned it afterwards).
    mySH = logbook.StreamHandler(stream=sys.stdout, level=streamVerb.upper(),
                                 format_string='{record.message}', bubble=True)
    logger.handlers.append(mySH)
    if logFile:
        myFH = logbook.FileHandler(filename=logFile, level='DEBUG', bubble=True)
        myFH.format_string = '{record.message}'
        logger.handlers.append(myFH)
    # Best-effort: pop any application-bound handler left on the global stack.
    # BUG FIX: narrowed the bare `except:` (which would even swallow
    # KeyboardInterrupt) to `except Exception`.
    try:
        stdErr = list(
            logbook.handlers.Handler.stack_manager.iter_context_objects())[0]
        stdErr.pop_application()
    except Exception:
        pass
    return logger
def __init__(self, location='temp', home_path=os.getcwd()):
    """Set up logging and load the CX-ASAP config file.

    If being used in a pipeline the home_path can be pushed through;
    otherwise the current working directory is taken as the home_path.
    (NOTE: the ``home_path`` default is evaluated once at import time.)
    Ideally this would be a parameter in sys_conf.yaml, but the logbook must
    be established before reading any .yaml file.
    """
    logbook.FileHandler(home_path + '/error_output.txt', 'a').push_application()
    self.logger = logbook.Logger(self.__class__.__name__)
    logbook.set_datetime_format("local")
    self.logger.info('Class Initialised!')
    # Finds the path of this module and uses the known directory tree of
    # CX-ASAP to find the config file.
    self.conf_path = pathlib.Path(
        os.path.abspath(__file__)).parent.parent.parent / 'conf.yaml'
    with open(self.conf_path, 'r') as f:
        try:
            # BUG FIX: was `yaml.load(f)` with no Loader (deprecated/unsafe)
            # caught by `yaml.YAMLERROR`, which does not exist — a parse error
            # would have raised AttributeError instead of being handled.
            self.cfg = yaml.safe_load(f)
        except yaml.YAMLError as error:
            self.logger.critical(
                f'Failed to open config file with {error}')
            exit()
    if location == 'temp':
        os.chdir(self.cfg['analysis_path'])
def __init__(self, name='app', filename=app.config['LOG_NAME']):
    """Create a UTF-8 file-backed logbook logger and push it application-wide.

    :param name: logger channel name; defaults to 'app'.
    :param filename: log file path; defaults to the Flask app's LOG_NAME
        config value (evaluated once at import time).
    """
    self.handler = logbook.FileHandler(filename, encoding='utf-8')
    # Record timestamps in local time rather than UTC.
    logbook.set_datetime_format("local")
    self.logger = logbook.Logger(name)
    self.handler.push_application()
def __init__(self, exchange_id):
    """Attach a per-process audit file handler to this exchange's logger.

    The audit file is named ``audit/<exchange>-p<pid>.audit`` so concurrent
    processes never share a file.
    """
    audit_path = 'audit/{}-p{}.audit'.format(exchange_id.name, os.getpid())
    audit_handler = logbook.FileHandler(audit_path,
                                        format_string=AuditLog.LOG_FORMAT)
    self._logger = logbook.Logger(exchange_id.name)
    self._logger.handlers.append(audit_handler)
def initialize_logger(debug_log_path='immunotyper-debug'):
    """Push a global logbook setup: colorized INFO on stderr plus, when a
    path is given, a message-only DEBUG log file (recreated each run).

    :param debug_log_path: basename for the debug log ('.log' is appended);
        falsy disables file logging entirely.
    """
    LOG_FORMAT = '{record.message}'
    handlers = [logbook.NullHandler()]
    if debug_log_path:
        debug_log_path = debug_log_path + '.log'
        # Start each run with a fresh debug log.
        if os.path.exists(debug_log_path):
            os.remove(debug_log_path)
        handlers.append(logbook.FileHandler(debug_log_path, level='DEBUG',
                                            format_string=LOG_FORMAT))
    handlers.append(logbook.more.ColorizedStderrHandler(
        format_string=LOG_FORMAT, level='INFO', bubble=True))
    logbook.NestedSetup(handlers).push_application()
def create_logger(app):
    """Build the application's logbook NestedSetup.

    Both the mail and file handlers are wrapped in FingersCrossedHandler, so
    records are buffered and only flushed once an error-level record arrives.
    A Processor injects the current request id into each record's extras.

    :param app: Flask-style app whose config provides ENVIRONMENT and LOG_FILE.
    """
    # Production talks to the real local MTA; elsewhere use a debug SMTP port.
    if app.config.get('ENVIRONMENT') == 'production':
        server_addr = ('localhost', 25)
    else:
        server_addr = ('localhost', 2525)
    mail_handler = logbook.MailHandler(
        '*****@*****.**', ['*****@*****.**'],
        server_addr=server_addr,
        level='DEBUG',
        format_string=u'''Subject: ERROR at gitorama.com [{record.time:%Y-%m-%d %H:%M}] {record.extra[request_id]}: {record.level_name}: {record.channel}: {record.message}''',
        related_format_string=
        u'[{record.time:%Y-%m-%d %H:%M}] {record.extra[request_id]}: {record.level_name}: {record.channel}: {record.message}',
    )
    file_handler = logbook.FileHandler(
        app.config['LOG_FILE'],
        level='DEBUG',
        format_string=
        u'[{record.time:%Y-%m-%d %H:%M}] {record.extra[request_id]}: {record.level_name}: {record.channel}: {record.message}'
    )

    # Attach the per-request id (set elsewhere on the request context) to
    # every record so log lines can be correlated.
    def inject_id(record):
        record.extra['request_id'] = getattr(_request_ctx_stack.top,
                                             'logbook_request_id', None)

    logger = logbook.NestedSetup([
        logbook.NullHandler(),
        logbook.FingersCrossedHandler(mail_handler, reset=True),
        logbook.FingersCrossedHandler(file_handler, reset=True, bubble=True),
        logbook.Processor(inject_id),
    ])
    return logger
def test_custom_formatter(self):
    """A callable assigned to handler.formatter overrides the format string."""
    def level_and_message(record, handler):
        return record.level_name + ':' + record.message

    with logbook.FileHandler(self.filename) as handler:
        handler.formatter = level_and_message
        self.log.warn('Custom formatters are awesome')
    with open(self.filename) as log_file:
        self.assertEqual(log_file.readline(),
                         'WARNING:Custom formatters are awesome\n')
def main():
    """Scan the secured backup and print each discovered path.

    Console shows WARNING and above; esync.log captures full DEBUG detail.
    """
    logbook.StreamHandler(sys.stdout, level='WARNING').push_application()
    logbook.FileHandler('esync.log', level='DEBUG').push_application()
    # NOTE(review): `config` is not defined in this function — presumably a
    # module-level global loaded elsewhere; confirm it exists before main()
    # runs (compare the sibling main() that loads esync.yaml itself).
    log.debug('loaded config: {}', config)
    backup = app.MySecuredBackupInstance(config)
    for path in backup.scan():
        print(path)
        #a.add(path)
    backup.tar_directories(r'g:\temp\coucou.tar')
def test_file_handler(logfile, activation_strategy, logger):
    """FileHandler writes a formatted record line to the target file."""
    record_format = '{record.level_name}:{record.channel}:{record.message}'
    handler = logbook.FileHandler(logfile, format_string=record_format)
    with activation_strategy(handler):
        logger.warn('warning message')
    handler.close()
    with open(logfile) as written:
        assert written.readline() == 'WARNING:testlogger:warning message\n'
def _get_file_log_handler(self, subpath):
    """Return a non-bubbling FileHandler under the configured log root.

    Falls back to a NullHandler when no log root is configured. *subpath*
    is formatted with the current ``context`` before joining.
    """
    root_path = config.root.log.root
    if root_path is None:
        return logbook.NullHandler(bubble=False)
    log_path = os.path.join(root_path, subpath.format(context=context))
    ensure_containing_directory(log_path)
    return logbook.FileHandler(log_path, bubble=False)
def main():
    """Load esync.yaml, then scan, add and commit every discovered path.

    Console shows WARNING and above; esync.log captures full DEBUG detail.
    """
    logbook.StreamHandler(sys.stdout, level='WARNING').push_application()
    logbook.FileHandler('esync.log', level='DEBUG').push_application()
    # BUG FIX: the original `yaml.load(open('esync.yaml'))` leaked the file
    # handle and omitted the Loader argument (deprecated and unsafe on
    # untrusted input). Use a context manager and safe_load.
    with open('esync.yaml') as config_file:
        config = yaml.safe_load(config_file)
    log.debug('loaded config: {}', config)
    a = app.App(config)
    for path in a.scan():
        a.add(path)
    a.commit()
def create_log_handler(config, log_name):
    """Return a FileHandler in config['log_dir'] (creating the directory if
    needed), or a stdout StreamHandler when no log_dir is configured."""
    log_dir = config.get("log_dir", None)
    if not log_dir:
        return logbook.StreamHandler(sys.stdout)
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)
    return logbook.FileHandler(os.path.join(log_dir, "%s.log" % log_name))
def test_file_handler(self):
    """A thread-bound FileHandler writes a formatted record to its file."""
    record_format = '{record.level_name}:{record.channel}:{record.message}'
    handler = logbook.FileHandler(self.filename, format_string=record_format)
    with handler.threadbound():
        self.log.warn('warning message')
    handler.close()
    with open(self.filename) as log_file:
        self.assertEqual(log_file.readline(),
                         'WARNING:testlogger:warning message\n')
def init_logging_file(filename, log_level='notset', rotate_log=True, rotate_max_size=10485760):
    """Push an application-wide file handler, creating the log directory.

    :param filename: target log file; its directory is created if missing.
    :param log_level: level name resolved via ``figure_out_log_level``.
    :param rotate_log: when True (default) use a RotatingFileHandler.
    :param rotate_max_size: rotation threshold in bytes (default 10 MiB).
    """
    log_dir = os.path.dirname(filename)
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)
    level = figure_out_log_level(log_level)
    if rotate_log is True:
        handler = logbook.RotatingFileHandler(filename, level=level,
                                              max_size=int(rotate_max_size),
                                              bubble=True)
    else:
        handler = logbook.FileHandler(filename, level=level, bubble=True)
    handler.push_application()
    get_logger().debug("file based logging initialized in directory: " + log_dir)
def test_custom_formatter(activation_strategy, logfile, logger):
    """A callable assigned to handler.formatter overrides the format string."""
    def level_and_message(record, handler):
        return record.level_name + ':' + record.message

    handler = logbook.FileHandler(logfile)
    with activation_strategy(handler):
        handler.formatter = level_and_message
        logger.warn('Custom formatters are awesome')
    with open(logfile) as written:
        assert written.readline() == 'WARNING:Custom formatters are awesome\n'
def after_configure_app(app):
    """Post-configuration hook: wire up file logging, LDAP and Sentry.

    The log level comes from the project config (default logbook.INFO);
    Sentry is enabled only outside DEBUG/TESTING modes.
    """
    level = get_project().config.get('LOG_LEVEL', logbook.INFO)
    app_log = logbook.FileHandler('dhcpawn.log', mode='a', level=int(level))
    app_log.format_string = (
        '[{record.time:%Y-%m-%d %H:%M:%S.%f%z}] {record.level_name}: '
        '{record.channel}: {record.func_name}: {record.lineno}: '
        '{record.message}'
    )
    app_log.push_application()
    app.ldap_obj = ldap_init()
    if not app.config['DEBUG'] and not app.config['TESTING']:
        app.config['SENTRY_DSN'] = get_project().config.get('SENTRY_DSN')
        Sentry(app)
def setup_simple_file_handler(cls, file_path):
    """ Push a file handler logging only the message (no timestamp) """
    silence = logbook.NullHandler()
    message_only = logbook.FileHandler(file_path,
                                       format_string='{record.message}',
                                       level = 2, bubble = False)
    # Stack the null handler first so nothing else is logged anywhere.
    silence.push_application()
    # Then the file handler, which captures only the bare message text.
    message_only.push_application()
def start(session=None, logto='file', logfile=None):
    """Start a session.

    Load the session file and launch an IPython shell. Every definition that
    was made in the module file is available via the ``m`` variable. Moreover,
    the quantities package is already loaded and named ``q``. So, once the
    session has started you could access motors like this::

        $ concert start tomo
        This is session tomo
        Welcome to Concert 0.0.1
        In [1]: m.crio1.set_positon(2.23 * q.mm)
        In [2]: m.crio1.get_position()
        Out[2]: array(2.23) * mm

    *Additional options*:

    .. cmdoption:: --logto={stderr, file}

        Specify a method for logging events. If this flag is not specified,
        ``file`` is used and assumed to be
        ``$XDG_DATA_HOME/concert/concert.log``.

    .. cmdoption:: --logfile=<filename>

        Specify a log file if ``--logto`` is set to ``file``.
    """
    _exit_if_not_exists(session)
    handler = None
    if logto == 'file':
        filename = logfile if logfile else concert.session.DEFAULT_LOGFILE
        handler = logbook.FileHandler(filename)
    else:
        handler = logbook.StderrHandler()
    # Bake the session name into each record via old-style % interpolation.
    handler.format_string = '[{record.time}] {record.level_name}: ' \
        '%s: {record.channel}: {record.message}' % session
    # Add session path, so that sessions can import other sessions
    sys.path.append(concert.session.PATH)
    try:
        module = concert.session.load(session)
    except Exception as exception:
        # Any import/exec failure in the session file aborts with a traceback.
        traceback.print_exc()
        sys.exit(1)
    _run_shell(handler, module)
def __init__(self, name, package):
    """Create a per-day log file under ``<module dir>/<package>/logs/`` and
    set up stderr + INFO-level file handlers for the named logger.

    BUG FIX: paths were built with hard-coded ``\\`` separators, breaking on
    any non-Windows platform; now uses os.path.join.
    """
    root_path = os.path.dirname(__file__)
    log_dir = os.path.join(root_path, package, 'logs')
    if not os.path.isdir(log_dir):
        # NOTE: mkdir (not makedirs) kept — the package directory is assumed
        # to already exist.
        os.mkdir(log_dir)
    # Log file named after the current local date, e.g. 2024_01_31.log.
    logfile = os.path.join(
        log_dir, '{0}.log'.format(time.strftime("%Y_%m_%d", time.localtime())))
    self.stderr_handler = logbook.StderrHandler()
    self.file_handler = logbook.FileHandler(logfile, level='INFO', bubble=True)
    self.yp_log = logbook.Logger(name)
def init_logging(config):
    """Idempotently wire logbook and Twisted logging to the configured file.

    Subsequent calls are no-ops thanks to the module-level guard flag.
    """
    global _logging_initialized
    if _logging_initialized:
        return
    record_format = (
        '[{record.time}] {record.level_name:>5} {record.extra[node]} '
        '{record.module}:{record.lineno} {record.message}'
    )
    target = config.LOG_FILENAME or 'twisted-lock.log'
    logbook.FileHandler(target, format_string=record_format).push_application()
    # Route Twisted's log events into logbook as well.
    observer = LogbookObserver()
    twisted.python.log.startLoggingWithObserver(observer.emit, setStdout=False)
    _logging_initialized = True
def __init__(self, location = 'temp', home_path = os.getcwd()):
    """Set up logging, load the CX-ASAP config and reset per-run result state.

    If being used in a pipeline the home_path can be pushed through;
    otherwise the current working directory is taken as the home_path.
    (NOTE: the ``home_path`` default is evaluated once at import time.)
    Ideally this would be a parameter in sys_conf.yaml, but the logbook must
    be established before reading any .yaml file.
    """
    logbook.FileHandler(home_path + '/error_output.txt', 'a').push_application()
    self.logger = logbook.Logger(self.__class__.__name__)
    logbook.set_datetime_format("local")
    self.logger.info('Class Initialised!')
    # Finds the path of this module and uses the known directory tree of
    # CX-ASAP to find the config file.
    self.conf_path = pathlib.Path(os.path.abspath(__file__)).parent.parent.parent / 'conf.yaml'
    with open(self.conf_path, 'r') as f:
        try:
            # BUG FIX: was `yaml.load(f)` with no Loader (deprecated/unsafe)
            # caught by `yaml.YAMLERROR`, which does not exist — a parse error
            # would have raised AttributeError instead of being handled.
            self.cfg = yaml.safe_load(f)
        except yaml.YAMLError as error:
            self.logger.critical(f'Failed to open config file with {error}')
            exit()
    if location == 'temp':
        os.chdir(self.cfg['current_results_path'])
    # Sets up empty lists/dictionaries to later populate with data.
    self.cif_files = []
    self.results = {}
    self.errors = {}
    self.structures_in_cif = []
    self.successful_positions = []
    # Sets these to empty to reset from previous runs.
    self.cfg['Structures_in_each_CIF'] = self.structures_in_cif
    self.cfg['Successful_Positions'] = self.successful_positions
    with open(self.conf_path, 'w') as f:
        yaml.dump(self.cfg, f)
    # Pulls parameters from the configuration file as necessary, and uses it
    # to set up an empty dataframe.
    self.search_items = self.cfg['cell_parameters']
    for item in self.search_items:
        self.results[item] = []
        self.errors[item] = []
    self.data = pd.DataFrame()
    self.temp_df = pd.DataFrame()
def init_slave_logger(self, slave_name):
    """Create a per-slave Logger writing to ``<logs_dir>/remote.<name>.log``.

    The logs directory comes from config (default ``~/.propagator/logs``)
    and is created on demand.
    """
    logdir = os.path.expanduser(
        config_general.get("logs_dir", "~/.propagator/logs"))
    if not os.path.isdir(logdir):
        os.makedirs(logdir)
    logpath = os.path.join(logdir, "remote.{}.log".format(slave_name))
    slave_logger = logbook.Logger("slave-{}".format(slave_name))
    slave_logger.handlers.append(logbook.FileHandler(logpath))
    return slave_logger
def create_log_handler(config, log_name):
    """Return a single logbook handler chosen from *config*.

    Priority: email (MailHandler) > log_dir (FileHandler) > stdout stream.
    """
    log_dir = config.get("log_dir", None)
    email = config.get("email", None)
    if log_dir:
        utils.safe_makedir(log_dir)
        handler = logbook.FileHandler(os.path.join(log_dir, "%s.log" % log_name))
    else:
        handler = logbook.StreamHandler(sys.stdout)
    if email:
        # NOTE(review): this REPLACES the file/stream handler rather than
        # adding to it — when email is configured, nothing is written to the
        # log file or stdout. Confirm this is intended (compare the sibling
        # _create_log_handler, which appends multiple handlers).
        handler = logbook.MailHandler(email, [email],
                                      format_string=u'''Subject: [BCBB pipeline] {record.extra[run]} \n\n {record.message}''',
                                      level='INFO', bubble = True)
    return handler
def __init__(self, original_path=os.getcwd()):
    """Set up logging, load the CX-ASAP config and compute the analysis
    directory tree, writing the resolved paths back into conf.yaml.

    (NOTE: the ``original_path`` default is evaluated once at import time.)
    """
    # Set up logbook and config file.
    logbook.FileHandler(original_path + '/error_output.txt', 'a').push_application()
    self.logger = logbook.Logger(self.__class__.__name__)
    logbook.set_datetime_format("local")
    self.logger.info('Class Initialised!')
    self.conf_path = pathlib.Path(
        os.path.abspath(__file__)).parent.parent.parent / 'conf.yaml'
    with open(self.conf_path, 'r') as f:
        try:
            # BUG FIX: was `yaml.load(f)` with no Loader (deprecated/unsafe)
            # caught by `yaml.YAMLERROR`, which does not exist — a parse error
            # would have raised AttributeError instead of being handled.
            self.cfg = yaml.safe_load(f)
        except yaml.YAMLError as error:
            self.logger.critical(
                f'Failed to open config file with {error}')
            exit()
    self.original_path = original_path
    # Root folder name, e.g. "<file>_<experiment>_analysis".
    self.home = self.cfg['file_name'] + '_' + self.cfg[
        'experiment_type'] + '_analysis'
    self.tree_structure = [
        'analysis', 'ref', 'results', 'failed_autoprocessing'
    ]
    # Avoid nesting the home folder inside itself when already there.
    if pathlib.Path(self.original_path).stem == self.home:
        self.home_path = self.original_path
    else:
        self.home_path = os.path.join(self.original_path, self.home)
    self.analysis_path = os.path.join(self.home_path, self.tree_structure[0])
    self.ref_path = os.path.join(self.home_path, self.tree_structure[1])
    self.results_path = os.path.join(self.home_path, self.tree_structure[2])
    self.failed_path = os.path.join(self.home_path, self.tree_structure[3])
    # Persist the resolved paths for downstream pipeline steps.
    self.cfg['home_path'] = self.home_path
    self.cfg['analysis_path'] = self.analysis_path
    self.cfg['ref_path'] = self.ref_path
    self.cfg['results_path'] = self.results_path
    self.cfg['failed_path'] = self.failed_path
    with open(self.conf_path, 'w') as f:
        yaml.dump(self.cfg, f)
def modlog(request):
    """Build a logger named after the requesting test module, with a DEBUG
    file handler and an INFO stdout handler.

    BUG FIXES: the original returned the function object ``modlog`` itself
    instead of the built logger, and concatenated the log path without a
    separator ('../LogTest_...').
    """
    name = request.module.__name__
    if name.startswith('test_'):
        name = name[5:]
    # Create the logger for this test module.
    logger = logbook.Logger(name)
    # Log file output path.
    logpath = '../Log'
    # NOTE(review): assumes module-level `time` is a preformatted timestamp
    # string (it is concatenated, not called) — confirm.
    log_name = logpath + '/Test_' + name + time + '.log'
    # Handler writing full DEBUG detail to the file.
    logger.handlers.append(logbook.FileHandler(log_name, level="DEBUG"))
    # Handler echoing INFO and above to the console.
    logger.handlers.append(logbook.StreamHandler(sys.stdout, level="INFO"))
    return logger
def start(self, action):
    """Daemonize this process and run *action* inside a DaemonContext.

    Creates ``~/.<name>/`` for the pidfile (and log file when logging is
    requested), optionally sets the process title, and — when *action*
    accepts a ``log`` parameter — wires up a logbook FileHandler whose
    stream is preserved across the daemon fork via ``files_preserve``.
    NOTE: this is Python-2 era code (``basestring``, ``inspect.getargspec``).
    """
    # Imports are local so daemon/lockfile/setproctitle are only required
    # when start() is actually used.
    import os
    import inspect
    import daemon
    import lockfile
    import logbook
    from setproctitle import setproctitle
    self.name = self.name or action.__name__
    default_dir = os.path.join(os.path.expanduser("~"), ".{0}".format(self.name))
    if self.settings['pidfile'] is None:
        self.settings['pidfile'] = os.path.join(
            default_dir, "{0}.pid".format(self.name))
        if not os.path.exists(default_dir):
            os.makedirs(default_dir)
    # A string pidfile is written eagerly with our pid, then wrapped in a
    # FileLock for the DaemonContext.
    if isinstance(self.settings['pidfile'], basestring):
        pid = str(os.getpid())
        with open(self.settings['pidfile'], 'w') as pidfile:
            pidfile.write(pid)
        self.settings['pidfile'] = lockfile.FileLock(
            self.settings['pidfile'])
    if isinstance(self.proc_title, basestring):
        setproctitle(self.proc_title)
    # Initialize logging if requested.
    action_args = inspect.getargspec(action)[0]
    if 'log' in action_args:
        self.log = logbook.Logger(self.name)
        if self.log_handler is None:
            if not os.path.exists(default_dir):
                os.makedirs(default_dir)
            self.log_handler = logbook.FileHandler(
                os.path.join(default_dir, "{0}.log".format(self.name)))
        # Keep the log file descriptor open across the daemon fork.
        if self.settings['files_preserve'] is None:
            self.settings['files_preserve'] = [self.log_handler.stream]
        else:
            self.settings['files_preserve'].append(
                self.log_handler.stream)
        self.log_handler.level_name = self.log_level
        with self.log_handler.applicationbound():
            self.log.warning("Starting daemon.")
            with daemon.DaemonContext(**self.settings):
                action(log=self.log)
    else:
        with daemon.DaemonContext(**self.settings):
            action()