def main(ctx, config, log_level):
    """CLI entry point: stash *config* on the context and install log handlers.

    When a logging email is configured, a secure ``MailHandler`` is pushed
    first; a colorized stderr handler at *log_level* is always pushed.
    """
    ctx.obj = config
    email_cfg = config.logging_email
    if email_cfg is not None:
        from logbook import MailHandler
        mail_handler = MailHandler(
            email_cfg.from_address,
            email_cfg.to_address,
            server_addr=(email_cfg.server_address, email_cfg.server_port),
            credentials=(email_cfg.from_address, email_cfg.password),
            level=lookup_level(email_cfg.log_level.upper()),
            secure=True,
            subject='Combine Error!',
            bubble=True,
        )
        mail_handler.push_application()
    stderr_handler = AlternateColorizedStderrHandler(
        level=lookup_level(log_level.upper()),
        bubble=True,
    )
    stderr_handler.push_application()
def _setup_file_log(self):
    """Add a file log handler."""
    # Resolve the configured log file path and ensure its directory exists.
    file_path = os.path.expandvars(
        fs.abspath(self.app.config.get('log', 'file')))
    log_dir = os.path.dirname(file_path)
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)

    # Debug level selects the more verbose format string.
    if logbook.lookup_level(self.get_level()) == logbook.DEBUG:
        fmt_string = self._meta.debug_format
    else:
        fmt_string = self._meta.file_format

    rotate = self.app.config.get('log', 'rotate')
    if rotate:
        from logbook import RotatingFileHandler
        file_handler = RotatingFileHandler(
            file_path,
            max_size=int(self.app.config.get('log', 'max_bytes')),
            backup_count=int(self.app.config.get('log', 'max_files')),
            format_string=fmt_string,
            level=logbook.lookup_level(self.get_level()),
            bubble=True,
        )
    else:
        from logbook import FileHandler
        file_handler = FileHandler(
            file_path,
            format_string=fmt_string,
            level=logbook.lookup_level(self.get_level()),
            bubble=True,
        )

    self._file_handler = file_handler
    self.backend.handlers.append(file_handler)
def _setup_file_log(self):
    """Add a file log handler.

    Creates the log directory if needed, picks a format string based on
    the configured level, and installs either a rotating or a plain file
    handler depending on the ``log.rotate`` config flag.
    """
    file_path = os.path.expandvars(
        fs.abspath(self.app.config.get('log', 'file')))
    log_dir = os.path.dirname(file_path)
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)

    # DEBUG gets the verbose format; everything else the file format.
    if logbook.lookup_level(self.get_level()) == logbook.DEBUG:
        fmt_string = self._meta.debug_format
    else:
        fmt_string = self._meta.file_format

    if self.app.config.get('log', 'rotate'):
        from logbook import RotatingFileHandler
        file_handler = RotatingFileHandler(
            file_path,
            max_size=int(self.app.config.get('log', 'max_bytes')),
            backup_count=int(self.app.config.get('log', 'max_files')),
            format_string=fmt_string,
            # PEP8: no spaces around '=' in keyword arguments (was
            # ``level = ...`` / ``bubble = ...`` — inconsistent with the
            # sibling implementation of this method).
            level=logbook.lookup_level(self.get_level()),
            bubble=True,
        )
    else:
        from logbook import FileHandler
        file_handler = FileHandler(
            file_path,
            format_string=fmt_string,
            level=logbook.lookup_level(self.get_level()),
            bubble=True,
        )
    self._file_handler = file_handler
    self.backend.handlers.append(file_handler)
def _setup_console_log(self):
    """Add a console log handler.

    Installs a ``StderrHandler`` on the backend, using the debug format
    string when the configured level is DEBUG.
    """
    if logbook.lookup_level(self.get_level()) == logbook.DEBUG:
        fmt_string = self._meta.debug_format
    else:
        fmt_string = self._meta.console_format
    # PEP8: no spaces around '=' in keyword arguments (was
    # ``level = ...`` / ``bubble = ...`` — inconsistent with the sibling
    # implementation of this method).
    console_handler = logbook.StderrHandler(
        format_string=fmt_string,
        level=logbook.lookup_level(self.get_level()),
        bubble=True)
    self._console_handler = console_handler
    self.backend.handlers.append(console_handler)
def _setup_console_log(self):
    """Add a console log handler."""
    # The verbose debug format is only used at DEBUG level.
    is_debug = logbook.lookup_level(self.get_level()) == logbook.DEBUG
    fmt_string = (self._meta.debug_format if is_debug
                  else self._meta.console_format)
    console_handler = logbook.StderrHandler(
        format_string=fmt_string,
        level=logbook.lookup_level(self.get_level()),
        bubble=True)
    self._console_handler = console_handler
    self.backend.handlers.append(console_handler)
def _setup_file_handler(self):
    """
    This helper method sets up a rotating file logger based on the base
    path supplied in the configuration. No file logging is performed if
    the base path is not supplied
    """
    base_path = self.app.config.get('LOGGER_PATH_BASE')
    if base_path is None:
        # File logging disabled: nothing to do.
        return
    base_path = os.path.expanduser(base_path)
    if not os.path.exists(base_path):
        os.makedirs(base_path)

    # Default the file name to the running script's name.
    file_name = self.app.config.get('LOGGER_FILE_NAME')
    if file_name is None:
        file_name = os.path.basename(sys.argv[0])

    level_name = self.app.config.get('LOGGER_LEVEL', 'INFO')
    handler = logbook.RotatingFileHandler(
        os.path.join(base_path, file_name + '.log'),
        level=logbook.lookup_level(level_name),
        bubble=True,
        format_string=self.app.config.get('LOGGER_FORMAT'),
    )
    handler.push_application()
def enable_logging(level=logbook.WARNING, asyncio_level=None, aiohttp_level=None):
    """Enable the logger group and redirect asyncio/aiohttp logging.

    Levels default to the converted main *level* when not given.
    """
    # Determine levels
    level = logbook.lookup_level(level)
    converted_level = _convert_level(level)
    asyncio_level = (converted_level if asyncio_level is None
                     else _convert_level(asyncio_level))
    aiohttp_level = (converted_level if aiohttp_level is None
                     else _convert_level(aiohttp_level))

    # Enable logger group
    _logger_group.disabled = False

    # Enable asyncio debug logging
    os.environ['PYTHONASYNCIODEBUG'] = '1'

    # Redirect both stdlib loggers through the logbook redirect handler.
    for logger_name, logger_level in (('asyncio', asyncio_level),
                                      ('aiohttp', aiohttp_level)):
        stdlib_logger = logging.getLogger(logger_name)
        stdlib_logger.setLevel(logger_level)
        stdlib_logger.addHandler(_logger_redirect_handler)
def log_init(name=__name__, level='NOTICE', show_level=None,
             format_string=FORMAT_STRING, logfile=None):
    """Initialize a new Logger to file and colorized stderr stream"""
    logfile = log_file_init(log_name=name, logfile=logfile)
    # stderr may show at a different (usually higher) level than the file.
    cstd_handler = ColorizedStderrHandler(level=show_level or level,
                                          format_string=format_string,
                                          bubble=False)
    file_handler = FileHandler(logfile, level=level,
                               format_string=format_string, bubble=True)
    logger = Logger(name, level=logbook.lookup_level(level))
    logger.handlers.extend([file_handler, cstd_handler])
    # Expose the resolved log file path on the logger for callers.
    logger.filename = logfile
    return logger
def setup_logbook(app_name, config, min_level=None):
    """Build a ``logbook.NestedSetup`` for *app_name* from *config*.

    *min_level*, when given, is a floor applied to each handler's own
    configured level.
    """
    if not config.syslog:
        # Best effort: the log directory may already exist.
        try:
            os.makedirs(config.log_dir)
        except OSError:
            pass

    app_config = config.applications[app_name] or {}
    handler_names = app_config.get("handlers") or config.default.handler_list
    finger_cross_config = config.finger_cross.copy()
    if min_level:
        min_level = logbook.lookup_level(min_level)

    built = []
    for position, handler_name in enumerate(handler_names):
        handler_config = config.handlers[handler_name].copy()

        # Clamp the configured level so it never drops below min_level.
        level = handler_config.get("level")
        if min_level and level:
            handler_config["level"] = max(
                min_level, logbook.lookup_level(level))

        handler_class = getattr(logbook, handler_config.pop("type"))
        finger_cross = handler_config.pop("finger_cross", False)
        _replace_config(handler_config, "__APP__", app_name)

        if ("format_string" not in handler_config
                and handler_class is not logbook.NullHandler):
            handler_config["format_string"] = config.default.format_string

        # The first (top) handler terminates the chain.
        if position == 0:
            handler_config["bubble"] = False

        # Filter names in config are resolved against this module's globals.
        if "filter" in handler_config:
            handler_config["filter"] = globals()[handler_config["filter"]]

        handler = handler_class(**handler_config)
        if finger_cross:
            action_level = logbook.lookup_level(
                finger_cross_config.pop("action_level"))
            handler = logbook.FingersCrossedHandler(
                handler, action_level=action_level, **finger_cross_config)
        built.append(handler)

    return logbook.NestedSetup(built)
def setup_logging(name, level):
    """Send all log records to a rotating file under ``~/.config/aesop``."""
    from logbook import NullHandler, RotatingFileHandler, lookup_level

    log_path = os.path.expanduser("~/.config/aesop/{}.log".format(name))
    numeric_level = lookup_level(level)
    # null handler to prevent logs unhandled from RotatingFileHandler going
    # to stderr
    NullHandler().push_application()
    RotatingFileHandler(log_path, level=numeric_level).push_application()
def setup_logbook(app_name, config, min_level=None):
    """Assemble a nested logbook setup for one application.

    Each configured handler is instantiated from *config*; *min_level*
    acts as a floor on per-handler levels.
    """
    if not config.syslog:
        try:
            os.makedirs(config.log_dir)
        except OSError:
            # Directory already exists (or cannot be created) — best effort.
            pass

    app_config = config.applications[app_name] or {}
    names = app_config.get("handlers") or config.default.handler_list
    finger_cross_config = config.finger_cross.copy()
    if min_level:
        min_level = logbook.lookup_level(min_level)

    assembled = []
    first = True
    for name in names:
        cfg = config.handlers[name].copy()

        level = cfg.get("level")
        if min_level and level:
            # Never log below the requested minimum level.
            cfg["level"] = max(min_level, logbook.lookup_level(level))

        cls = getattr(logbook, cfg.pop("type"))
        wants_finger_cross = cfg.pop("finger_cross", False)
        _replace_config(cfg, "__APP__", app_name)

        if "format_string" not in cfg and cls is not logbook.NullHandler:
            cfg["format_string"] = config.default.format_string

        if first:
            # Top handler swallows records instead of bubbling them up.
            cfg["bubble"] = False

        if "filter" in cfg:
            cfg["filter"] = globals()[cfg["filter"]]

        handler = cls(**cfg)
        if wants_finger_cross:
            handler = logbook.FingersCrossedHandler(
                handler,
                action_level=logbook.lookup_level(
                    finger_cross_config.pop("action_level")),
                **finger_cross_config)

        assembled.append(handler)
        first = False

    return logbook.NestedSetup(assembled)
def set_level(self, level):
    """
    Set the log level.  Must be one of the log levels configured in
    self.levels which are ``['INFO', 'WARN', 'ERROR', 'DEBUG', 'FATAL']``.

    :param level: The log level to set.
    """
    level = level.upper()
    if level not in self.levels:
        # Unknown names fall back to a sane default.
        level = 'INFO'
    # ``level`` is already upper-cased above — the original called
    # ``.upper()`` a second time redundantly.
    self.level = logbook.lookup_level(level)
def __init__(self, collection, level=NOTSET, filter=None, bubble=False):
    """initialize with a mongodb collection

    :param collection: The MongoDB ``Collection`` object to log to.
    :param level: The level we log for
    :param filter: A filter to use
    :param bubble: defines if the log entry should bubble up
    """
    # PEP8: no spaces around '=' in default arguments (was
    # ``filter = None`` — inconsistent with the sibling definition).
    self.collection = collection
    # Accept both names and numeric levels; normalize to numeric.
    self.level = lookup_level(level)
    self.bubble = bubble
    self.filter = filter
def __init__(self, collection, level=NOTSET, filter=None, bubble=False):
    """initialize with a mongodb collection

    :param collection: The MongoDB ``Collection`` object to log to.
    :param level: The level we log for
    :param filter: A filter to use
    :param bubble: defines if the log entry should bubble up
    """
    self.filter = filter
    self.bubble = bubble
    self.collection = collection
    # Normalize level names to numeric logbook levels up front.
    self.level = lookup_level(level)
def __init__(self, config, options):
    """Set up core state: logger, colour scheme, and distro detection.

    :param config: configuration object with a ``core`` section.
    :param options: parsed CLI options (``log_level``, ``color``).
    :raises RuntimeError: when the distribution cannot be recognised.
    """
    log_level = lookup_level(options.log_level)
    logger = Logger("Core", log_level)
    self.display = config.get("core", "display_modules")
    colorscheme = options.color or config.get("core", "color", fallback="blue")
    # Resolve the colour code for the chosen scheme.
    for key in COLORS.keys():
        if key == colorscheme:
            colorcode = COLORS[key]
    self.state = State(colorcode, config, logger)
    global PROCESSES
    PROCESSES = render_class(self.state, processCheck, ())
    distro_out = render_class(self.state, distroCheck, ())
    if not distro_out:
        self.state.logger.critical("Unrecognised distribution.")
        # BUG FIX: ``RuntimeException`` does not exist in Python — raising
        # it would itself fail with NameError. The sibling implementation
        # of this method correctly raises the builtin RuntimeError.
        raise RuntimeError("Unrecognised distribution.")
    self.distro_name = ' '.join(distro_out[1].split()[:-1])
def _setup_default_handler(self):
    """
    This helper method sets up the default logging which just prints
    to stderr. If the application is currently in TESTING mode, it is
    not activated because the logged messages will be captured by the
    test_handler
    """
    if self.app.config.get('TESTING'):
        # Tests install their own capture handler — skip stderr output.
        return

    level_name = self.app.config.get('LOGGER_LEVEL', 'INFO')
    handler = logbook.StreamHandler(
        sys.stderr,
        level=logbook.lookup_level(level_name),
        bubble=True,
        format_string=self.app.config.get('LOGGER_FORMAT'),
    )
    handler.push_application()
def __init__(self, config, options):
    """Initialise core state from configuration and CLI options."""
    logger = Logger("Core", lookup_level(options.log_level))
    self.display = config.get("core", "display_modules")
    colorscheme = options.color or config.get(
        "core", "color", fallback="blue")
    # Pick the colour code matching the chosen scheme.
    for name, code in COLORS.items():
        if name == colorscheme:
            colorcode = code
    self.state = State(colorcode, config, logger)
    global PROCESSES
    PROCESSES = render_class(self.state, processCheck, ())
    distro_out = render_class(self.state, distroCheck, ())
    if not distro_out:
        self.state.logger.critical("Unrecognised distribution.")
        raise RuntimeError("Unrecognised distribution.")
    self.distro_name = ' '.join(distro_out[1].split()[:-1])
def get_level(level_name):
    """Map *level_name* (case-insensitive) to a numeric logbook level.

    Unknown names fall back to ``logbook.ERROR``.
    """
    normalized = level_name.upper()
    try:
        level = logbook.lookup_level(normalized)
    except LookupError:
        level = logbook.ERROR
    return level
def test_level_lookup_failures():
    """Both level-lookup directions raise LookupError on unknown values."""
    # Numeric value with no registered name.
    with pytest.raises(LookupError):
        logbook.get_level_name(37)
    # Name with no registered numeric level.
    with pytest.raises(LookupError):
        logbook.lookup_level('FOO')
def figure_out_log_level(given_level):
    """Coerce *given_level* to a numeric logbook level.

    Strings are stripped, upper-cased and looked up; anything else is
    assumed to already be a numeric level and is returned unchanged.
    """
    if not isinstance(given_level, str):
        return given_level
    return logbook.lookup_level(given_level.strip().upper())
def __init__(self, *args, **kwargs):
    '''
    data_frequency: 'minute' or 'daily'
    algoname: str, defaults to 'algo'
    backend: str or Backend instance, defaults to 'alpaca'
    (str is either backend module name under 'pylivetrader.backend',
    or global import path)
    trading_calendar: pd.DateIndex for trading calendar
    initialize: initialize function
    handle_data: handle_data function
    before_trading_start: before_trading_start function
    log_level: 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
    storage_engine: 'file', 'redis'
    '''
    # Configure the global log level before anything else logs.
    log.level = lookup_level(kwargs.pop('log_level', 'INFO'))
    self._recorded_vars = {}

    self.data_frequency = kwargs.pop('data_frequency', 'minute')
    assert self.data_frequency in ('minute', 'daily')
    self._algoname = kwargs.pop('algoname', 'algo')
    self.quantopian_compatible = kwargs.pop('quantopian_compatible', True)

    # Pick the persistence backend for algorithm state; the default file
    # name is derived from the algo name.
    storage_engine = kwargs.pop('storage_engine', 'file')
    if storage_engine == 'redis':
        storage_engine = RedisStore()
    else:
        storage_engine = FileStore(
            kwargs.pop('statefile', None) or
            '{}-state.pkl'.format(self._algoname)
        )
    self._state_store = StateStore(storage_engine=storage_engine)
    self._pipelines = {}

    backend_param = kwargs.pop('backend', 'alpaca')
    if not isinstance(backend_param, str):
        # A ready-made Backend instance was supplied directly.
        self._backend = backend_param
        self._backend_name = backend_param.__class__.__name__
    else:
        self._backend_name = backend_param
        try:
            # First, tries to import official backend packages
            backendmod = importlib.import_module(
                'pylivetrader.backend.{}'.format(self._backend_name))
        except ImportError:
            # Then if fails, tries to find pkg in global package
            # namespace.
            try:
                backendmod = importlib.import_module(
                    self._backend_name)
            except ImportError:
                raise RuntimeError(
                    "Could not find backend package `{}`.".format(
                        self._backend_name))
        backend_options = kwargs.pop('backend_options', None) or {}
        self._backend = backendmod.Backend(**backend_options)

    self.asset_finder = AssetFinder(self._backend)
    self.trading_calendar = kwargs.pop(
        'trading_calendar', get_calendar('NYSE'))
    self.data_portal = DataPortal(
        self._backend,
        self.asset_finder,
        self.trading_calendar,
        self.quantopian_compatible
    )
    self.event_manager = EventManager()
    self.trading_controls = []
    self.account_controls = []
    self.restrictions = NoRestrictions()

    # User-supplied lifecycle callbacks default to no-ops.
    self._initialize = kwargs.pop('initialize', noop)
    self._handle_data = kwargs.pop('handle_data', noop)
    self._before_trading_start = kwargs.pop('before_trading_start', noop)

    self.event_manager.add_event(
        events.Event(
            events.Always(),
            # We pass handle_data.__func__ to get the unbound method.
            self.handle_data.__func__,
        ),
        prepend=True,
    )

    # Lazy-refresh flags for account/portfolio snapshots.
    self._account_needs_update = True
    self._portfolio_needs_update = True
    self._in_before_trading_start = False
    self._assets_from_source = []
    self._context_persistence_excludes = []
    # Upper bound used by trading controls on order size.
    self._max_shares = int(1e+11)
    self.initialized = False

    # Snapshot of the public API surface of Algorithm (callables only).
    self.api_methods = [func for func in dir(Algorithm) if callable(
        getattr(Algorithm, func)
    )]
def figure_out_log_level(given_level):
    """Coerce *given_level* to a numeric logbook level.

    String inputs are stripped, upper-cased and resolved via
    ``logbook.lookup_level``; non-strings are assumed to already be
    numeric levels and are returned unchanged.
    """
    # NOTE(review): ``types.StringTypes`` exists only on Python 2 (it is
    # ``(str, unicode)``); on Python 3 this raises AttributeError — this
    # function is Python-2-only as written.
    if isinstance(given_level, types.StringTypes):
        return logbook.lookup_level(given_level.strip().upper())
    else:
        return given_level
'--debug', dest='level', action='store_const', const='DEBUG', help="use loglevel DEBUG, this is equivalent to `-l DEBUG`") group.add_argument( '--silent', dest='silent', action='store_true', help="disable logging to stdout.") args = parser.parse_args() if args.silent: log_handler = logbook.NullHandler() else: log_handler = logbook.StreamHandler(sys.stdout, level=logbook.lookup_level(args.level.upper())) with log_handler: if args.env: env = find_dotenv(args.env) log.info('loading environment from {}', env) load_dotenv(env) docker_client = docker.from_env(version='auto') repo = args.repository or os.environ.get('BUILD_REPOSITORY', '') tags = set(args.tags or {os.environ.get('BUILD_TAG', 'latest')}) if args.will_add_latest: tags.add('latest') build_args = {k[10:]: v for k, v in os.environ.items() if k.startswith('BUILD_ARG_')}
def configure_logging(config):
    """Configure logbook + structlog output for the process.

    Pushes a stderr logbook handler at ``config['log_level']`` and then
    configures structlog with either a human-readable console renderer
    (``config['structlog_dev']`` truthy) or a JSON renderer.
    """
    level = logbook.lookup_level(config['log_level'])
    handler = logbook.StderrHandler(level=level, format_string='{record.message}')
    handler.push_application()

    def logger_factory(name=None):
        # Derive the logger name from the first application frame when the
        # caller did not supply one.
        from structlog._frames import _find_first_app_frame_and_name
        if name is None:
            _, name = _find_first_app_frame_and_name(additional_ignores=[
                f'{__package__}.logconf',
            ])
        return logbook.Logger(name, level=level)

    def add_severity(logger, method_name, event_dict):
        # Map the log method name to an upper-cased 'severity' field.
        if method_name == 'warn':
            method_name = 'warning'
        event_dict['severity'] = method_name.upper()
        return event_dict

    # NOTE(review): second parameter is misspelled ('method_rame'); it is
    # unused and processors are called positionally, so this is harmless.
    def add_func_name(logger, method_rame, event_dict):
        record = event_dict.get('_record')
        if record is None:
            return event_dict
        event_dict['function'] = record.funcName
        return event_dict

    def order_keys(order):
        """ Order keys for JSON readability when not using structlog_dev=True """
        def processor(logger, method_name, event_dict):
            if not isinstance(event_dict, OrderedDict):
                return event_dict
            # Move the ordered keys to the front, preserving their order.
            for key in reversed(order):
                if key in event_dict:
                    event_dict.move_to_end(key, last=False)
            return event_dict
        return processor

    def event_enum_to_str(logger, method_name, event_dict):
        # Replace EventEnum values with their string value, keeping the
        # original enum under 'snapshot_event'.
        from k8s_snapshots import events
        event = event_dict.get('event')
        if event is None:
            return event_dict
        if isinstance(event, events.EventEnum):
            event_dict['snapshot_event'] = event
            event_dict['event'] = event.value
        return event_dict

    key_order = ['message', 'event', 'level']

    if config['structlog_dev']:
        # Developer mode: colourised console output.
        structlog.configure(
            processors=[
                event_enum_to_str,
                ProcessStructuredErrors(),
                serialize_rules,
                structlog.stdlib.add_logger_name,
                structlog.stdlib.add_log_level,
                structlog.stdlib.PositionalArgumentsFormatter(),
                structlog.processors.TimeStamper(fmt='ISO'),
                structlog.processors.StackInfoRenderer(),
                structlog.processors.format_exc_info,
                add_func_name,
                add_message,
                order_keys(key_order),
                structlog.dev.ConsoleRenderer()  # <===
            ],
            context_class=OrderedDict,
            logger_factory=logger_factory,
            wrapper_class=structlog.stdlib.BoundLogger,
            cache_logger_on_first_use=True,
        )
    else:
        # Make it so that 0 ⇒ None
        indent = config['structlog_json_indent'] or None
        structlog.configure(
            processors=[
                event_enum_to_str,
                add_severity,
                ProcessStructuredErrors(),
                serialize_rules,
                structlog.stdlib.add_logger_name,
                structlog.processors.TimeStamper(fmt='ISO'),
                structlog.processors.StackInfoRenderer(),
                structlog.processors.format_exc_info,
                add_func_name,
                add_message,
                order_keys(key_order),
                structlog.processors.JSONRenderer(
                    indent=indent,
                    cls=SnapshotsJSONEncoder,
                )
            ],
            context_class=OrderedDict,
            wrapper_class=structlog.stdlib.BoundLogger,
            logger_factory=logger_factory,
            cache_logger_on_first_use=True,
        )