def prepare():
    """Prepare Oslo Logging (2 or 3 steps)

    Use of Oslo Logging involves the following:

    * logging.register_options
    * logging.set_defaults (optional)
    * logging.setup
    """
    # Required step to register common, logging and generic configuration
    # variables
    logging.register_options(CONF)

    # Optional step to set new defaults if necessary for
    # * logging_context_format_string
    # * default_log_levels
    #
    # These variables default to respectively:
    #
    #  import oslo_log
    #  oslo_log._options.DEFAULT_LOG_LEVELS
    #  oslo_log._options.log_opts[0].default
    #
    extra_log_level_defaults = ['dogpile=INFO', 'routes=INFO']

    # BUG FIX: the original contained a Python 2 print statement
    # (`print logging.get_default_log_levels()`), which is a SyntaxError
    # under Python 3; the leftover debug output has been removed.
    logging.set_defaults(
        default_log_levels=logging.get_default_log_levels() +
        extra_log_level_defaults)

    # Required setup based on configuration and domain
    logging.setup(CONF, DOMAIN)
def parse_args(argv, default_config_files=None, configure_db=True,
               init_rpc=True):
    """Register options, parse CLI/config arguments and initialise RPC
    and database access for the service."""
    log.register_options(CONF)

    # Start from the oslo.log defaults (which already include suds=INFO)
    # and append only the project-specific extras.
    log.set_defaults(default_log_levels=(log.get_default_log_levels() +
                                         _EXTRA_DEFAULT_LOG_LEVELS))
    options.set_defaults(CONF, connection=_DEFAULT_SQL_CONNECTION,
                         sqlite_db='nova.sqlite')
    rpc.set_defaults(control_exchange='nova')
    cache.configure(CONF)
    debugger.register_cli_opts()
    config.set_middleware_defaults()

    CONF(argv[1:], project='nova',
         version=version.version_string(),
         default_config_files=default_config_files)

    if init_rpc:
        rpc.init(CONF)
    if configure_db:
        sqlalchemy_api.configure(CONF)
def main():
    """Parse options and call the appropriate class/method."""
    CONF.register_cli_opt(category_opt)
    try:
        com_config.parse_args(sys.argv)
        logging.set_defaults(
            default_log_levels=(logging.get_default_log_levels() +
                                _EXTRA_DEFAULT_LOG_LEVELS))
        logging.setup(CONF, "jacket")
    except cfg.ConfigFilesNotFoundError:
        # A config file exists but could not be read: re-exec the whole
        # command under sudo as the file's owner so it becomes readable.
        cfgfile = CONF.config_file[-1] if CONF.config_file else None
        if cfgfile and not os.access(cfgfile, os.R_OK):
            st = os.stat(cfgfile)
            print(_("Could not read %s. Re-running with sudo") % cfgfile)
            try:
                os.execvp('sudo', ['sudo', '-u', '#%s' % st.st_uid] + sys.argv)
            except OSError:
                print(_('sudo failed, continuing as if nothing happened'))
        print(_('Please re-run jacket-manage as root.'))
        return 2

    # Dispatch to the selected category action with its fetched kwargs.
    fn = CONF.category.action_fn
    fn_kwargs = fetch_func_args(fn)
    fn(**fn_kwargs)
def prepare_service(args=None, default_config_files=None):
    """Build and return a fully parsed gnocchi-nagios configuration."""
    conf = cfg.ConfigOpts()
    log.register_options(conf)

    # Register our own Gnocchi options
    for group, options in opts.list_opts():
        target_group = None if group == "DEFAULT" else group
        conf.register_opts(list(options), group=target_group)

    ka_loading.register_auth_conf_options(conf, 'gnocchi')
    ka_loading.register_session_conf_options(conf, 'gnocchi')
    conf.set_default("workers", get_default_workers())
    conf.set_default("auth_type", "gnocchi-noauth", "gnocchi")

    conf(args, project='gnocchi-nagios',
         validate_default_values=True,
         default_config_files=default_config_files,
         version=pbr.version.VersionInfo('gnocchi-nagios').version_string())

    # passlib is noisy at DEBUG; pin it to INFO on top of the defaults.
    log.set_defaults(default_log_levels=(log.get_default_log_levels() +
                                         ["passlib.utils.compat=INFO"]))
    log.setup(conf, 'gnocchi-nagios')
    conf.log_opt_values(LOG, log.DEBUG)
    return conf
def main():
    """Entry point for the trove guest agent.

    Configures logging, parses options, prepares local storage and the
    database service user, then starts the guest RPC service.
    """
    # Pin the noisy docker logger before registering/parsing options.
    log_levels = [
        'docker=WARN',
    ]
    default_log_levels = logging.get_default_log_levels()
    default_log_levels.extend(log_levels)
    logging.set_defaults(default_log_levels=default_log_levels)
    logging.register_options(CONF)

    cfg.parse_args(sys.argv)
    logging.setup(CONF, None)
    debug_utils.setup()

    # Resolve the datastore-specific manager class from the registry.
    from trove.guestagent import dbaas
    manager = dbaas.datastore_registry().get(CONF.datastore_manager)
    if not manager:
        msg = (_("Manager class not registered for datastore manager %s") %
               CONF.datastore_manager)
        raise RuntimeError(msg)

    # guest_id is injected via guest_info.conf; without it this agent
    # cannot identify itself on the message bus.
    if not CONF.guest_id:
        msg = (_("The guest_id parameter is not set. guest_info.conf "
                 "was not injected into the guest or not read by guestagent"))
        raise RuntimeError(msg)

    # Create user and group for running docker container.
    LOG.info('Creating user and group for database service')
    uid = cfg.get_configuration_property('database_service_uid')
    operating_system.create_user('database', uid)

    # Mount device if needed.
    # When doing rebuild, the device should be already formatted but not
    # mounted.
    device_path = CONF.get(CONF.datastore_manager).device_path
    mount_point = CONF.get(CONF.datastore_manager).mount_point
    device = volume.VolumeDevice(device_path)
    if not device.mount_points(device_path):
        LOG.info('Preparing the storage for %s, mount path %s',
                 device_path, mount_point)
        device.format()
        device.mount(mount_point)
        operating_system.chown(mount_point,
                               CONF.database_service_uid,
                               CONF.database_service_uid,
                               recursive=True, as_root=True)

    # rpc module must be loaded after decision about thread monkeypatching
    # because if thread module is not monkeypatched we can't use eventlet
    # executor from oslo_messaging library.
    from trove import rpc
    rpc.init(CONF)

    from trove.common.rpc import service as rpc_service
    server = rpc_service.RpcService(
        key=CONF.instance_rpc_encr_key,
        topic="guestagent.%s" % CONF.guest_id,
        manager=manager,
        host=CONF.guest_id,
        rpc_api_version=guest_api.API.API_LATEST_VERSION)

    # 'mutate' lets the launcher reload config on SIGHUP without restart.
    launcher = openstack_service.launch(CONF, server, restart_method='mutate')
    launcher.wait()
def parse_args(argv, default_config_files=None, configure_db=True,
               init_rpc=True):
    """Register options, parse arguments and initialise RPC/DB access."""
    log.register_options(CONF)

    # oslo.log defaults already include suds=INFO; glanceclient verbosity
    # follows the [glance]/debug option.
    if CONF.glance.debug:
        extra_default_log_levels = ['glanceclient=DEBUG']
    else:
        extra_default_log_levels = ['glanceclient=WARN']
    log.set_defaults(default_log_levels=(log.get_default_log_levels() +
                                         extra_default_log_levels))

    rpc.set_defaults(control_exchange='nova')
    if profiler:
        profiler.set_defaults(CONF)
    config.set_middleware_defaults()

    CONF(argv[1:], project='nova',
         version=version.version_string(),
         default_config_files=default_config_files)

    if init_rpc:
        rpc.init(CONF)
    if configure_db:
        sqlalchemy_api.configure(CONF)
def main():
    """Entry point for the cinder-backup service.

    Parses configuration, sets up logging and privilege separation, then
    launches one or more backup service processes.
    """
    objects.register_all()
    gmr_opts.set_defaults(CONF)
    CONF(sys.argv[1:], project='cinder',
         version=version.version_string())
    logging.set_defaults(
        default_log_levels=logging.get_default_log_levels() +
        _EXTRA_DEFAULT_LOG_LEVELS)
    logging.setup(CONF, "cinder")
    # Route warnings.warn() messages through the logging system.
    python_logging.captureWarnings(True)
    priv_context.init(root_helper=shlex.split(utils.get_root_helper()))
    utils.monkey_patch()
    # Enable the Guru Meditation Report for post-mortem debugging.
    gmr.TextGuruMeditation.setup_autorun(version, conf=CONF)
    global LOG
    LOG = logging.getLogger(__name__)

    if CONF.backup_workers > 1:
        # Multi-process mode: launch one backup process per worker.
        LOG.info('Backup running with %s processes.', CONF.backup_workers)
        launcher = service.get_launcher()

        for i in range(CONF.backup_workers):
            _launch_backup_process(launcher, i)

        launcher.wait()
    else:
        LOG.info('Backup running in single process mode.')
        server = service.Service.create(binary='cinder-backup',
                                        coordination=True,
                                        process_number=1)
        service.serve(server)
        service.wait()
def _log_setup(self):
    """Configure oslo.log for the controller using values from conf_log."""
    CONF = cfg.CONF
    level = self.conf_log.get('log_level')
    logging.set_defaults(
        default_log_levels=(logging.get_default_log_levels() +
                            ['controller=' + level]))
    DOMAIN = "masakari"
    CONF.log_file = self.conf_log.get("log_file")
    CONF.use_stderr = False
    logging.register_options(CONF)
    logging.setup(CONF, DOMAIN)

    # Ensure the directory holding the log file exists; tolerate a
    # concurrent creation but re-raise any other failure.
    log_dir = os.path.dirname(self.conf_log.get("log_file"))
    try:
        os.makedirs(log_dir)
    except OSError as exc:
        if not (exc.errno == errno.EEXIST and os.path.isdir(log_dir)):
            raise
    return
def parse_args(argv, no_yaml=False):
    """Sets up configuration of monasca-notification."""
    global _CONF_LOADED
    if _CONF_LOADED:
        LOG.debug('Configuration has been already loaded')
        return

    conf.register_opts(CONF)
    log.register_options(CONF)

    # No project-specific extras: the oslo.log defaults are sufficient.
    levels = log.get_default_log_levels()
    log.set_defaults(default_log_levels=levels)

    CONF(args=argv,
         project='monasca',
         prog='notification',
         version=version.version_string,
         description='''
monasca-notification is an engine responsible for
transforming alarm transitions into proper notifications
''')

    log.setup(CONF,
              product_name='monasca-notification',
              version=version.version_string)

    if not no_yaml:
        # note(trebskit) used only in test cases as the notification.yml
        # will be dropped eventually
        set_from_yaml()

    _CONF_LOADED = True
def parse_args(argv, default_config_files=None, configure_db=True,
               init_rpc=True):
    """Register options, parse arguments and initialise RPC/DB access."""
    log.register_options(CONF)

    # oslo.log defaults already include suds=INFO; pin glanceclient
    # according to the [glance]/debug option.
    glance_level = 'DEBUG' if CONF.glance.debug else 'WARN'
    extra_default_log_levels = ['glanceclient=' + glance_level]
    log.set_defaults(default_log_levels=(log.get_default_log_levels() +
                                         extra_default_log_levels))

    rpc.set_defaults(control_exchange='nova')
    config.set_middleware_defaults()

    CONF(argv[1:], project='nova',
         version=version.version_string(),
         default_config_files=default_config_files)

    if init_rpc:
        rpc.init(CONF)
    if configure_db:
        sqlalchemy_api.configure(CONF)
def parse_args(argv, default_config_files=None, configure_db=True,
               init_rpc=True):
    """Register options, parse arguments and initialise RPC plus the
    main and placement databases."""
    log.register_options(CONF)

    # oslo.log defaults already include suds=INFO; add only the extras
    # Nova needs.
    if CONF.glance.debug:
        extra = ['glanceclient=DEBUG']
    else:
        extra = ['glanceclient=WARN']
    # NOTE(danms): DEBUG logging in privsep will result in some large
    # and potentially sensitive things being logged.
    extra.append('oslo.privsep.daemon=INFO')
    log.set_defaults(default_log_levels=log.get_default_log_levels() + extra)

    rpc.set_defaults(control_exchange='nova')
    if profiler:
        profiler.set_defaults(CONF)
    config.set_middleware_defaults()

    CONF(argv[1:], project='nova',
         version=version.version_string(),
         default_config_files=default_config_files)

    if init_rpc:
        rpc.init(CONF)
    if configure_db:
        sqlalchemy_api.configure(CONF)
        placement_db.configure(CONF)
def parse_args(argv, default_config_files=None, configure_db=True,
               init_rpc=True):
    """Register options, parse arguments and initialise RPC/DB access."""
    log.register_options(CONF)

    # oslo.log defaults already include suds=INFO; glanceclient verbosity
    # follows the [glance]/debug option.
    glance_level = 'DEBUG' if CONF.glance.debug else 'WARN'
    extra_levels = ['glanceclient=' + glance_level]
    # NOTE(danms): DEBUG logging in privsep will result in some large
    # and potentially sensitive things being logged.
    extra_levels.append('oslo.privsep.daemon=INFO')
    log.set_defaults(
        default_log_levels=log.get_default_log_levels() + extra_levels)

    rpc.set_defaults(control_exchange='nova')
    if profiler:
        profiler.set_defaults(CONF)
    config.set_middleware_defaults()

    CONF(argv[1:], project='nova',
         version=version.version_string(),
         default_config_files=default_config_files)

    if init_rpc:
        rpc.init(CONF)
    if configure_db:
        sqlalchemy_api.configure(CONF)
def parse_args(args=None, usage=None, default_config_files=None):
    """Register CLI/config options and parse the qinling configuration."""
    CONF.register_cli_opts([launch_opt])
    for group, options in list_opts():
        CONF.register_opts(list(options), group)

    # Quiet down noisy dependencies; kubernetes REST client verbosity is
    # itself configurable via [kubernetes]/log_devel.
    extra_levels = [
        'eventlet.wsgi.server=WARN',
        'oslo_service.periodic_task=INFO',
        'oslo_service.loopingcall=INFO',
        'oslo_db=WARN',
        'oslo_concurrency.lockutils=WARN',
        'kubernetes.client.rest=%s' % CONF.kubernetes.log_devel,
        'keystoneclient=INFO',
        'requests.packages.urllib3.connectionpool=CRITICAL',
        'urllib3.connectionpool=CRITICAL',
        'cotyledon=INFO',
        'futurist.periodics=WARN',
    ]
    levels = log.get_default_log_levels()
    levels.extend(extra_levels)
    log.set_defaults(default_log_levels=levels)
    log.register_options(CONF)

    CONF(args=args,
         project='qinling',
         version=version,
         usage=usage,
         default_config_files=default_config_files)
def setup_logging(conf):
    """Configure oslo.log for deckhand and capture Python warnings."""
    # Dependent libraries with unhelpful debug levels can be pinned
    # higher here; none are currently required.
    extra_log_level_defaults = []
    logging.set_defaults(
        default_log_levels=(logging.get_default_log_levels() +
                            extra_log_level_defaults))
    logging.setup(conf, 'deckhand')
    py_logging.captureWarnings(True)
def prepare_log():
    """Register logging options, raise noisy library levels, then set up."""
    logging.register_options(CONF)
    # dogpile and routes are too chatty at their default levels.
    noisy = ['dogpile=INFO', 'routes=INFO']
    logging.set_defaults(
        default_log_levels=logging.get_default_log_levels() + noisy)
    logging.setup(CONF, DOMAIN)
def setup_logging():
    """Initialise logging for kuryr-kubernetes and log version info."""
    logging.setup(CONF, 'kuryr-kubernetes')
    # No extra levels: stick with the oslo.log defaults.
    logging.set_defaults(default_log_levels=logging.get_default_log_levels())
    version_k8s = version.version_info.version_string()
    LOG.info("Logging enabled!")
    details = {'prog': sys.argv[0], 'version': version_k8s}
    LOG.info("%(prog)s version %(version)s", details)
def parse_args(argv, default_config_files=None):
    """Register logging options and parse the aardvark configuration."""
    log.register_options(CONF)
    log.set_defaults(default_log_levels=log.get_default_log_levels())
    CONF(argv[1:],
         project='aardvark',
         version=version.version_info,
         default_config_files=default_config_files)
def prepare_service(args=None, conf=None, default_config_files=None):
    """Prepare and return a fully parsed Gnocchi configuration object.

    Registers library and Gnocchi options, parses the command line and
    config files, derives coordination/policy defaults, and sets up
    logging.
    """
    if conf is None:
        conf = cfg.ConfigOpts()
    opts.set_defaults()
    # FIXME(jd) Use the pkg_entry info to register the options of these libs
    log.register_options(conf)
    db_options.set_defaults(conf)
    policy_opts.set_defaults(conf)

    # Register our own Gnocchi options
    for group, options in opts.list_opts():
        conf.register_opts(list(options),
                           group=None if group == "DEFAULT" else group)

    conf.set_default("workers", utils.get_default_workers(), group="metricd")

    conf(args, project='gnocchi', validate_default_values=True,
         default_config_files=default_config_files,
         version=pbr.version.VersionInfo('gnocchi').version_string())

    # HACK(jd) I'm not happy about that, fix AP class to handle a conf object?
    archive_policy.ArchivePolicy.DEFAULT_AGGREGATION_METHODS = (
        conf.archive_policy.default_aggregation_methods)

    # If no coordination URL is provided, default to using the indexer as
    # coordinator
    if conf.storage.coordination_url is None:
        if conf.storage.driver == "redis":
            conf.set_default("coordination_url",
                             conf.storage.redis_url,
                             "storage")
        elif conf.incoming.driver == "redis":
            conf.set_default("coordination_url",
                             conf.incoming.redis_url,
                             "storage")
        else:
            # Reuse the indexer URL, stripping any "driver+" scheme prefix
            # (e.g. "mysql+pymysql" -> "mysql").
            parsed = urlparse.urlparse(conf.indexer.url)
            proto, _, _ = parsed.scheme.partition("+")
            parsed = list(parsed)
            # Set proto without the + part
            parsed[0] = proto
            conf.set_default("coordination_url",
                             urlparse.urlunparse(parsed),
                             "storage")

    # Fall back to the bundled policy.json when the configured policy
    # file cannot be located.
    cfg_path = conf.oslo_policy.policy_file
    if not os.path.isabs(cfg_path):
        cfg_path = conf.find_file(cfg_path)
    if cfg_path is None or not os.path.exists(cfg_path):
        cfg_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), 'rest', 'policy.json'))
    conf.set_default('policy_file', cfg_path, group='oslo_policy')

    # passlib is noisy at DEBUG; pin it to INFO on top of the defaults.
    log.set_defaults(default_log_levels=log.get_default_log_levels() +
                     ["passlib.utils.compat=INFO"])
    log.setup(conf, 'gnocchi')
    conf.log_opt_values(LOG, log.DEBUG)
    return conf
def parse_args(argv, default_config_files=None):
    """Register logging options and parse the DEEPaaS configuration."""
    log.register_options(cfg.CONF)
    log.set_defaults(default_log_levels=log.get_default_log_levels())
    cfg.CONF(argv[1:],
             project='deepaas',
             version=deepaas.__version__,
             default_config_files=default_config_files)
def parse_args(argv, default_config_files=None):
    """Register logging options and parse the masakarimonitors config."""
    log.register_options(CONF)
    # The oslo.log defaults (which include suds=INFO) are sufficient;
    # no extra levels are needed.
    log.set_defaults(default_log_levels=log.get_default_log_levels())
    CONF(argv[1:],
         project='masakarimonitors',
         version=version.version_string(),
         default_config_files=default_config_files)
def setup_logging():
    """Initialise logging for fuxi-kubernetes and log version info."""
    logging.setup(CONF, 'fuxi-kubernetes')
    # No extra levels: stick with the oslo.log defaults.
    logging.set_defaults(default_log_levels=logging.get_default_log_levels())
    version_k8s = pbr.version.VersionInfo('fuxi-kubernetes').version_string()
    LOG.info(_LI("Logging enabled!"))
    details = {'prog': sys.argv[0], 'version': version_k8s}
    LOG.info(_LI("%(prog)s version %(version)s"), details)
def setup_logging(config):
    """Configure oslo.log for Nova and capture Python warnings."""
    # Dependent libraries with unhelpful debug levels are pinned to a
    # higher default here.
    noisy_libraries = [
        'routes=INFO',
    ]
    logging.set_defaults(
        default_log_levels=logging.get_default_log_levels() + noisy_libraries)
    logging.setup(config, 'nova')
    py_logging.captureWarnings(True)
def init(args, **kwargs):
    """Parse the Dragonflow configuration and initialise logging/profiling."""
    cfg.CONF(args=args, project='dragonflow', **kwargs)
    cfg.CONF.host = socket.gethostname()

    product_name = "dragonflow"
    logging.set_defaults(
        default_log_levels=(logging.get_default_log_levels() +
                            EXTRA_LOG_LEVEL_DEFAULTS))
    logging.setup(cfg.CONF, product_name)

    LOG.info("Logging enabled!")
    LOG.info("%(prog)s", {'prog': sys.argv[0]})
    LOG.debug("command line: %s", " ".join(sys.argv))
    df_profiler.setup(sys.argv[0], conf.CONF.host)
def set_request_context(self):
    """Apply log-format defaults from conf_log and create a RequestContext."""
    level = self.conf_log.get('log_level')
    context_format = self.conf_log.get("logging_context_format_string")
    logging.set_defaults(
        logging_context_format_string=context_format,
        default_log_levels=(logging.get_default_log_levels() +
                            ['controller=' + level]))
    context.RequestContext()
    return
def setup_logging():
    """Configure oslo.log for bagpipe-bgp, quieting noisy submodules."""
    # Even in debug mode we don't want too much talk from these.
    extra_log_level_defaults = [
        '%s.engine.exa_bgp_peer_worker.exabgp=INFO' % BAGPIPE_BGP_MODULE,
        '%s.common.looking_glass=WARNING' % BAGPIPE_BGP_MODULE
    ]
    logging.set_defaults(
        default_log_levels=(logging.get_default_log_levels() +
                            extra_log_level_defaults))
    # BUG FIX: product name was misspelled "bagpipe-bpg"; use the correct
    # project name, consistent with the other setup_logging variant in
    # this file.
    logging.setup(cfg.CONF, "bagpipe-bgp")
def parse_args(argv, default_config_files=None, configure_db=True,
               init_rpc=True):
    """Register options and parse the masakari configuration."""
    log.register_options(CONF)
    # The oslo.log defaults (which include suds=INFO) are sufficient.
    log.set_defaults(default_log_levels=log.get_default_log_levels())
    config.set_middleware_defaults()
    CONF(argv[1:],
         project='masakari',
         version=version.version_string(),
         default_config_files=default_config_files)
def setup_logging():
    """Sets up the logging options for a log with supplied name."""
    product_name = "neutron"
    logging.setup(cfg.CONF, product_name)
    # We use the oslo.log default log levels; Neutron needs no extras.
    logging.set_defaults(default_log_levels=logging.get_default_log_levels())
    LOG.info(_LI("Logging enabled!"))
    release = version.version_info.release_string()
    LOG.info(_LI("%(prog)s version %(version)s"),
             {'prog': sys.argv[0], 'version': release})
    LOG.debug("command line: %s", " ".join(sys.argv))
def set_default_for_default_log_levels():
    """Set the default for the default_log_levels option for Armada.

    Armada uses some logging packages that other OpenStack services do
    not; this raises their default levels. Must be called before CONF().
    """
    # The kubernetes REST client is too chatty at DEBUG.
    extras = ['kubernetes.client.rest=INFO']
    log.set_defaults(default_log_levels=log.get_default_log_levels() + extras)
def parse_args(args=None, usage=None, default_config_files=None):
    """Apply Mistral log-level defaults and parse the configuration."""
    levels = log.get_default_log_levels()
    levels.extend(_DEFAULT_LOG_LEVELS)
    log.set_defaults(default_log_levels=levels)
    log.register_options(CONF)
    CONF(args=args,
         project='mistral',
         version=version.version_string,
         usage=usage,
         default_config_files=default_config_files)
def setup_logging():
    """Configure oslo.log for bagpipe-bgp, quieting noisy submodules."""
    # Even in debug mode we don't want too much talk from these.
    quiet = [
        '%s.engine.exabgp_peer_worker.exabgp=INFO' % BAGPIPE_BGP_MODULE,
        '%s.common.looking_glass=WARNING' % BAGPIPE_BGP_MODULE,
        '%s.engine.route_table_manager=INFO' % BAGPIPE_BGP_MODULE,
        'ovsdbapp.backend.ovs_idl.vlog=INFO',
    ]
    logging.set_defaults(
        default_log_levels=logging.get_default_log_levels() + quiet)
    logging.setup(cfg.CONF, "bagpipe-bgp")
def set_log_defaults():
    """Apply Nova's extra default log levels on top of oslo.log's.

    oslo.log's defaults already include suds=INFO; only the extras Nova
    needs are appended here.
    """
    # glanceclient verbosity follows the [glance]/debug option.
    glance_level = 'DEBUG' if CONF.glance.debug else 'WARN'
    extra = ['glanceclient=' + glance_level]
    # NOTE(danms): DEBUG logging in privsep will result in some large
    # and potentially sensitive things being logged.
    extra.append('oslo.privsep.daemon=INFO')
    log.set_defaults(default_log_levels=log.get_default_log_levels() + extra)
def setup_logging(conf):
    """Sets up the logging options for a log with supplied name.

    :param conf: a cfg.ConfOpts object
    """
    logging.set_defaults(
        default_log_levels=(logging.get_default_log_levels() +
                            EXTRA_LOG_LEVEL_DEFAULTS))
    product_name = "octavia"
    logging.setup(conf, product_name)
    LOG.info("Logging enabled!")
    release = version.version_info.release_string()
    LOG.info("%(prog)s version %(version)s",
             {'prog': sys.argv[0], 'version': release})
    LOG.debug("command line: %s", " ".join(sys.argv))
def setup_logging():
    """Sets up the logging options for a log with supplied name."""
    product_name = "networking-ovn"
    # Start from the oslo.log default levels, adding only what we need.
    logging.set_defaults(
        default_log_levels=(logging.get_default_log_levels() +
                            EXTRA_LOG_LEVEL_DEFAULTS))
    logging.setup(cfg.CONF, product_name)
    LOG.info("Logging enabled!")
    release = version.version_info.release_string()
    LOG.info("%(prog)s version %(version)s",
             {'prog': sys.argv[0], 'version': release})
    LOG.debug("command line: %s", " ".join(sys.argv))
def prepare_service(argv=None):
    """Register options, apply defaults and set up logging for sysinv."""
    if argv is None:
        argv = []

    logging.register_options(cfg.CONF)
    # Pin chatty dependencies above DEBUG.
    quiet_libs = [
        'qpid.messaging=INFO',
        'keystoneclient=INFO',
        'eventlet.wsgi.server=WARN',
    ]
    logging.set_defaults(
        default_log_levels=logging.get_default_log_levels() + quiet_libs)
    rpc.set_defaults(control_exchange='sysinv')
    cfg.CONF(argv[1:], project='sysinv', version=version.version_string())
    logging.setup(cfg.CONF, 'sysinv')
def parse_args(args=None, usage=None, default_config_files=None):
    """Apply Mistral log-level defaults and parse the configuration."""
    levels = list(log.get_default_log_levels())
    levels.extend(_DEFAULT_LOG_LEVELS)
    log.set_defaults(default_log_levels=levels)
    log.register_options(CONF)
    CONF(args=args, project='mistral', version=version.version_string,
         usage=usage, default_config_files=default_config_files)
def prepare():
    """Prepare Oslo Logging (2 or 3 steps)

    Use of Oslo Logging involves the following:

    * logging.register_options
    * logging.set_defaults (optional)
    * logging.setup
    """
    LOG.debug("Prepare Oslo Logging")

    LOG.info("Size of configuration options before %d", len(CONF))

    # Required step to register common, logging and generic configuration
    # variables
    logging.register_options(CONF)

    LOG.info("Size of configuration options after %d", len(CONF))

    # Optional step to set new defaults if necessary for
    # * logging_context_format_string
    # * default_log_levels
    #
    # These variables default to respectively:
    #
    #  import oslo_log
    #  oslo_log._options.DEFAULT_LOG_LEVELS
    #  oslo_log._options.log_opts[0].default
    #
    custom_log_level_defaults = logging.get_default_log_levels() + [
        'dogpile=INFO',
        'routes=INFO'
    ]

    logging.set_defaults(default_log_levels=custom_log_level_defaults)

    # NOTE: We cannot show the contents of the CONF object
    # after register_options() because accessing this caches
    # the default_log_levels subsequently modified with set_defaults()
    LOG.info("List of Oslo Logging configuration options and current values")
    LOG.info("=" * 80)
    for c in CONF:
        # IDIOM FIX: pass arguments lazily to the logger instead of
        # eagerly interpolating with %, so the formatting work is skipped
        # whenever the INFO level is disabled.
        LOG.info("%s = %s", c, CONF[c])
    LOG.info("=" * 80)

    # Required setup based on configuration and domain
    logging.setup(CONF, DOMAIN)
def prepare_service(args=None, conf=None, default_config_files=None):
    """Prepare and return a fully parsed Gnocchi configuration object.

    Registers library and Gnocchi options, sizes the metricd worker pool
    from the CPU count, parses the command line and config files, derives
    a coordination URL default, and sets up logging.
    """
    if conf is None:
        conf = cfg.ConfigOpts()
    opts.set_defaults()
    # FIXME(jd) Use the pkg_entry info to register the options of these libs
    log.register_options(conf)
    db_options.set_defaults(conf)
    policy_opts.set_defaults(conf)

    # Register our own Gnocchi options
    for group, options in opts.list_opts():
        conf.register_opts(list(options),
                           group=None if group == "DEFAULT" else group)

    # HACK(jd) I'm not happy about that, fix AP class to handle a conf object?
    archive_policy.ArchivePolicy.DEFAULT_AGGREGATION_METHODS = (
        conf.archive_policy.default_aggregation_methods
    )

    # Default metricd workers to the CPU count; fall back to 1 where the
    # platform cannot report it.
    try:
        default_workers = multiprocessing.cpu_count() or 1
    except NotImplementedError:
        default_workers = 1

    conf.set_default("workers", default_workers, group="metricd")

    conf(args, project='gnocchi', validate_default_values=True,
         default_config_files=default_config_files,
         version=pbr.version.VersionInfo('gnocchi').version_string())

    # If no coordination URL is provided, default to using the indexer as
    # coordinator
    if conf.storage.coordination_url is None:
        # Reuse the indexer URL, stripping any "driver+" scheme prefix
        # (e.g. "mysql+pymysql" -> "mysql").
        parsed = urlparse.urlparse(conf.indexer.url)
        proto, _, _ = parsed.scheme.partition("+")
        parsed = list(parsed)
        # Set proto without the + part
        parsed[0] = proto
        conf.set_default("coordination_url",
                         urlparse.urlunparse(parsed),
                         "storage")

    # passlib is noisy at DEBUG; pin it to INFO on top of the defaults.
    log.set_defaults(default_log_levels=log.get_default_log_levels() +
                     ["passlib.utils.compat=INFO"])
    log.setup(conf, 'gnocchi')
    conf.log_opt_values(LOG, log.DEBUG)
    return conf
def set_default_for_default_log_levels():
    """Set the default for the default_log_levels option for keystone.

    Keystone uses some logging packages that other OpenStack services do
    not; this raises their default levels. Must be called before CONF().
    """
    noisy = [
        'dogpile=INFO',
        'routes=INFO',
    ]
    log.register_options(CONF)
    log.set_defaults(default_log_levels=log.get_default_log_levels() + noisy)
def init_logs(product="default_project"):
    """Register logging options, install colourised log format strings,
    and set up oslo.log for *product*.

    NOTE(review): the format strings below embed what look like ANSI
    colour sequences (e.g. "[01;35m") whose leading ESC characters may
    have been lost in transit — confirm against the original file.
    """
    logging.register_options(CONF)
    # Override the oslo.log format-string options in place on CONF.
    CONF.logging_exception_prefix = \
        "%(color)s%(asctime)s.%(msecs)03d " \
        "TRACE %(name)s [01;35m%(instance)s[00m"
    CONF.logging_debug_format_suffix = \
        "[00;33mfrom " \
        "(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d[00m"
    CONF.logging_default_format_string = \
        "%(asctime)s.%(msecs)03d %(color)s%(levelname)s %(name)s " \
        "[[00;36m-%(color)s] [01;35m%(instance)s%(color)s%(message)s[00m"
    CONF.logging_context_format_string = \
        "%(asctime)s.%(msecs)03d %(color)s%(levelname)s %(name)s " \
        "[[01;36m%(request_id)s " \
        "[00;36m%(user_name)s %(project_name)s%(color)s] " \
        "[01;35m%(instance)s%(color)s%(message)s[00m"
    # Keep the stock default log levels; only formats are customised.
    logging.set_defaults(default_log_levels=logging.get_default_log_levels())
    logging.setup(CONF, product)
def parse_args(argv, default_config_files=None, configure_db=True,
               init_rpc=True):
    """Register options, parse arguments and initialise RPC/DB access."""
    log.register_options(CONF)
    # oslo.log defaults already include suds=INFO; append only the extras
    # Nova needs.
    log.set_defaults(default_log_levels=(log.get_default_log_levels() +
                                         _EXTRA_DEFAULT_LOG_LEVELS))
    rpc.set_defaults(control_exchange='nova')
    config.set_middleware_defaults()

    CONF(argv[1:], project='nova',
         version=version.version_string(),
         default_config_files=default_config_files)

    if init_rpc:
        rpc.init(CONF)
    if configure_db:
        sqlalchemy_api.configure(CONF)
def prepare():
    """Prepare Oslo Logging (2 or 3 steps)

    Use of Oslo Logging involves the following:

    * logging.register_options
    * logging.set_defaults (optional)
    * logging.setup
    """
    # Required step to register common, logging and generic configuration
    # variables
    logging.register_options(CONF)

    # Optional step to override defaults for logging_context_format_string
    # and default_log_levels (their shipped values live in
    # oslo_log._options). dogpile and routes are too chatty by default.
    noisy_libraries = ['dogpile=INFO', 'routes=INFO']
    logging.set_defaults(
        default_log_levels=logging.get_default_log_levels() + noisy_libraries)

    # Required setup based on configuration and domain
    logging.setup(CONF, DOMAIN)
def parse_args(argv, no_yaml=False):
    """Sets up configuration of monasca-notification."""
    global _CONF_LOADED
    if _CONF_LOADED:
        LOG.debug('Configuration has been already loaded')
        return

    conf.register_opts(CONF)
    log.register_options(CONF)

    default_log_levels = (log.get_default_log_levels())
    log.set_defaults(default_log_levels=default_log_levels)

    CONF(args=argv,
         project='monasca',
         # BUG FIX: `prog` expects the program-name string; the original
         # passed sys.argv[1:] (a list), which corrupts usage/help output.
         # Use the same name as the sibling parse_args variant.
         prog='notification',
         version=version.version_string,
         default_config_files=_get_config_files(),
         description='''
monasca-notification is an engine responsible for
transforming alarm transitions into proper notifications
''')

    conf.register_enabled_plugin_opts(CONF)

    log.setup(CONF,
              product_name='monasca-notification',
              version=version.version_string)

    if not no_yaml:
        # note(trebskit) used only in test cases as the notification.yml
        # will be dropped eventually
        set_from_yaml()

    _CONF_LOADED = True
# Register database, DNS and accessible-IP option groups, then apply
# Sahara's extra default log levels on top of oslo.log's.
CONF.register_opts(db_opts)
CONF.register_opts(dns_opts)
CONF.register_opts(accessible_ip_opts)

log.register_options(CONF)

# Libraries that are too chatty at their default levels.
sahara_default_log_levels = [
    'stevedore=INFO',
    'eventlet.wsgi.server=WARN',
    'paramiko=WARN',
    'requests=WARN',
    'neutronclient=INFO',
]

log.set_defaults(
    default_log_levels=log.get_default_log_levels()+sahara_default_log_levels)


def list_opts():
    # NOTE (vgridnev): we make these import here to avoid problems
    # with importing unregistered options in sahara code.
    # As example, importing 'node_domain' in
    # sahara/conductor/objects.py
    from sahara.conductor import api
    from sahara import main as sahara_main
    from sahara.service import coordinator
    from sahara.service.edp import job_utils
    from sahara.service.heat import heat_engine
    from sahara.service.heat import templates
    from sahara.service import ntp_service
    # NOTE(review): no return statement is visible here — the function
    # body appears to continue beyond this chunk; confirm against the
    # full file before relying on this definition.
def test_default_log_level_method(self):
    """get_default_log_levels() must mirror _options.DEFAULT_LOG_LEVELS."""
    expected = _options.DEFAULT_LOG_LEVELS
    self.assertEqual(expected, log.get_default_log_levels())
# Default TTL cfg.IntOpt('default-ttl', default=3600, help='TTL Value'), # Default SOA Values cfg.IntOpt('default-soa-refresh-min', default=3500, deprecated_name='default-soa-refresh', help='SOA refresh-min value'), cfg.IntOpt('default-soa-refresh-max', default=3600, help='SOA max value'), cfg.IntOpt('default-soa-retry', default=600, help='SOA retry'), cfg.IntOpt('default-soa-expire', default=86400, help='SOA expire'), cfg.IntOpt('default-soa-minimum', default=3600, help='SOA minimum value'), # Supported record types cfg.ListOpt('supported-record-type', help='Supported record types', default=['A', 'AAAA', 'CNAME', 'MX', 'SRV', 'TXT', 'SPF', 'NS', 'PTR', 'SSHFP', 'SOA', 'NAPTR', 'CAA']), ] # Set some Oslo Log defaults log.set_defaults(default_log_levels=log.get_default_log_levels() + _EXTRA_DEFAULT_LOG_LEVELS) # Set some Oslo RPC defaults messaging.set_transport_defaults('designate') # Set some Oslo Oslo Concurrency defaults lockutils.set_defaults(lock_path='$state_path') cfg.CONF.register_opts(designate_opts)
def setup():
    """Register options, load the fixed config file and set up logging."""
    logging.register_options(CONF)
    # No extra levels: stick with the oslo.log defaults.
    logging.set_defaults(default_log_levels=logging.get_default_log_levels())
    config_args = ["--config-file", "/etc/pyingx/pyingx.conf"]
    CONF(config_args, project="test_server_conf")
    logging.setup(CONF, "test_server_log")