Example #1
def prepare_service(args=None, conf=None,
                    default_config_files=None):
    if conf is None:
        conf = cfg.ConfigOpts()
    # FIXME(jd) Use the pkg_entry info to register the options of these libs
    log.register_options(conf)
    db_options.set_defaults(conf)
    policy_opts.set_defaults(conf)

    # Register our own Gnocchi options
    for group, options in opts.list_opts():
        conf.register_opts(list(options),
                           group=None if group == "DEFAULT" else group)

    # HACK(jd) I'm not happy about that, fix AP class to handle a conf object?
    archive_policy.ArchivePolicy.DEFAULT_AGGREGATION_METHODS = (
        conf.archive_policy.default_aggregation_methods
    )

    try:
        default_workers = multiprocessing.cpu_count() or 1
    except NotImplementedError:
        default_workers = 1

    conf.set_default("workers", default_workers, group="api")
    conf.set_default("workers", default_workers, group="metricd")

    conf(args, project='gnocchi', validate_default_values=True,
         default_config_files=default_config_files)
    log.setup(conf, 'gnocchi')
    conf.log_opt_values(LOG, logging.DEBUG)

    return conf
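
Note: later examples (#2, #6 and #7) replace this inline try/except with a utils.get_default_workers() helper. Below is a plausible sketch of that helper, reconstructed from the fallback logic above; the project's actual implementation may differ.

import multiprocessing


def get_default_workers():
    # Default to one worker per CPU; fall back to a single worker when the
    # CPU count cannot be determined on this platform (hypothetical helper,
    # reconstructed from the try/except block in example #1).
    try:
        return multiprocessing.cpu_count() or 1
    except NotImplementedError:
        return 1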
Example #2
def prepare_service(args=None, conf=None, default_config_files=None):
    if conf is None:
        conf = cfg.ConfigOpts()
    opts.set_defaults()
    # FIXME(jd) Use the pkg_entry info to register the options of these libs
    log.register_options(conf)
    db_options.set_defaults(conf)
    policy_opts.set_defaults(conf)

    # Register our own Gnocchi options
    for group, options in opts.list_opts():
        conf.register_opts(list(options),
                           group=None if group == "DEFAULT" else group)

    conf.set_default("workers", utils.get_default_workers(), group="metricd")

    conf(args,
         project='gnocchi',
         validate_default_values=True,
         default_config_files=default_config_files,
         version=pbr.version.VersionInfo('gnocchi').version_string())

    # HACK(jd) I'm not happy about that, fix AP class to handle a conf object?
    archive_policy.ArchivePolicy.DEFAULT_AGGREGATION_METHODS = (
        conf.archive_policy.default_aggregation_methods)

    # If no coordination URL is provided, default to using the indexer as
    # coordinator
    if conf.storage.coordination_url is None:
        if conf.storage.driver == "redis":
            conf.set_default("coordination_url", conf.storage.redis_url,
                             "storage")
        elif conf.incoming.driver == "redis":
            conf.set_default("coordination_url", conf.incoming.redis_url,
                             "storage")
        else:
            parsed = urlparse.urlparse(conf.indexer.url)
            proto, _, _ = parsed.scheme.partition("+")
            parsed = list(parsed)
            # Set proto without the + part
            parsed[0] = proto
            conf.set_default("coordination_url", urlparse.urlunparse(parsed),
                             "storage")

    cfg_path = conf.oslo_policy.policy_file
    if not os.path.isabs(cfg_path):
        cfg_path = conf.find_file(cfg_path)
    if cfg_path is None or not os.path.exists(cfg_path):
        cfg_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), 'rest', 'policy.json'))
    conf.set_default('policy_file', cfg_path, group='oslo_policy')

    log.set_defaults(default_log_levels=log.get_default_log_levels() +
                     ["passlib.utils.compat=INFO"])
    log.setup(conf, 'gnocchi')
    conf.log_opt_values(LOG, log.DEBUG)

    return conf
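
Note: when no coordination URL is configured and neither the storage nor the incoming driver is Redis, the examples above derive one from the indexer URL by stripping the SQLAlchemy driver suffix (the part after "+") from the scheme. A minimal standalone sketch of that transformation; the sample URL is made up for illustration:

from urllib import parse as urlparse

indexer_url = "mysql+pymysql://gnocchi:secret@db.example.org/gnocchi"

parsed = urlparse.urlparse(indexer_url)
proto, _, _ = parsed.scheme.partition("+")    # "mysql+pymysql" -> "mysql"
parts = list(parsed)
parts[0] = proto                              # keep netloc, path, etc. unchanged
coordination_url = urlparse.urlunparse(parts)

print(coordination_url)  # mysql://gnocchi:secret@db.example.org/gnocchi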
Example #3
def prepare_service(args=None, conf=None,
                    default_config_files=None):
    if conf is None:
        conf = cfg.ConfigOpts()
    opts.set_defaults()
    # FIXME(jd) Use the pkg_entry info to register the options of these libs
    log.register_options(conf)
    db_options.set_defaults(conf)
    policy_opts.set_defaults(conf)

    # Register our own Gnocchi options
    for group, options in opts.list_opts():
        conf.register_opts(list(options),
                           group=None if group == "DEFAULT" else group)

    # HACK(jd) I'm not happy about that, fix AP class to handle a conf object?
    archive_policy.ArchivePolicy.DEFAULT_AGGREGATION_METHODS = (
        conf.archive_policy.default_aggregation_methods
    )

    try:
        default_workers = multiprocessing.cpu_count() or 1
    except NotImplementedError:
        default_workers = 1

    conf.set_default("workers", default_workers, group="metricd")

    conf(args, project='gnocchi', validate_default_values=True,
         default_config_files=default_config_files,
         version=pbr.version.VersionInfo('gnocchi').version_string())

    # If no coordination URL is provided, default to using the indexer as
    # coordinator
    if conf.storage.coordination_url is None:
        parsed = urlparse.urlparse(conf.indexer.url)
        proto, _, _ = parsed.scheme.partition("+")
        parsed = list(parsed)
        # Set proto without the + part
        parsed[0] = proto
        conf.set_default("coordination_url",
                         urlparse.urlunparse(parsed),
                         "storage")

    log.set_defaults(default_log_levels=log.get_default_log_levels() +
                     ["passlib.utils.compat=INFO"])
    log.setup(conf, 'gnocchi')
    conf.log_opt_values(LOG, log.DEBUG)

    return conf
Example #4
def prepare_service(args=None, conf=None, default_config_files=None):
    if conf is None:
        conf = cfg.ConfigOpts()
    # FIXME(jd) Use the pkg_entry info to register the options of these libs
    log.register_options(conf)
    db_options.set_defaults(conf)
    policy_opts.set_defaults(conf)

    # Register our own Gnocchi options
    for group, options in opts.list_opts():
        conf.register_opts(list(options),
                           group=None if group == "DEFAULT" else group)

    # HACK(jd) I'm not happy about that, fix AP class to handle a conf object?
    archive_policy.ArchivePolicy.DEFAULT_AGGREGATION_METHODS = (
        conf.archive_policy.default_aggregation_methods)

    try:
        default_workers = multiprocessing.cpu_count() or 1
    except NotImplementedError:
        default_workers = 1

    conf.set_default("workers", default_workers, group="api")
    conf.set_default("workers", default_workers, group="metricd")

    conf(args,
         project='gnocchi',
         validate_default_values=True,
         default_config_files=default_config_files,
         version=pbr.version.VersionInfo('gnocchi').version_string())

    # If no coordination URL is provided, default to using the indexer as
    # coordinator
    if conf.storage.coordination_url is None:
        parsed = urlparse.urlparse(conf.indexer.url)
        proto, _, _ = parsed.scheme.partition("+")
        parsed = list(parsed)
        # Set proto without the + part
        parsed[0] = proto
        conf.set_default("coordination_url", urlparse.urlunparse(parsed),
                         "storage")

    log.set_defaults(default_log_levels=log.get_default_log_levels() +
                     ["passlib.utils.compat=INFO"])
    log.setup(conf, 'gnocchi')
    conf.log_opt_values(LOG, log.DEBUG)

    return conf
Example #5
def prepare_service(args=None):
    conf = cfg.ConfigOpts()
    # FIXME(jd) Use the pkg_entry info to register the options of these libs
    log.register_options(conf)
    db_options.set_defaults(conf)
    for group, options in ks_opts.list_auth_token_opts():
        conf.register_opts(list(options), group=group)
    policy_opts.set_defaults(conf)

    # Register our own Gnocchi options
    for group, options in opts.list_opts():
        conf.register_opts(list(options),
                           group=None if group == "DEFAULT" else group)

    # HACK(jd) I'm not happy about that, fix AP class to handle a conf object?
    archive_policy.ArchivePolicy.DEFAULT_AGGREGATION_METHODS = (
        conf.archive_policy.default_aggregation_methods
    )

    try:
        default_workers = multiprocessing.cpu_count() or 1
    except NotImplementedError:
        default_workers = 1

    conf.set_default("workers", default_workers, group="api")

    conf(args, project='gnocchi', validate_default_values=True)
    log.setup(conf, 'gnocchi')
    conf.log_opt_values(LOG, logging.DEBUG)

    # NOTE(sileht): keystonemiddleware assumes we use the global CONF object
    # (LP#1428317). In Gnocchi this is not the case, so we have to register
    # some keystoneclient options ourselves. The missing options are hidden in
    # the private areas of keystonemiddleware and keystoneclient, so we create
    # a keystoneclient AuthPlugin object that registers the options into our
    # configuration object. This has to be done after the configuration files
    # have been loaded, because the auth plugin options depend on the auth
    # plugin present in the configuration file.
    keystoneclient.auth.register_conf_options(conf, 'keystone_authtoken')
    keystoneclient.auth.load_from_conf_options(conf, 'keystone_authtoken')

    return conf
Example #6
def prepare_service(args=None,
                    conf=None,
                    default_config_files=None,
                    log_to_std=False,
                    logging_level=None):
    if conf is None:
        conf = cfg.ConfigOpts()
    # FIXME(jd) Use the pkg_entry info to register the options of these libs
    db_options.set_defaults(conf)

    # Register our own Gnocchi options
    for group, options in opts.list_opts():
        conf.register_opts(list(options),
                           group=None if group == "DEFAULT" else group)

    conf.register_cli_opts(opts._cli_options)

    workers = utils.get_default_workers()
    conf.set_default("workers", workers, group="metricd")
    conf.set_default("aggregation_workers_number", workers, group="storage")

    conf(args,
         project='gnocchi',
         validate_default_values=True,
         default_config_files=default_config_files,
         version=pbr.version.VersionInfo('gnocchi').version_string())

    if not log_to_std and (conf.log_dir or conf.log_file):
        outputs = [
            daiquiri.output.File(filename=conf.log_file,
                                 directory=conf.log_dir)
        ]
    else:
        outputs = [daiquiri.output.STDERR]

    if conf.use_syslog:
        outputs.append(
            daiquiri.output.Syslog(facility=conf.syslog_log_facility))

    if conf.use_journal:
        outputs.append(daiquiri.output.Journal())

    daiquiri.setup(outputs=outputs)
    if logging_level is None:
        if conf.debug:
            logging_level = logging.DEBUG
        elif conf.verbose:
            logging_level = logging.INFO
        else:
            logging_level = logging.WARNING
    logging.getLogger("gnocchi").setLevel(logging_level)

    # HACK(jd) I'm not happy about that, fix AP class to handle a conf object?
    archive_policy.ArchivePolicy.DEFAULT_AGGREGATION_METHODS = (
        conf.archive_policy.default_aggregation_methods)

    # If no coordination URL is provided, default to using the indexer as
    # coordinator
    if conf.storage.coordination_url is None:
        if conf.storage.driver == "redis":
            conf.set_default("coordination_url", conf.storage.redis_url,
                             "storage")
        elif conf.incoming.driver == "redis":
            conf.set_default("coordination_url", conf.incoming.redis_url,
                             "storage")
        else:
            parsed = urlparse.urlparse(conf.indexer.url)
            proto, _, _ = parsed.scheme.partition("+")
            parsed = list(parsed)
            # Set proto without the + part
            parsed[0] = proto
            conf.set_default("coordination_url", urlparse.urlunparse(parsed),
                             "storage")

    conf.log_opt_values(LOG, logging.DEBUG)

    return conf
Example #7
File: service.py  Project: luo-zn/gnocchi
def prepare_service(args=None, conf=None,
                    default_config_files=None,
                    log_to_std=False, logging_level=None,
                    skip_log_opts=False):
    if conf is None:
        conf = cfg.ConfigOpts()
    # FIXME(jd) Use the pkg_entry info to register the options of these libs
    db_options.set_defaults(conf)

    # Register our own Gnocchi options
    for group, options in opts.list_opts():
        conf.register_opts(list(options),
                           group=None if group == "DEFAULT" else group)

    conf.register_cli_opts(opts._cli_options)

    workers = utils.get_default_workers()
    conf.set_default("workers", workers, group="metricd")
    conf.set_default("parallel_operations", workers)

    conf(args, project='gnocchi', validate_default_values=True,
         default_config_files=default_config_files,
         version=gnocchi.__version__)

    utils.parallel_map.MAX_WORKERS = conf.parallel_operations

    if not log_to_std and (conf.log_dir or conf.log_file):
        outputs = [daiquiri.output.File(filename=conf.log_file,
                                        directory=conf.log_dir)]
    else:
        outputs = [daiquiri.output.STDERR]

    if conf.use_syslog:
        outputs.append(
            daiquiri.output.Syslog(facility=conf.syslog_log_facility))

    if conf.use_journal:
        outputs.append(daiquiri.output.Journal())

    daiquiri.setup(outputs=outputs)
    if logging_level is None:
        if conf.debug:
            logging_level = logging.DEBUG
        elif conf.verbose:
            logging_level = logging.INFO
        else:
            logging_level = logging.WARNING
    logging.getLogger("gnocchi").setLevel(logging_level)

    # HACK(jd) I'm not happy about that, fix AP class to handle a conf object?
    archive_policy.ArchivePolicy.DEFAULT_AGGREGATION_METHODS = (
        conf.archive_policy.default_aggregation_methods
    )

    # If no coordination URL is provided, default to using the indexer as
    # coordinator
    if conf.coordination_url is None:
        if conf.storage.driver == "redis":
            conf.set_default("coordination_url",
                             conf.storage.redis_url)
        elif conf.incoming.driver == "redis":
            conf.set_default("coordination_url",
                             conf.incoming.redis_url)
        else:
            parsed = urlparse.urlparse(conf.indexer.url)
            proto, _, _ = parsed.scheme.partition("+")
            parsed = list(parsed)
            # Set proto without the + part
            parsed[0] = proto
            conf.set_default("coordination_url",
                             urlparse.urlunparse(parsed))

    if not skip_log_opts:
        LOG.info("Gnocchi version %s", gnocchi.__version__)
        conf.log_opt_values(LOG, logging.DEBUG)

    return conf
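
Note: a minimal, hypothetical caller for the function above (the attribute lookups assume the option groups registered in example #7; this snippet is not from the project):

import sys

conf = prepare_service(args=sys.argv[1:])
# The returned oslo.config ConfigOpts object exposes the registered groups
# and options as attributes, for example:
print(conf.metricd.workers)
print(conf.coordination_url)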