def main():
    cfg.parse_args(sys.argv)
    logging.setup(CONF, None)
    debug_utils.setup()

    from trove.guestagent import dbaas
    manager = dbaas.datastore_registry().get(CONF.datastore_manager)
    if not manager:
        msg = ("Manager class not registered for datastore manager %s" %
               CONF.datastore_manager)
        raise RuntimeError(msg)

    # rpc module must be loaded after decision about thread monkeypatching
    # because if thread module is not monkeypatched we can't use eventlet
    # executor from oslo_messaging library.
    from trove import rpc
    rpc.init(CONF)

    from trove.common.rpc import service as rpc_service
    from trove.common.rpc import version as rpc_version
    server = rpc_service.RpcService(
        manager=manager, host=CONF.guest_id,
        rpc_api_version=rpc_version.RPC_API_VERSION)
    launcher = openstack_service.launch(CONF, server)
    launcher.wait()
def initialize(extra_opts=None, pre_logging=None):
    # Import only the modules necessary to initialize logging and determine if
    # debug_utils are enabled.
    import sys

    from oslo_log import log as logging

    from trove.common import cfg
    from trove.common import debug_utils

    conf = cfg.CONF

    if extra_opts:
        conf.register_cli_opts(extra_opts)

    cfg.parse_args(sys.argv)
    if pre_logging:
        pre_logging(conf)

    logging.setup(conf, None)
    debug_utils.setup()

    # rpc module must be loaded after decision about thread monkeypatching
    # because if thread module is not monkeypatched we can't use eventlet
    # executor from oslo_messaging library.
    from trove import rpc
    rpc.init(conf)

    # Initialize Trove database.
    from trove.db import get_db_api
    get_db_api().configure_db(conf)

    return conf  # May be used by other scripts
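# Hedged usage sketch, not from the original sources: a console-script entry
# point could call the initialize() helper above before building its service.
# The option name 'example_opt' and the run_example_service() wrapper are
# assumptions for illustration only.
def run_example_service():
    from oslo_config import cfg as oslo_cfg

    # Hypothetical extra CLI option registered before argument parsing.
    extra = [oslo_cfg.StrOpt('example_opt', default='value')]
    conf = initialize(extra_opts=extra,
                      pre_logging=lambda c: None)  # no-op pre-logging hook
    # At this point logging, RPC, and the Trove database are configured;
    # conf can now be used to construct and launch the actual service.
    return conf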
def main():
    cfg.parse_args(sys.argv)
    logging.setup(CONF, None)
    debug_utils.setup()

    from trove.guestagent import dbaas
    manager = dbaas.datastore_registry().get(CONF.datastore_manager)
    if not manager:
        msg = (_LE("Manager class not registered for datastore manager %s") %
               CONF.datastore_manager)
        raise RuntimeError(msg)

    if not CONF.guest_id:
        msg = (_LE("The guest_id parameter is not set. guest_info.conf "
                   "was not injected into the guest or not read by "
                   "guestagent"))
        raise RuntimeError(msg)

    # BUG(1650518): Cleanup in the Pike release
    # make it fatal if CONF.instance_rpc_encr_key is None

    # rpc module must be loaded after decision about thread monkeypatching
    # because if thread module is not monkeypatched we can't use eventlet
    # executor from oslo_messaging library.
    from trove import rpc
    rpc.init(CONF)

    from trove.common.rpc import service as rpc_service
    server = rpc_service.RpcService(
        key=CONF.instance_rpc_encr_key,
        topic="guestagent.%s" % CONF.guest_id,
        manager=manager, host=CONF.guest_id,
        rpc_api_version=guest_api.API.API_LATEST_VERSION)

    launcher = openstack_service.launch(CONF, server)
    launcher.wait()
def get_server(target, endpoints, key, serializer=None,
               secure_serializer=ssz.SecureSerializer):
    assert TRANSPORT is not None

    # Thread module is not monkeypatched if remote debugging is enabled.
    # Using the eventlet executor without monkeypatching the thread module
    # will lead to unpredictable results.
    from trove.common import debug_utils
    debug_utils.setup()

    executor = "blocking" if debug_utils.enabled() else "eventlet"

    # BUG(1650518): Cleanup in the Pike release
    # uncomment this (following) line in the pike release
    # assert key is not None
    serializer = secure_serializer(
        sz.TroveRequestContextSerializer(serializer), key)

    return messaging.get_rpc_server(TRANSPORT, target, endpoints,
                                    executor=executor,
                                    serializer=serializer)
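# Hedged usage sketch, not from the original sources: a caller (for example
# the RpcService wrapper) could build an oslo_messaging Target and hand it to
# get_server() above. The topic/server values and the ExampleEndpoint class
# are assumptions for illustration only.
def build_example_server(key):
    import oslo_messaging as messaging_lib

    class ExampleEndpoint(object):
        # Trivial endpoint exposing a single RPC method.
        def ping(self, context):
            return 'pong'

    target = messaging_lib.Target(topic='guestagent.example-guest-id',
                                  server='example-guest-id')
    return get_server(target, [ExampleEndpoint()], key)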
def main():
    cfg.parse_args(sys.argv)
    logging.setup(CONF, None)
    debug_utils.setup()

    # Patch 'thread' module if debug is disabled
    if not debug_utils.enabled():
        eventlet.monkey_patch(thread=True)

    from trove.guestagent import dbaas
    manager = dbaas.datastore_registry().get(CONF.datastore_manager)
    if not manager:
        msg = ("Manager class not registered for datastore manager %s" %
               CONF.datastore_manager)
        raise RuntimeError(msg)

    # rpc module must be loaded after decision about thread monkeypatching
    # because if thread module is not monkeypatched we can't use eventlet
    # executor from oslo_messaging library.
    from trove import rpc
    rpc.init(CONF)

    from trove.common.rpc import service as rpc_service
    from trove.common.rpc import version as rpc_version
    server = rpc_service.RpcService(
        manager=manager, host=CONF.guest_id,
        rpc_api_version=rpc_version.RPC_API_VERSION)
    launcher = openstack_service.launch(CONF, server)
    launcher.wait()
def main():
    log_levels = [
        'docker=WARN',
    ]
    default_log_levels = logging.get_default_log_levels()
    default_log_levels.extend(log_levels)
    logging.set_defaults(default_log_levels=default_log_levels)
    logging.register_options(CONF)

    cfg.parse_args(sys.argv)
    logging.setup(CONF, None)
    debug_utils.setup()

    from trove.guestagent import dbaas
    manager = dbaas.datastore_registry().get(CONF.datastore_manager)
    if not manager:
        msg = (_("Manager class not registered for datastore manager %s") %
               CONF.datastore_manager)
        raise RuntimeError(msg)
    if not CONF.guest_id:
        msg = (_("The guest_id parameter is not set. guest_info.conf "
                 "was not injected into the guest or not read by guestagent"))
        raise RuntimeError(msg)

    # Create user and group for running docker container.
    LOG.info('Creating user and group for database service')
    uid = cfg.get_configuration_property('database_service_uid')
    operating_system.create_user('database', uid)

    # Mount device if needed.
    # When doing rebuild, the device should be already formatted but not
    # mounted.
    device_path = CONF.get(CONF.datastore_manager).device_path
    mount_point = CONF.get(CONF.datastore_manager).mount_point
    device = volume.VolumeDevice(device_path)
    if not device.mount_points(device_path):
        LOG.info('Preparing the storage for %s, mount path %s', device_path,
                 mount_point)

        device.format()
        device.mount(mount_point)
        operating_system.chown(mount_point, CONF.database_service_uid,
                               CONF.database_service_uid,
                               recursive=True, as_root=True)

    # rpc module must be loaded after decision about thread monkeypatching
    # because if thread module is not monkeypatched we can't use eventlet
    # executor from oslo_messaging library.
    from trove import rpc
    rpc.init(CONF)

    from trove.common.rpc import service as rpc_service
    server = rpc_service.RpcService(
        key=CONF.instance_rpc_encr_key,
        topic="guestagent.%s" % CONF.guest_id,
        manager=manager, host=CONF.guest_id,
        rpc_api_version=guest_api.API.API_LATEST_VERSION)

    launcher = openstack_service.launch(CONF, server, restart_method='mutate')
    launcher.wait()
def main():
    cfg.parse_args(sys.argv)
    logging.setup(CONF, None)
    debug_utils.setup()

    from trove.guestagent import dbaas
    manager = dbaas.datastore_registry().get(CONF.datastore_manager)
    if not manager:
        msg = (_("Manager class not registered for datastore manager %s") %
               CONF.datastore_manager)
        raise RuntimeError(msg)
    if not CONF.guest_id:
        msg = (_("The guest_id parameter is not set. guest_info.conf "
                 "was not injected into the guest or not read by guestagent"))
        raise RuntimeError(msg)

    # BUG(1650518): Cleanup in the Pike release
    # make it fatal if CONF.instance_rpc_encr_key is None

    # rpc module must be loaded after decision about thread monkeypatching
    # because if thread module is not monkeypatched we can't use eventlet
    # executor from oslo_messaging library.
    from trove import rpc
    rpc.init(CONF)

    from trove.common.rpc import service as rpc_service
    server = rpc_service.RpcService(
        key=CONF.instance_rpc_encr_key,
        topic="guestagent.%s" % CONF.guest_id,
        manager=manager, host=CONF.guest_id,
        rpc_api_version=guest_api.API.API_LATEST_VERSION)

    launcher = openstack_service.launch(CONF, server, restart_method='mutate')
    launcher.wait()
def main():
    cfg.parse_args(sys.argv)
    logging.setup(None)
    debug_utils.setup()

    # Patch 'thread' module if debug is disabled
    if not debug_utils.enabled():
        eventlet.monkey_patch(thread=True)

    launch_services()
def main():
    cfg.parse_args(sys.argv)
    logging.setup(None)
    debug_utils.setup()

    get_db_api().configure_db(CONF)
    conf_file = CONF.find_file(CONF.api_paste_config)
    launcher = wsgi.launch('trove', CONF.bind_port or 8779, conf_file,
                           workers=CONF.trove_api_workers)
    launcher.wait()
def get_server(target, endpoints, serializer=None):
    assert TRANSPORT is not None

    # Thread module is not monkeypatched if remote debugging is enabled.
    # Using the eventlet executor without monkeypatching the thread module
    # will lead to unpredictable results.
    from trove.common import debug_utils
    debug_utils.setup()

    executor = "blocking" if debug_utils.enabled() else "eventlet"

    serializer = RequestContextSerializer(serializer)
    return messaging.get_rpc_server(TRANSPORT, target, endpoints,
                                    executor=executor,
                                    serializer=serializer)
def initialize(extra_opts=None, pre_logging=None):
    # Initialize localization support (the underscore character).
    import gettext
    gettext.install('trove', unicode=1)

    # Apply whole eventlet.monkey_patch excluding 'thread' module.
    # Decision for 'thread' module patching will be made
    # after debug_utils is set up.
    import eventlet
    eventlet.monkey_patch(all=True, thread=False)

    # Import only the modules necessary to initialize logging and determine if
    # debug_utils are enabled.
    import sys

    from oslo_log import log as logging

    from trove.common import cfg
    from trove.common import debug_utils

    conf = cfg.CONF

    if extra_opts:
        conf.register_cli_opts(extra_opts)

    cfg.parse_args(sys.argv)
    if pre_logging:
        pre_logging(conf)

    logging.setup(conf, None)
    debug_utils.setup()

    # Patch 'thread' module if debug is disabled.
    if not debug_utils.enabled():
        eventlet.monkey_patch(thread=True)

    # rpc module must be loaded after decision about thread monkeypatching
    # because if thread module is not monkeypatched we can't use eventlet
    # executor from oslo_messaging library.
    from trove import rpc
    rpc.init(conf)

    # Initialize Trove database.
    from trove.db import get_db_api
    get_db_api().configure_db(conf)

    return conf  # May be used by other scripts
def startup(topic):
    cfg.parse_args(sys.argv)
    logging.setup(None)
    debug_utils.setup()

    # Patch 'thread' module if debug is disabled
    if not debug_utils.enabled():
        eventlet.monkey_patch(thread=True)

    from trove.common.rpc import service as rpc_service
    from trove.openstack.common import service as openstack_service
    from trove.db import get_db_api

    get_db_api().configure_db(CONF)
    server = rpc_service.RpcService(manager=CONF.taskmanager_manager,
                                    topic=topic)
    launcher = openstack_service.launch(server)
    launcher.wait()
def main():
    cfg.parse_args(sys.argv)
    from trove.guestagent import dbaas
    logging.setup(None)
    debug_utils.setup()

    # Patch 'thread' module if debug is disabled
    if not debug_utils.enabled():
        eventlet.monkey_patch(thread=True)

    manager = dbaas.datastore_registry().get(CONF.datastore_manager)
    if not manager:
        msg = ("Manager class not registered for datastore manager %s" %
               CONF.datastore_manager)
        raise RuntimeError(msg)

    server = rpc_service.RpcService(manager=manager, host=CONF.guest_id)
    launcher = openstack_service.launch(server)
    launcher.wait()
def initialize(extra_opts=None, pre_logging=None):
    # Initialize localization support (the underscore character).
    import gettext
    gettext.install('trove', unicode=1)

    # Apply whole eventlet.monkey_patch excluding 'thread' module.
    # Decision for 'thread' module patching will be made
    # after debug_utils is set up.
    import eventlet
    eventlet.monkey_patch(all=True, thread=False)

    # Import only the modules necessary to initialize logging and determine if
    # debug_utils are enabled.
    import sys

    from trove.common import cfg
    from trove.common import debug_utils
    from trove.openstack.common import log as logging

    conf = cfg.CONF

    if extra_opts:
        conf.register_cli_opts(extra_opts)

    cfg.parse_args(sys.argv)
    if pre_logging:
        pre_logging(conf)

    # Fore. 2014/7/3. krds patch. Patch here (side-effect import) to make
    # everything work well.
    from trove.patch import patch

    logging.setup(None)
    debug_utils.setup()

    # Patch 'thread' module if debug is disabled
    if not debug_utils.enabled():
        eventlet.monkey_patch(thread=True)

    # Initialize Trove database.
    from trove.db import get_db_api
    get_db_api().configure_db(conf)

    return conf  # May be used by other scripts