def main():
    """Entry point for the Barbican Keystone listener service."""
    try:
        config.setup_remote_pydev_debug()
        CONF = config.CONF
        CONF(sys.argv[1:], project='barbican',
             version=version.version_info.version_string)

        # Import and configure logging.
        log.setup(CONF, 'barbican')
        LOG = log.getLogger(__name__)
        LOG.info("Booting up Barbican Keystone listener node...")

        # Queuing initialization
        queue.init(CONF)

        # Only run the listener when the notifications group enables it.
        ks_group = getattr(CONF, queue.KS_NOTIFICATIONS_GRP_NAME)
        if getattr(ks_group, 'enable'):
            # restart_method='mutate' lets SIGHUP reload mutable options.
            service.launch(
                CONF,
                keystone_listener.MessageServer(CONF),
                restart_method='mutate'
            ).wait()
        else:
            LOG.info("Exiting as Barbican Keystone listener is not enabled...")
    except RuntimeError as e:
        fail(1, e)
def _wrapper(global_config, **local_conf):
    """Build the wrapped WSGI app after queue, logging and DB setup."""
    # Queuing initialization (client side only for the API node).
    queue.init(CONF, is_server_side=False)

    # Configure oslo logging and configuration services.
    log.setup(CONF, 'barbican')
    LOG = log.getLogger(__name__)
    config.setup_remote_pydev_debug()

    # Initializing the database engine and session factory before the app
    # starts ensures we don't lose requests due to lazy initialization of
    # db connections.
    try:
        repositories.setup_database_engine_and_factory(
            initialize_secret_stores=True
        )
        repositories.commit()
    except Exception:
        # Roll back the partial sync and surface the failure to the caller.
        LOG.exception('Failed to sync secret_stores table.')
        repositories.rollback()
        raise

    wsgi_app = func(global_config, **local_conf)
    if newrelic_loaded:
        wsgi_app = newrelic.agent.WSGIApplicationWrapper(wsgi_app)
    LOG.info('Barbican app created and initialized')
    return wsgi_app
def create_main_app(global_config, **local_conf):
    """uWSGI factory method for the Barbican-API application."""
    # Queuing initialization (client side only for the API node).
    queue.init(CONF, is_server_side=False)

    # Configure oslo logging and configuration services.
    log.setup(CONF, 'barbican')
    config.setup_remote_pydev_debug()

    # Initializing the database engine and session factory before the app
    # starts ensures we don't lose requests due to lazy initialization of
    # db connections.
    repositories.setup_database_engine_and_factory()

    # Setup app with transactional hook enabled
    wsgi_app = build_wsgi_app(transactional=True)
    if newrelic_loaded:
        wsgi_app = newrelic.agent.WSGIApplicationWrapper(wsgi_app)

    LOG = log.getLogger(__name__)
    LOG.info(u._LI('Barbican app created and initialized'))
    return wsgi_app
def setUp(self):
    """Patch the oslo.messaging server class so no real transport is used."""
    super(WhenUsingMessageServer, self).setUp()
    queue.init(self.conf)

    server_patcher = mock.patch('oslo_messaging.server.MessageHandlingServer')
    server_cls_mock = server_patcher.start()
    self.addCleanup(server_patcher.stop)

    # The lifecycle methods all return None, matching the real server.
    self.msg_server_mock = server_cls_mock()
    self.msg_server_mock.start.return_value = None
    self.msg_server_mock.stop.return_value = None
    self.msg_server_mock.wait.return_value = None
def setUp(self):
    """Patch the oslo.messaging server class so no real transport is used."""
    super(WhenUsingMessageServer, self).setUp()
    queue.init(self.conf)

    server_patcher = mock.patch('oslo.messaging.server.MessageHandlingServer')
    server_cls_mock = server_patcher.start()
    self.addCleanup(server_patcher.stop)

    # The lifecycle methods all return None, matching the real server.
    self.msg_server_mock = server_cls_mock()
    self.msg_server_mock.start.return_value = None
    self.msg_server_mock.stop.return_value = None
    self.msg_server_mock.wait.return_value = None
def setUp(self):
    """Patch the notification listener class so no real transport is used."""
    super(WhenUsingMessageServer, self).setUp()
    queue.init(self.conf)

    server_patcher = mock.patch(
        'oslo_messaging.notify.listener.NotificationServer')
    server_cls_mock = server_patcher.start()
    self.addCleanup(server_patcher.stop)

    # The lifecycle methods all return None, matching the real server.
    self.msg_server_mock = server_cls_mock()
    self.msg_server_mock.start.return_value = None
    self.msg_server_mock.stop.return_value = None
    self.msg_server_mock.wait.return_value = None
def main():
    """Entry point for the Barbican retry/scheduler service."""
    try:
        CONF = config.CONF

        # Import and configure logging.
        log.setup(CONF, 'barbican-retry-scheduler')
        LOG = log.getLogger(__name__)
        LOG.debug("Booting up Barbican worker retry/scheduler node...")

        # Queuing initialization (as a client only).
        queue.init(CONF, is_server_side=False)

        service.launch(CONF, retry_scheduler.PeriodicServer()).wait()
    except RuntimeError as e:
        fail(1, e)
def main():
    """Entry point for the Barbican worker service (multi-worker)."""
    try:
        CONF = config.CONF

        # Import and configure logging.
        log.setup(CONF, "barbican")
        LOG = log.getLogger(__name__)
        LOG.debug("Booting up Barbican worker node...")

        # Queuing initialization
        queue.init(CONF)

        # Fan out across the configured number of async worker processes.
        service.launch(
            CONF,
            server.TaskServer(),
            workers=CONF.queue.asynchronous_workers
        ).wait()
    except RuntimeError as e:
        fail(1, e)
def main():
    """Entry point for the Barbican worker service."""
    try:
        CONF = config.CONF

        # Import and configure logging.
        log.setup(CONF, 'barbican')
        LOG = log.getLogger(__name__)
        LOG.debug("Booting up Barbican worker node...")

        # Queuing initialization
        queue.init(CONF)

        service.launch(CONF, server.TaskServer()).wait()
    except RuntimeError as e:
        fail(1, e)
def create_main_app(global_config, **local_conf):
    """uWSGI factory method for the Barbican-API application"""
    # Configure oslo logging and configuration services.
    config.parse_args()
    log.setup('barbican')

    # Crypto Plugin Manager
    crypto_mgr = ext.CryptoExtensionManager()

    # Queuing initialization
    CONF = cfg.CONF
    queue.init(CONF)

    # Resources
    versions = res.VersionResource()
    secrets = res.SecretsResource(crypto_mgr)
    secret = res.SecretResource(crypto_mgr)
    orders = res.OrdersResource()
    order = res.OrderResource()
    verifications = res.VerificationsResource()
    verification = res.VerificationResource()
    containers = res.ContainersResource()
    container = res.ContainerResource()

    # For performance testing only
    performance = res.PerformanceResource()
    performance_uri = 'mu-1a90dfd0-7e7abba4-4e459908-fc097d60'

    wsgi_app = api = falcon.API()
    if newrelic_loaded:
        wsgi_app = newrelic.agent.WSGIApplicationWrapper(wsgi_app)

    # Register every route with its resource in one pass.
    routes = [
        ('/', versions),
        ('/v1/{keystone_id}/secrets', secrets),
        ('/v1/{keystone_id}/secrets/{secret_id}', secret),
        ('/v1/{keystone_id}/orders', orders),
        ('/v1/{keystone_id}/orders/{order_id}', order),
        ('/v1/{keystone_id}/verifications', verifications),
        ('/v1/{keystone_id}/verifications/{verification_id}', verification),
        ('/v1/{keystone_id}/containers/', containers),
        ('/v1/{keystone_id}/containers/{container_id}', container),
        # For performance testing only
        ('/{0}'.format(performance_uri), performance),
    ]
    for uri, resource in routes:
        api.add_route(uri, resource)

    return wsgi_app
def main():
    """Entry point for the Barbican retry/scheduler service."""
    try:
        CONF = config.CONF

        # Import and configure logging.
        log.setup(CONF, 'barbican-retry-scheduler')
        LOG = log.getLogger(__name__)
        LOG.debug("Booting up Barbican worker retry/scheduler node...")

        # Queuing initialization (as a client only).
        queue.init(CONF, is_server_side=False)

        launcher = service.launch(CONF, retry_scheduler.PeriodicServer())
        launcher.wait()
    except RuntimeError as e:
        fail(1, e)
def main():
    """Entry point for the Barbican worker service."""
    try:
        CONF = config.CONF
        CONF(sys.argv[1:], project='barbican',
             version=version.version_info.version_string)

        # Import and configure logging.
        log.setup(CONF, 'barbican')
        LOG = log.getLogger(__name__)
        LOG.debug("Booting up Barbican worker node...")

        # Queuing initialization
        queue.init(CONF)

        # Fan out across the configured number of async worker processes.
        service.launch(
            CONF,
            server.TaskServer(),
            workers=CONF.queue.asynchronous_workers
        ).wait()
    except RuntimeError as e:
        fail(1, e)
def main():
    """Entry point for the Barbican retry/scheduler service."""
    try:
        CONF = config.CONF
        CONF(sys.argv[1:], project='barbican',
             version=version.version_info.version_string)

        # Import and configure logging.
        log.setup(CONF, 'barbican-retry-scheduler')
        LOG = log.getLogger(__name__)
        LOG.debug("Booting up Barbican worker retry/scheduler node...")

        # Queuing initialization (as a client only).
        queue.init(CONF, is_server_side=False)

        # restart_method='mutate' lets SIGHUP reload mutable options.
        service.launch(
            CONF,
            retry_scheduler.PeriodicServer(),
            restart_method='mutate'
        ).wait()
    except RuntimeError as e:
        fail(1, e)
def main():
    """Entry point for the Barbican retry/scheduler service."""
    try:
        CONF = config.CONF
        CONF(sys.argv[1:], project='barbican',
             version=version.version_info.version_string)

        # Import and configure logging.
        log.setup(CONF, 'barbican-retry-scheduler')
        LOG = log.getLogger(__name__)
        LOG.debug("Booting up Barbican worker retry/scheduler node...")

        # Queuing initialization (as a client only).
        queue.init(CONF, is_server_side=False)

        # restart_method='mutate' lets SIGHUP reload mutable options.
        launcher = service.launch(CONF, retry_scheduler.PeriodicServer(),
                                  restart_method='mutate')
        launcher.wait()
    except RuntimeError as e:
        fail(1, e)
def _wrapper(global_config, **local_conf):
    """Build the wrapped WSGI app after queue, logging and DB setup."""
    # Queuing initialization (client side only for the API node).
    queue.init(CONF, is_server_side=False)

    # Configure oslo logging and configuration services.
    log.setup(CONF, 'barbican')
    config.setup_remote_pydev_debug()

    # Initializing the database engine and session factory before the app
    # starts ensures we don't lose requests due to lazy initialization of
    # db connections.
    repositories.setup_database_engine_and_factory()

    wsgi_app = func(global_config, **local_conf)
    if newrelic_loaded:
        wsgi_app = newrelic.agent.WSGIApplicationWrapper(wsgi_app)

    LOG = log.getLogger(__name__)
    LOG.info(u._LI('Barbican app created and initialized'))
    return wsgi_app
def _wrapper(global_config, **local_conf):
    """Build the wrapped WSGI app after queue, logging and DB setup."""
    # Queuing initialization (client side only for the API node).
    queue.init(CONF, is_server_side=False)

    # Configure oslo logging and configuration services.
    log.setup(CONF, 'barbican')
    config.setup_remote_pydev_debug()

    # Initializing the database engine and session factory up front
    # ensures we don't lose requests due to lazy initialization of
    # db connections.
    repositories.setup_database_engine_and_factory()

    wrapped_app = func(global_config, **local_conf)
    if newrelic_loaded:
        wrapped_app = newrelic.agent.WSGIApplicationWrapper(wrapped_app)

    LOG = log.getLogger(__name__)
    LOG.info(u._LI('Barbican app created and initialized'))
    return wrapped_app
def create_main_app(global_config, **local_conf):
    """uWSGI factory method for the Barbican-API application."""
    # Configure oslo logging and configuration services.
    config.parse_args()
    log.setup('barbican')
    config.setup_remote_pydev_debug()

    # Queuing initialization
    CONF = cfg.CONF
    queue.init(CONF)

    # Wire the Pecan controller tree for the v1 resources.
    class RootController(object):
        secrets = secrets.SecretsController()
        orders = orders.OrdersController()
        containers = containers.ContainersController()
        transport_keys = transportkeys.TransportKeysController()

    wsgi_app = PecanAPI(RootController(), force_canonical=False)
    if not newrelic_loaded:
        return wsgi_app
    return newrelic.agent.WSGIApplicationWrapper(wsgi_app)
def main():
    """Entry point for the Barbican worker service."""
    try:
        CONF = config.CONF
        CONF(sys.argv[1:], project='barbican',
             version=version.version_info.version_string)

        # Import and configure logging.
        log.setup(CONF, 'barbican')
        LOG = log.getLogger(__name__)
        LOG.debug("Booting up Barbican worker node...")

        # Queuing initialization
        queue.init(CONF)

        # Multiple async workers; 'mutate' lets SIGHUP reload mutable
        # config options.
        launcher = service.launch(CONF,
                                  server.TaskServer(),
                                  workers=CONF.queue.asynchronous_workers,
                                  restart_method='mutate')
        launcher.wait()
    except RuntimeError as e:
        fail(1, e)
def create_main_app(global_config, **local_conf):
    """uWSGI factory method for the Barbican-API application."""
    # Configure oslo logging and configuration services.
    config.parse_args()
    log.setup('barbican')
    config.setup_remote_pydev_debug()

    # Queuing initialization (client side only for the API node).
    CONF = cfg.CONF
    queue.init(CONF, is_server_side=False)

    # Wire the Pecan controller tree for the v1 resources.
    class RootController(object):
        secrets = secrets.SecretsController()
        orders = orders.OrdersController()
        containers = containers.ContainersController()
        transport_keys = transportkeys.TransportKeysController()

    wsgi_app = PecanAPI(RootController(),
                        is_transactional=True,
                        force_canonical=False)
    if not newrelic_loaded:
        return wsgi_app
    return newrelic.agent.WSGIApplicationWrapper(wsgi_app)
def main():
    """Entry point for the Barbican Keystone listener service."""
    try:
        config.setup_remote_pydev_debug()
        CONF = config.CONF

        # Import and configure logging.
        log.setup(CONF, 'barbican')
        LOG = log.getLogger(__name__)
        LOG.info(u._LI("Booting up Barbican Keystone listener node..."))

        # Queuing initialization
        queue.init(CONF)

        # Only run the listener when the notifications group enables it.
        ks_group = getattr(CONF, queue.KS_NOTIFICATIONS_GRP_NAME)
        if getattr(ks_group, 'enable'):
            service.launch(CONF, keystone_listener.MessageServer(CONF)).wait()
        else:
            LOG.info(
                u._LI("Exiting as Barbican Keystone listener"
                      " is not enabled..."))
    except RuntimeError as e:
        fail(1, e)
from barbican import queue
from barbican.queue import retry_scheduler
from oslo_config import cfg
from oslo_log import log


def fail(returncode, e):
    """Report the error on stderr and exit with the given return code."""
    sys.stderr.write("ERROR: {0}\n".format(e))
    sys.exit(returncode)


if __name__ == '__main__':
    try:
        config.parse_args()
        CONF = cfg.CONF

        # Import and configure logging.
        log.setup(CONF, 'barbican-retry-scheduler')
        LOG = log.getLogger(__name__)
        LOG.debug("Booting up Barbican worker retry/scheduler node...")

        # Queuing initialization (as a client only).
        queue.init(CONF, is_server_side=False)

        service.launch(
            retry_scheduler.PeriodicServer()
        ).wait()
    except RuntimeError as e:
        fail(1, e)
from oslo_service import service


def fail(returncode, e):
    """Report the error on stderr and exit with the given return code."""
    sys.stderr.write("ERROR: {0}\n".format(e))
    sys.exit(returncode)


if __name__ == '__main__':
    try:
        config.setup_remote_pydev_debug()

        # Import and configure logging.
        log.setup('barbican')
        LOG = log.getLogger(__name__)
        LOG.info("Booting up Barbican Keystone listener node...")

        # Queuing initialization
        CONF = config.CONF
        queue.init(CONF)

        # Only run the listener when the notifications group enables it.
        ks_group = getattr(CONF, queue.KS_NOTIFICATIONS_GRP_NAME)
        if getattr(ks_group, 'enable'):
            service.launch(
                CONF,
                keystone_listener.MessageServer(CONF)
            ).wait()
        else:
            LOG.info("Exiting as Barbican Keystone listener is not enabled...")
    except RuntimeError as e:
        fail(1, e)
def fail(returncode, e):
    """Report the error on stderr and exit with the given return code."""
    sys.stderr.write("ERROR: {0}\n".format(e))
    sys.exit(returncode)


if __name__ == '__main__':
    try:
        config.parse_args()
        config.setup_remote_pydev_debug()

        # Import and configure logging.
        log.setup('barbican')
        LOG = log.getLogger(__name__)
        LOG.info("Booting up Barbican Keystone listener node...")

        # Queuing initialization
        CONF = cfg.CONF
        queue.init(CONF)

        # Only run the listener when the notifications group enables it.
        ks_group = getattr(CONF, queue.KS_NOTIFICATIONS_GRP_NAME)
        if getattr(ks_group, 'enable'):
            service.launch(
                keystone_listener.MessageServer(CONF)
            ).wait()
        else:
            LOG.info("Exiting as Barbican Keystone listener is not enabled...")
    except RuntimeError as e:
        fail(1, e)