def _setup():
    """Perform common auth-service setup, then validate the configured auth mode."""
    common_setup(
        service='auth',
        config=config,
        setup_db=True,
        register_mq_exchanges=False,
        register_signal_handlers=True,
        register_internal_trigger_types=False,
        run_migrations=False,
    )

    # Fail fast if an unsupported auth mode has been configured.
    if cfg.CONF.auth.mode not in VALID_MODES:
        raise ValueError('Valid modes are: %s' % (','.join(VALID_MODES)))
def _setup():
    """Run the shared service bootstrap for the garbage collector process."""
    setup_kwargs = dict(
        service='garbagecollector',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        register_runners=False,
    )
    common_setup(**setup_kwargs)
def setup_app(config=None):
    """Build and return the st2api WSGI application.

    Assembles the OpenAPI router and wraps it with the middleware stack.

    :param config: Optional app config dict; under gunicorn it carries
                   ``is_gunicorn`` and ``config_args`` entries.
    """
    config = config or {}

    LOG.info("Creating st2api: %s as OpenAPI app.", VERSION_STRING)

    is_gunicorn = config.get("is_gunicorn", False)
    if is_gunicorn:
        # NOTE: We only want to perform this logic in the WSGI worker
        st2api_config.register_opts(ignore_errors=True)

        service_caps = {
            "name": "api",
            "listen_host": cfg.CONF.api.host,
            "listen_port": cfg.CONF.api.port,
            "type": "active",
        }

        # This should be called in gunicorn case because we only want
        # workers to connect to db, rabbbitmq etc. In standalone HTTP
        # server case, this setup would have already occurred.
        common_setup(
            service="api",
            config=st2api_config,
            setup_db=True,
            register_mq_exchanges=True,
            register_signal_handlers=True,
            register_internal_trigger_types=True,
            run_migrations=True,
            service_registry=True,
            capabilities=service_caps,
            config_args=config.get("config_args", None),
        )

    # Additional pre-run time checks
    validate_rbac_is_correctly_configured()

    api_router = Router(
        debug=cfg.CONF.api.debug,
        auth=cfg.CONF.auth.enable,
        is_gunicorn=is_gunicorn,
    )

    api_spec = spec_loader.load_spec("st2common", "openapi.yaml.j2")
    path_transforms = {
        "^/api/v1/$": ["/v1"],
        "^/api/v1/": ["/", "/v1/"],
        "^/api/v1/executions": ["/actionexecutions", "/v1/actionexecutions"],
        "^/api/exp/": ["/exp/"],
    }
    api_router.add_spec(api_spec, transforms=path_transforms)

    # Order is important. Check middleware for detailed explanation.
    wsgi_app = api_router.as_wsgi
    wsgi_app = StreamingMiddleware(wsgi_app, path_whitelist=["/v1/executions/*/output*"])
    wsgi_app = ErrorHandlingMiddleware(wsgi_app)
    wsgi_app = CorsMiddleware(wsgi_app)
    wsgi_app = LoggingMiddleware(wsgi_app, api_router)
    wsgi_app = ResponseInstrumentationMiddleware(wsgi_app, api_router, service_name="api")
    wsgi_app = RequestIDMiddleware(wsgi_app)
    wsgi_app = RequestInstrumentationMiddleware(wsgi_app, api_router, service_name="api")

    return wsgi_app
def _setup():
    """Bootstrap the actionrunner service and install its SIGTERM handler."""
    common_setup(
        service='actionrunner',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
    )
    _setup_sigterm_handler()
def _setup():
    """Run the shared service bootstrap for the API process."""
    setup_kwargs = dict(
        service='api',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        register_internal_trigger_types=True,
    )
    common_setup(**setup_kwargs)
def setup_app(config=None):
    """Create and return the st2auth Pecan application.

    :param config: Optional Pecan config object. Falsy means standalone HTTP
                   server mode; under gunicorn ``config.is_gunicorn`` is set.
    """
    LOG.info('Creating st2auth: %s as Pecan app.', VERSION_STRING)

    is_gunicorn = getattr(config, 'is_gunicorn', False)
    if is_gunicorn:
        # This should be called in gunicorn case because we only want
        # workers to connect to db, rabbbitmq etc. In standalone HTTP
        # server case, this setup would have already occurred.
        st2auth_config.register_opts()
        common_setup(
            service='auth',
            config=st2auth_config,
            setup_db=True,
            register_mq_exchanges=False,
            register_signal_handlers=True,
            register_internal_trigger_types=False,
            run_migrations=False,
            config_args=config.config_args,
        )

    if not config:
        # standalone HTTP server case
        config = _get_pecan_config()
    elif is_gunicorn:
        # gunicorn case
        config.app = _get_pecan_config().app

    pecan_opts = dict(config.app)
    active_hooks = [hooks.JSONErrorResponseHook(), hooks.CorsHook()]

    pecan_app = pecan.make_app(
        pecan_opts.pop('root'),
        logging=getattr(config, 'logging', {}),
        hooks=active_hooks,
        **pecan_opts
    )

    LOG.info('%s app created.' % __name__)
    return pecan_app
def _setup():
    """Bootstrap the timer engine and register it as a passive service."""
    service_info = {'name': 'timerengine', 'type': 'passive'}
    common_setup(
        service='timer_engine',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        service_registry=True,
        capabilities=service_info,
    )
def _setup():
    """Bootstrap the notifier and register it as a passive service."""
    service_info = {'name': 'notifier', 'type': 'passive'}
    common_setup(
        service='notifier',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        service_registry=True,
        capabilities=service_info,
    )
def setup_app(config=None):
    """Build and return the st2api WSGI application (OpenAPI router + middleware).

    :param config: Optional app config dict; under gunicorn it carries
                   ``is_gunicorn`` and ``config_args`` entries.
    """
    # NOTE: the default was previously a mutable ``{}`` literal shared across
    # calls; use None and normalize instead (backward-compatible for callers).
    config = config or {}

    LOG.info('Creating st2api: %s as OpenAPI app.', VERSION_STRING)

    is_gunicorn = config.get('is_gunicorn', False)
    if is_gunicorn:
        # Note: We need to perform monkey patching in the worker. If we do it in
        # the master process (gunicorn_config.py), it breaks tons of things
        # including shutdown
        monkey_patch()

        st2api_config.register_opts()
        capabilities = {
            'name': 'api',
            'listen_host': cfg.CONF.api.host,
            'listen_port': cfg.CONF.api.port,
            'type': 'active'
        }
        # This should be called in gunicorn case because we only want
        # workers to connect to db, rabbbitmq etc. In standalone HTTP
        # server case, this setup would have already occurred.
        common_setup(service='api', config=st2api_config, setup_db=True,
                     register_mq_exchanges=True,
                     register_signal_handlers=True,
                     register_internal_trigger_types=True,
                     run_migrations=True,
                     service_registry=True,
                     capabilities=capabilities,
                     config_args=config.get('config_args', None))

    # Additional pre-run time checks
    validate_rbac_is_correctly_configured()

    router = Router(debug=cfg.CONF.api.debug, auth=cfg.CONF.auth.enable,
                    is_gunicorn=is_gunicorn)

    spec = spec_loader.load_spec('st2common', 'openapi.yaml.j2')
    transforms = {
        '^/api/v1/$': ['/v1'],
        '^/api/v1/': ['/', '/v1/'],
        '^/api/v1/executions': ['/actionexecutions', '/v1/actionexecutions'],
        '^/api/exp/': ['/exp/']
    }
    router.add_spec(spec, transforms=transforms)

    app = router.as_wsgi

    # Order is important. Check middleware for detailed explanation.
    app = StreamingMiddleware(app, path_whitelist=['/v1/executions/*/output*'])
    app = ErrorHandlingMiddleware(app)
    app = CorsMiddleware(app)
    app = LoggingMiddleware(app, router)
    app = ResponseInstrumentationMiddleware(app, router, service_name='api')
    app = RequestIDMiddleware(app)
    app = RequestInstrumentationMiddleware(app, router, service_name='api')

    return app
def _setup():
    """Bootstrap the auth service and verify the auth backend configuration."""
    common_setup(
        service='auth',
        config=config,
        setup_db=True,
        register_mq_exchanges=False,
        register_signal_handlers=True,
        register_internal_trigger_types=False,
        run_migrations=False,
    )

    # Additional pre-run time checks
    validate_auth_backend_is_correctly_configured()
def _setup():
    """Run the shared service bootstrap for the sensor container."""
    setup_kwargs = dict(
        service='sensorcontainer',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        register_runners=False,
    )
    common_setup(**setup_kwargs)
def main():
    """Emit a set of ASCII and unicode log messages for manual log-output testing."""
    cfg.CONF.set_override("debug", True)

    common_setup(
        service="test",
        config=config,
        setup_db=False,
        run_migrations=False,
        register_runners=False,
        register_internal_trigger_types=False,
        register_mq_exchanges=False,
        register_signal_handlers=False,
        service_registry=False,
        config_args=["--config-file", ST2_CONFIG_DEBUG_LL_PATH],
    )

    LOG.info("Test info message 1")
    LOG.debug("Test debug message 1")

    # 1. Actual unicode sequence
    LOG.info("Test info message with unicode 1 - 好好好")
    LOG.debug("Test debug message with unicode 1 - 好好好")

    # 2. Ascii escape sequence (same escaping computed once, logged twice)
    escaped = "好好好".encode("ascii", "backslashreplace").decode(
        "ascii", "backslashreplace"
    )
    LOG.info("Test info message with unicode 1 - " + escaped)
    LOG.debug("Test debug message with unicode 1 - " + escaped)
def setup():
    """Bootstrap the workflow engine and install its SIGTERM handler."""
    common_setup(
        service='workflow_engine',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
    )
    setup_sigterm_handler()
def _setup():
    """Run the shared service bootstrap for the exporter process."""
    setup_kwargs = dict(
        service="exporter",
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
    )
    common_setup(**setup_kwargs)
def _setup():
    """Run the shared service bootstrap for the rules engine."""
    common_setup(
        service='rulesengine',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        register_internal_trigger_types=True,
        register_runners=False,
    )
def _setup():
    """Run the shared service bootstrap for the stream service."""
    setup_kwargs = dict(
        service='stream',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        register_internal_trigger_types=False,
        run_migrations=False,
    )
    common_setup(**setup_kwargs)
def _setup():
    """Bootstrap the rules engine and register it as a passive service."""
    service_info = {'name': 'rulesengine', 'type': 'passive'}
    common_setup(
        service='rulesengine',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        register_internal_trigger_types=True,
        register_runners=False,
        service_registry=True,
        capabilities=service_info,
    )
def _setup():
    """Bootstrap the sensor container and register it as a passive service."""
    service_info = {'name': 'sensorcontainer', 'type': 'passive'}
    common_setup(
        service='sensorcontainer',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        register_runners=False,
        service_registry=True,
        capabilities=service_info,
    )
def _setup():
    """Run the shared service bootstrap for the API process."""
    common_setup(
        service='api',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        register_internal_trigger_types=True,
    )
def _setup():
    """Bootstrap the actionrunner, register it as a passive service, and
    install its SIGTERM handler."""
    service_info = {'name': 'actionrunner', 'type': 'passive'}
    common_setup(
        service='actionrunner',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        service_registry=True,
        capabilities=service_info,
    )
    _setup_sigterm_handler()
def setup_app(config=None):
    """Build and return the st2auth WSGI application.

    Assembles the OpenAPI router and wraps it with the middleware stack.

    :param config: Optional app config dict; under gunicorn it carries
                   ``is_gunicorn`` and ``config_args`` entries.
    """
    config = config or {}

    LOG.info("Creating st2auth: %s as OpenAPI app.", VERSION_STRING)

    is_gunicorn = config.get("is_gunicorn", False)
    if is_gunicorn:
        # NOTE: We only want to perform this logic in the WSGI worker
        st2auth_config.register_opts(ignore_errors=True)

        service_caps = {
            "name": "auth",
            "listen_host": cfg.CONF.auth.host,
            "listen_port": cfg.CONF.auth.port,
            "listen_ssl": cfg.CONF.auth.use_ssl,
            "type": "active",
        }

        # This should be called in gunicorn case because we only want
        # workers to connect to db, rabbbitmq etc. In standalone HTTP
        # server case, this setup would have already occurred.
        common_setup(
            service="auth",
            config=st2auth_config,
            setup_db=True,
            register_mq_exchanges=False,
            register_signal_handlers=True,
            register_internal_trigger_types=False,
            run_migrations=False,
            service_registry=True,
            capabilities=service_caps,
            config_args=config.get("config_args", None),
        )

    # pysaml2 uses subprocess communicate which calls communicate_with_poll
    if cfg.CONF.auth.sso and cfg.CONF.auth.sso_backend == "saml2":
        use_select_poll_workaround(nose_only=False)

    # Additional pre-run time checks
    validate_auth_backend_is_correctly_configured()

    auth_router = Router(debug=cfg.CONF.auth.debug, is_gunicorn=is_gunicorn)

    auth_spec = spec_loader.load_spec("st2common", "openapi.yaml.j2")
    path_transforms = {"^/auth/v1/": ["/", "/v1/"]}
    auth_router.add_spec(auth_spec, transforms=path_transforms)

    # Order is important. Check middleware for detailed explanation.
    wsgi_app = auth_router.as_wsgi
    wsgi_app = ErrorHandlingMiddleware(wsgi_app)
    wsgi_app = CorsMiddleware(wsgi_app)
    wsgi_app = LoggingMiddleware(wsgi_app, auth_router)
    wsgi_app = ResponseInstrumentationMiddleware(wsgi_app, auth_router, service_name="auth")
    wsgi_app = RequestIDMiddleware(wsgi_app)
    wsgi_app = RequestInstrumentationMiddleware(wsgi_app, auth_router, service_name="auth")

    return wsgi_app
def _setup():
    """Bootstrap the garbage collector and register it as a passive service."""
    service_info = {'name': 'garbagecollector', 'type': 'passive'}
    common_setup(
        service='garbagecollector',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        register_runners=False,
        service_registry=True,
        capabilities=service_info,
    )
def setup():
    """Bootstrap the workflow engine and install its SIGTERM handler."""
    setup_kwargs = dict(
        service='workflow_engine',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
    )
    common_setup(**setup_kwargs)
    setup_sigterm_handler()
def _setup():
    """Bootstrap the stream service and register it as an active service
    advertising its listen address."""
    service_info = {
        'name': 'stream',
        'listen_host': cfg.CONF.stream.host,
        'listen_port': cfg.CONF.stream.port,
        'type': 'active',
    }
    common_setup(
        service='stream',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        register_internal_trigger_types=False,
        run_migrations=False,
        service_registry=True,
        capabilities=service_info,
    )
def _setup():
    """Bootstrap the API service and verify the RBAC configuration."""
    common_setup(
        service='api',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        register_internal_trigger_types=True,
    )

    # Additional pre-run time checks
    validate_rbac_is_correctly_configured()
def _setup():
    """Bootstrap the timer engine and register it as a passive service."""
    service_info = {"name": "timerengine", "type": "passive"}
    common_setup(
        service="timer_engine",
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        service_registry=True,
        capabilities=service_info,
    )
def _setup():
    """Bootstrap the notifier and register it as a passive service."""
    service_info = {"name": "notifier", "type": "passive"}
    common_setup(
        service=NOTIFIER,
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        service_registry=True,
        capabilities=service_info,
    )
def _setup():
    """Bootstrap the rules engine and register it as a passive service."""
    service_info = {'name': 'rulesengine', 'type': 'passive'}
    common_setup(
        service='rulesengine',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        register_internal_trigger_types=True,
        register_runners=False,
        service_registry=True,
        capabilities=service_info,
    )
def setup():
    """Bootstrap the workflow engine and register it as a passive service."""
    service_info = {"name": "workflowengine", "type": "passive"}
    common_setup(
        service=WORKFLOW_ENGINE,
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        service_registry=True,
        capabilities=service_info,
    )
def _setup():
    """Bootstrap the actionrunner and register it as a passive service."""
    service_info = {"name": "actionrunner", "type": "passive"}
    common_setup(
        service=ACTIONRUNNER,
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        service_registry=True,
        capabilities=service_info,
    )
def setup_app(config=None):
    """Create and return the st2api Pecan application.

    :param config: Optional Pecan config object. Falsy means standalone HTTP
                   server mode; under gunicorn ``config.is_gunicorn`` is set.
    """
    LOG.info('Creating st2api: %s as Pecan app.', VERSION_STRING)

    is_gunicorn = getattr(config, 'is_gunicorn', False)
    if is_gunicorn:
        # Note: We need to perform monkey patching in the worker. If we do it in
        # the master process (gunicorn_config.py), it breaks tons of things
        # including shutdown
        monkey_patch()

        st2api_config.register_opts()
        # This should be called in gunicorn case because we only want
        # workers to connect to db, rabbbitmq etc. In standalone HTTP
        # server case, this setup would have already occurred.
        common_setup(
            service='api',
            config=st2api_config,
            setup_db=True,
            register_mq_exchanges=True,
            register_signal_handlers=True,
            register_internal_trigger_types=True,
            run_migrations=True,
            config_args=config.config_args,
        )

    if not config:
        # standalone HTTP server case
        config = _get_pecan_config()
    elif is_gunicorn:
        # gunicorn case
        config.app = _get_pecan_config().app

    pecan_opts = dict(config.app)

    # NOTE: AuthHook is installed unconditionally in this variant.
    active_hooks = [
        hooks.RequestIDHook(),
        hooks.JSONErrorResponseHook(),
        hooks.LoggingHook(),
        hooks.AuthHook(),
        hooks.CorsHook(),
    ]

    pecan_app = pecan.make_app(
        pecan_opts.pop('root'),
        logging=getattr(config, 'logging', {}),
        hooks=active_hooks,
        **pecan_opts
    )

    # Static middleware which servers common static assets such as logos
    static_root = os.path.join(BASE_DIR, 'public')
    pecan_app = StaticFileMiddleware(app=pecan_app, directory=static_root)

    LOG.info('%s app created.' % __name__)
    return pecan_app
def setup():
    """Bootstrap the workflow engine, register it as a passive service, and
    install its SIGTERM handler."""
    service_info = {'name': 'workflowengine', 'type': 'passive'}
    common_setup(
        service='workflow_engine',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        service_registry=True,
        capabilities=service_info,
    )
    setup_sigterm_handler()
def setup_app(config=None):
    """Build and return the st2stream WSGI application.

    :param config: Optional app config dict; under gunicorn it carries
                   ``is_gunicorn`` and ``config_args`` entries.
    """
    # NOTE: the default was previously a mutable ``{}`` literal shared across
    # calls; use None and normalize instead (backward-compatible for callers).
    config = config or {}

    LOG.info("Creating st2stream: %s as OpenAPI app.", VERSION_STRING)

    is_gunicorn = config.get("is_gunicorn", False)
    if is_gunicorn:
        # Note: We need to perform monkey patching in the worker. If we do it in
        # the master process (gunicorn_config.py), it breaks tons of things
        # including shutdown
        monkey_patch()

        st2stream_config.register_opts()
        capabilities = {
            "name": "stream",
            "listen_host": cfg.CONF.stream.host,
            "listen_port": cfg.CONF.stream.port,
            "type": "active",
        }
        # This should be called in gunicorn case because we only want
        # workers to connect to db, rabbbitmq etc. In standalone HTTP
        # server case, this setup would have already occurred.
        common_setup(
            service="stream",
            config=st2stream_config,
            setup_db=True,
            register_mq_exchanges=True,
            register_signal_handlers=True,
            register_internal_trigger_types=False,
            run_migrations=False,
            service_registry=True,
            capabilities=capabilities,
            config_args=config.get("config_args", None),
        )

    router = Router(
        debug=cfg.CONF.stream.debug,
        auth=cfg.CONF.auth.enable,
        is_gunicorn=is_gunicorn,
    )

    spec = spec_loader.load_spec("st2common", "openapi.yaml.j2")
    transforms = {"^/stream/v1/": ["/", "/v1/"]}
    router.add_spec(spec, transforms=transforms)

    app = router.as_wsgi

    # Order is important. Check middleware for detailed explanation.
    app = StreamingMiddleware(app)
    app = ErrorHandlingMiddleware(app)
    app = CorsMiddleware(app)
    app = LoggingMiddleware(app, router)
    app = ResponseInstrumentationMiddleware(app, router, service_name="stream")
    app = RequestIDMiddleware(app)
    app = RequestInstrumentationMiddleware(app, router, service_name="stream")

    return app
def _setup():
    """Bootstrap the sensor container and register it as a passive service."""
    service_info = {"name": "sensorcontainer", "type": "passive"}
    common_setup(
        service="sensorcontainer",
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        register_runners=False,
        service_registry=True,
        capabilities=service_info,
    )
def _setup():
    """Bootstrap the garbage collector and register it as a passive service."""
    service_info = {"name": "garbagecollector", "type": "passive"}
    common_setup(
        service=GARBAGE_COLLECTOR,
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        register_runners=False,
        service_registry=True,
        capabilities=service_info,
    )
def setup_app(config=None):
    """Create and return the st2api Pecan application.

    :param config: Optional Pecan config object. Falsy means standalone HTTP
                   server mode; under gunicorn ``config.is_gunicorn`` is set.
    """
    LOG.info('Creating st2api: %s as Pecan app.', VERSION_STRING)

    is_gunicorn = getattr(config, 'is_gunicorn', False)
    if is_gunicorn:
        # Note: We need to perform monkey patching in the worker. If we do it in
        # the master process (gunicorn_config.py), it breaks tons of things
        # including shutdown
        monkey_patch()

        st2api_config.register_opts()
        # This should be called in gunicorn case because we only want
        # workers to connect to db, rabbbitmq etc. In standalone HTTP
        # server case, this setup would have already occurred.
        common_setup(
            service='api',
            config=st2api_config,
            setup_db=True,
            register_mq_exchanges=True,
            register_signal_handlers=True,
            register_internal_trigger_types=True,
            run_migrations=True,
            config_args=config.config_args,
        )

    if not config:
        # standalone HTTP server case
        config = _get_pecan_config()
    elif is_gunicorn:
        # gunicorn case
        config.app = _get_pecan_config().app

    pecan_opts = dict(config.app)

    active_hooks = [
        hooks.RequestIDHook(),
        hooks.JSONErrorResponseHook(),
        hooks.LoggingHook(),
    ]
    # AuthHook only when authentication is enabled in the config.
    if cfg.CONF.auth.enable:
        active_hooks.append(hooks.AuthHook())
    active_hooks.append(hooks.CorsHook())

    pecan_app = pecan.make_app(
        pecan_opts.pop('root'),
        logging=getattr(config, 'logging', {}),
        hooks=active_hooks,
        **pecan_opts
    )

    # Static middleware which servers common static assets such as logos
    static_root = os.path.join(BASE_DIR, 'public')
    pecan_app = StaticFileMiddleware(app=pecan_app, directory=static_root)

    LOG.info('%s app created.' % __name__)
    return pecan_app
def _setup():
    """Bootstrap the scheduler, register it as a passive service, and install
    its SIGTERM handler."""
    service_info = {"name": "scheduler", "type": "passive"}
    common_setup(
        service=SCHEDULER,
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        service_registry=True,
        capabilities=service_info,
    )
    _setup_sigterm_handler()
def _setup():
    """Bootstrap the auth service, register it as an active service advertising
    its listen address, and verify the auth backend configuration."""
    service_info = {
        'name': 'auth',
        'listen_host': cfg.CONF.auth.host,
        'listen_port': cfg.CONF.auth.port,
        'listen_ssl': cfg.CONF.auth.use_ssl,
        'type': 'active',
    }
    common_setup(
        service='auth',
        config=config,
        setup_db=True,
        register_mq_exchanges=False,
        register_signal_handlers=True,
        register_internal_trigger_types=False,
        run_migrations=False,
        service_registry=True,
        capabilities=service_info,
    )

    # Additional pre-run time checks
    validate_auth_backend_is_correctly_configured()
def _setup():
    """Bootstrap the API service, register it as an active service advertising
    its listen address, and verify the RBAC configuration."""
    service_info = {
        'name': 'api',
        'listen_host': cfg.CONF.api.host,
        'listen_port': cfg.CONF.api.port,
        'type': 'active',
    }
    common_setup(
        service='api',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        register_internal_trigger_types=True,
        service_registry=True,
        capabilities=service_info,
    )

    # Additional pre-run time checks
    validate_rbac_is_correctly_configured()
def setup():
    """Bootstrap the workflow engine, register it as a passive service, and
    install its SIGTERM handler."""
    service_info = {'name': 'workflowengine', 'type': 'passive'}
    setup_kwargs = dict(
        service='workflow_engine',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
        service_registry=True,
        capabilities=service_info,
    )
    common_setup(**setup_kwargs)
    setup_sigterm_handler()
def setup_app(config=None):
    """Create and return the st2api Pecan application.

    :param config: Optional Pecan config object. Falsy means standalone HTTP
                   server mode; under gunicorn ``config.is_gunicorn`` is set.
    """
    LOG.info("Creating st2api: %s as Pecan app.", VERSION_STRING)

    is_gunicorn = getattr(config, "is_gunicorn", False)
    if is_gunicorn:
        st2api_config.register_opts()
        # This should be called in gunicorn case because we only want
        # workers to connect to db, rabbbitmq etc. In standalone HTTP
        # server case, this setup would have already occurred.
        common_setup(
            service="api",
            config=st2api_config,
            setup_db=True,
            register_mq_exchanges=True,
            register_signal_handlers=True,
            register_internal_trigger_types=True,
            run_migrations=True,
            config_args=config.config_args,
        )

    if not config:
        # standalone HTTP server case
        config = _get_pecan_config()
    elif is_gunicorn:
        # gunicorn case
        config.app = _get_pecan_config().app

    pecan_opts = dict(config.app)

    active_hooks = [
        hooks.RequestIDHook(),
        hooks.JSONErrorResponseHook(),
        hooks.LoggingHook(),
    ]
    # AuthHook only when authentication is enabled in the config.
    if cfg.CONF.auth.enable:
        active_hooks.append(hooks.AuthHook())
    active_hooks.append(hooks.CorsHook())

    pecan_app = pecan.make_app(
        pecan_opts.pop("root"),
        logging=getattr(config, "logging", {}),
        hooks=active_hooks,
        **pecan_opts
    )

    # Static middleware which servers common static assets such as logos
    static_root = os.path.join(BASE_DIR, "public")
    pecan_app = StaticFileMiddleware(app=pecan_app, directory=static_root)

    LOG.info("%s app created." % __name__)
    return pecan_app
def setup_app(config=None):
    """Build and return the st2auth WSGI application (OpenAPI router + middleware).

    :param config: Optional app config dict; under gunicorn it carries
                   ``is_gunicorn`` and ``config_args`` entries.
    """
    # NOTE: the default was previously a mutable ``{}`` literal shared across
    # calls; use None and normalize instead (backward-compatible for callers).
    config = config or {}

    LOG.info('Creating st2auth: %s as OpenAPI app.', VERSION_STRING)

    is_gunicorn = config.get('is_gunicorn', False)
    if is_gunicorn:
        # Note: We need to perform monkey patching in the worker. If we do it in
        # the master process (gunicorn_config.py), it breaks tons of things
        # including shutdown
        monkey_patch()

        # This should be called in gunicorn case because we only want
        # workers to connect to db, rabbbitmq etc. In standalone HTTP
        # server case, this setup would have already occurred.
        st2auth_config.register_opts()
        common_setup(service='auth', config=st2auth_config, setup_db=True,
                     register_mq_exchanges=False,
                     register_signal_handlers=True,
                     register_internal_trigger_types=False,
                     run_migrations=False,
                     config_args=config.get('config_args', None))

    # Additional pre-run time checks
    validate_auth_backend_is_correctly_configured()

    router = Router(debug=cfg.CONF.auth.debug)

    spec = spec_loader.load_spec('st2common', 'openapi.yaml.j2')
    transforms = {
        '^/auth/v1/': ['/', '/v1/']
    }
    router.add_spec(spec, transforms=transforms)

    app = router.as_wsgi

    # Order is important. Check middleware for detailed explanation.
    app = ErrorHandlingMiddleware(app)
    app = CorsMiddleware(app)
    app = LoggingMiddleware(app, router)
    app = RequestIDMiddleware(app)

    return app
def _setup():
    """Run the shared service bootstrap for the garbage collector."""
    common_setup(
        service='garbagecollector',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
    )
def _setup():
    """Run the shared service bootstrap for the results tracker."""
    setup_kwargs = dict(
        service='resultstracker',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
    )
    common_setup(**setup_kwargs)
# NOTE: ``os`` was used below (os.environ.get) without being imported; the
# missing import is added here.
import os

from oslo_config import cfg
from st2api import config  # noqa
from st2common.service_setup import setup as common_setup

__all__ = [
    'server',
    'app'
]

# Config file location, overridable via the ST2_CONFIG_PATH env var.
DEFAULT_ST2_CONFIG_PATH = '/etc/st2/st2.conf'
ST2_CONFIG_PATH = os.environ.get('ST2_CONFIG_PATH', DEFAULT_ST2_CONFIG_PATH)
CONFIG_ARGS = ['--config-file', ST2_CONFIG_PATH]

# Module-level side effect: full service setup (db, MQ exchanges, internal
# trigger types) runs on import.
common_setup(service='api', config=config, setup_db=True,
             register_mq_exchanges=True, register_signal_handlers=False,
             register_internal_trigger_types=True, config_args=CONFIG_ARGS)

# Pecan server/app configuration consumed by the WSGI container.
server = {
    'host': cfg.CONF.api.host,
    'port': cfg.CONF.api.port
}

app = {
    'root': 'st2api.controllers.root.RootController',
    'modules': ['st2api'],
    'debug': cfg.CONF.api_pecan.debug,
    'errors': {'__force_dict__': True}
}
def _setup():
    """Run the shared service bootstrap for the rules engine."""
    setup_kwargs = dict(
        service='rulesengine',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
    )
    common_setup(**setup_kwargs)
def _setup():
    """Run the shared service bootstrap for the sensor container."""
    common_setup(
        service='sensorcontainer',
        config=config,
        setup_db=True,
        register_mq_exchanges=True,
        register_signal_handlers=True,
    )