def setup_config(with_fake_datasource=True, node_id='testnode',
                 same_partition_as_node=None, api=True, policy=True,
                 datasources=True):
    """Set up a DseNode for testing.

    :param with_fake_datasource: if True, register a FakeDataSource service
        named 'data' on the node
    :param node_id: ID to assign to the new DseNode
    :param same_partition_as_node: existing node whose partition the new node
        should join; a new partition is created when None
    :param api: if True, deploy the API service
    :param policy: if True, deploy the policy engine and library services
    :param datasources: if True, deploy the datasource manager
    """
    config.set_config_defaults()
    # Load the fake driver.
    cfg.CONF.set_override(
        'drivers', ['congress.tests.fake_datasource.FakeDataSource'])

    if same_partition_as_node is None:
        node = helper.make_dsenode_new_partition(node_id)
    else:
        node = helper.make_dsenode_same_partition(
            same_partition_as_node, node_id)

    if datasources:
        cfg.CONF.set_override('datasources', True)

    with mock.patch.object(periodics, 'PeriodicWorker', autospec=True):
        services = harness.create2(
            existing_node=node, policy_engine=policy, api=api,
            datasources=datasources)

    data = None
    if with_fake_datasource:
        data = fake_datasource.FakeDataSource('data')
        # FIXME(ekcs): this is a hack to prevent the synchronizer from
        # attempting to delete this DSD because it's not in the DB
        data.type = 'no_sync_datasource_driver'
        node.register_service(data)

    engine_service = None
    library_service = None
    api_service = None
    ds_manager = None  # stays None unless the datasource manager is deployed
    if policy:
        engine_service = services[api_base.ENGINE_SERVICE_ID]
        library_service = services[api_base.LIBRARY_SERVICE_ID]
    if api:
        api_service = services['api']
    if datasources:
        ds_manager = services['ds_manager']

    return {'node': node,
            'engine': engine_service,
            'library': library_service,
            'data': data,
            'api': api_service,
            'ds_manager': ds_manager}


def setup_config(with_fake_datasource=True, node_id='testnode',
                 same_partition_as_node=None, api=True, policy=True,
                 datasources=True, with_fake_json_ingester=False):
    """Set up a DseNode for testing.

    :param with_fake_datasource: if True, register a FakeDataSource service
        named 'data' on the node
    :param node_id: ID to assign to the new DseNode
    :param same_partition_as_node: existing node whose partition the new node
        should join; a new partition is created when None
    :param api: if True, deploy the API service
    :param policy: if True, deploy the policy engine and library services
    :param datasources: if True, deploy the datasource manager
    :param with_fake_json_ingester: if True, register a FakeJsonIngester
        service on the node
    """
    config.set_config_defaults()
    # Load the fake driver.
    cfg.CONF.set_override(
        'drivers', ['congress.tests.fake_datasource.FakeDataSource'])

    if same_partition_as_node is None:
        node = helper.make_dsenode_new_partition(node_id)
    else:
        node = helper.make_dsenode_same_partition(
            same_partition_as_node, node_id)

    if datasources:
        cfg.CONF.set_override('datasources', True)

    with mock.patch.object(periodics, 'PeriodicWorker', autospec=True):
        services = harness.create2(
            existing_node=node, policy_engine=policy, api=api,
            datasources=datasources)

    data = None
    if with_fake_datasource:
        data = fake_datasource.FakeDataSource('data')
        # FIXME(ekcs): this is a hack to prevent the synchronizer from
        # attempting to delete this DSD because it's not in the DB
        data.type = 'no_sync_datasource_driver'
        node.register_service(data)

    ingester = None
    if with_fake_json_ingester:
        ingester = fake_datasource.FakeJsonIngester()
        node.register_service(ingester)

    engine_service = None
    library_service = None
    api_service = None
    ds_manager = None  # stays None unless the datasource manager is deployed
    if policy:
        engine_service = services[api_base.ENGINE_SERVICE_ID]
        library_service = services[api_base.LIBRARY_SERVICE_ID]
    if api:
        api_service = services['api']
    if datasources:
        ds_manager = services['ds_manager']

    return {'node': node,
            'engine': engine_service,
            'library': library_service,
            'data': data,
            'api': api_service,
            'ds_manager': ds_manager,
            'json_ingester': ingester}


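# Illustrative usage (not part of the original module): a minimal sketch of
# how a test case might consume the services dict returned by setup_config().
# The class name is hypothetical and plain unittest.TestCase is used only to
# keep the sketch self-contained; node.stop() is assumed to be the teardown
# hook for the deployed DseNode.
import unittest


class ExampleSetupConfigUsage(unittest.TestCase):

    def test_setup_returns_requested_services(self):
        deployed = setup_config(with_fake_datasource=True)
        # Tear the node down even if the assertions below fail.
        self.addCleanup(deployed['node'].stop)
        # With the defaults above, the policy engine, API service and the
        # fake datasource should all be present in the returned dict.
        self.assertIsNotNone(deployed['engine'])
        self.assertIsNotNone(deployed['api'])
        self.assertIsNotNone(deployed['data'])

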
def pipeline_factory(loader, global_conf, **local_conf):
    """Create a paste pipeline based on the 'auth_strategy' config option."""
    config.set_config_defaults()
    pipeline = local_conf[cfg.CONF.auth_strategy]
    pipeline = pipeline.split()
    filters = [loader.get_filter(n) for n in pipeline[:-1]]
    app = loader.get_app(pipeline[-1])
    filters.reverse()
    for filt in filters:
        app = filt(app)
    return app


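# Illustrative note (not part of the original file): pipeline_factory is the
# kind of callable a Paste composite section points at, where each
# auth_strategy value maps to a space-separated pipeline whose last entry is
# the app and the earlier entries are filters. The section below is a
# hypothetical example, not the shipped api-paste.ini:
#
#     [composite:congress_api]
#     paste.composite_factory = congress.service:pipeline_factory
#     keystone = request_id authtoken keystonecontext congress_api
#     noauth = request_id congress_api
#
# Because the filter list is reversed before wrapping, the first filter
# listed ('request_id' above) becomes the outermost WSGI middleware.

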
def launch_api_server():
    LOG.info("Starting congress server on port %d", cfg.CONF.bind_port)

    # API resource runtime encapsulation:
    #   event loop -> wsgi server -> webapp -> resource manager
    paste_config = config.find_paste_config()
    config.set_config_defaults()
    servers = []
    servers.append(create_api_server(paste_config,
                                     cfg.CONF.dse.node_id,
                                     cfg.CONF.bind_host,
                                     cfg.CONF.bind_port,
                                     cfg.CONF.api_workers))
    return servers


def launch_servers(node_id, api, policy, data):
    servers = []
    if api:
        LOG.info("Starting congress API server on port %d",
                 cfg.CONF.bind_port)
        # API resource runtime encapsulation:
        #   event loop -> wsgi server -> webapp -> resource manager
        paste_config = config.find_paste_config()
        config.set_config_defaults()
        servers.append(create_api_server(paste_config,
                                         node_id,
                                         cfg.CONF.bind_host,
                                         cfg.CONF.bind_port,
                                         cfg.CONF.api_workers,
                                         policy_engine=policy,
                                         datasources=data))
    else:
        LOG.info("Starting congress server on node %s", node_id)
        servers.append(create_nonapi_server(node_id, policy, data,
                                            cfg.CONF.api_workers))
    return servers


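# Illustrative sketch (not part of the original file): one way a launcher
# could drive launch_servers() from configuration and hand the result to
# serve(). The boolean options cfg.CONF.api, cfg.CONF.policy_engine and
# cfg.CONF.datasources are assumptions made for this example only.
def example_launch_from_config():
    servers = launch_servers(cfg.CONF.dse.node_id,
                             api=cfg.CONF.api,
                             policy=cfg.CONF.policy_engine,
                             data=cfg.CONF.datasources)
    serve(*servers)

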
def main():
    config.init(sys.argv[1:])
    if not cfg.CONF.config_file:
        sys.exit("ERROR: Unable to find configuration file via default "
                 "search paths (~/.congress/, ~/, /etc/congress/, /etc/) and "
                 "the '--config-file' option!")
    config.setup_logging()
    LOG.info("Starting congress server on port %d", cfg.CONF.bind_port)

    # API resource runtime encapsulation:
    #   event loop -> wsgi server -> webapp -> resource manager
    paste_config = config.find_paste_config()
    config.set_config_defaults()
    servers = []
    servers.append(create_api_server(paste_config,
                                     "congress",
                                     cfg.CONF.bind_host,
                                     cfg.CONF.bind_port,
                                     cfg.CONF.api_workers))
    serve(*servers)