def create_nonapi_server(node_id, policy_engine, datasources, workers):
    """Create a Congress server that runs everything except the API.

    Builds an eventlet server on the configured DSE bus, attaches the
    requested non-API services (policy engine and/or datasources) to its
    node, and returns the node id together with a worker-count wrapper.
    """
    server = eventlet_server.Server(node_id, bus_id=cfg.CONF.dse.bus_id)
    # api=False: the API service is hosted by a separate (WSGI) server.
    harness.create2(
        existing_node=server.node,
        api=False,
        policy_engine=policy_engine,
        datasources=datasources)
    return node_id, ServerWrapper(server, workers)
def congress_app_factory(global_conf, **local_conf):
    """Paste app factory returning the Congress API WSGI application.

    In distributed mode, services are created via ``harness.create2`` on
    the node passed through ``global_conf``; otherwise the legacy cage is
    built and its service objects are wired into an API router.

    :param global_conf: paste global configuration dict
    :param local_conf: unused local paste configuration
    :returns: an ``application.ApiApplication`` instance
    """
    if getattr(cfg.CONF, "distributed_architecture", False):
        # global_conf only accepts an iterable value as its dict value
        services = harness.create2(
            node=global_conf['node'][0],  # value must be iterables
            policy_engine=global_conf['flags']['policy_engine'],
            api=global_conf['flags']['api'],
            datasources=global_conf['flags']['datasources'])
        return application.ApiApplication(services['api_service'])
    else:
        # Fall back to the configured root path when one is given.
        root_path = cfg.CONF.root_path or utils.get_root_path()
        data_path = cfg.CONF.datasource_file
        cage = harness.create(root_path, data_path)
        # Map each registered service name to its live service object.
        api_process_dict = {
            name: service_obj['object']
            for name, service_obj in cage.getservices().items()
            if 'object' in service_obj}
        api_resource_mgr = application.ResourceManager()
        router.APIRouterV1(api_resource_mgr, api_process_dict)
        return application.ApiApplication(api_resource_mgr)
def setUp(self):
    """Build a fresh DSE node with the performance test driver and
    cache handles to the API services and the policy engine."""
    super(TestDsePerformance, self).setUp()
    cfg.CONF.set_override(
        'drivers',
        [('congress.tests.datasources.performance_datasource_driver'
          '.PerformanceTestDriver')])
    self.cage = helper.make_dsenode_new_partition("perf")
    harness.create2(existing_node=self.cage)
    # Look up each api-* service once and keep it by short name.
    service_names = ['policy', 'rule', 'table', 'row', 'datasource',
                     'status', 'schema']
    self.api = {name: self.cage.service_object('api-' + name)
                for name in service_names}
    self.engine = self.cage.service_object(api_base.ENGINE_SERVICE_ID)
def setup_config(with_fake_datasource=True):
    """Set up a distributed DseNode for testing.

    :param with_fake_datasource: when True, register a FakeDataSource
        service named 'data' on the node
    :returns: dict with keys 'node', 'engine', 'data' (may be None),
        and 'api' holding the corresponding service objects
    """
    cfg.CONF.set_override('distributed_architecture', True)
    # Load the fake driver.
    cfg.CONF.set_override(
        'drivers',
        ['congress.tests.fake_datasource.FakeDataSource'])
    node = helper.make_dsenode_new_partition("testnode")
    services = harness.create2(node=node)

    # Optionally register the fake datasource in addition to the
    # services harness.create2 already set up.
    data = None
    if with_fake_datasource:
        data = fake_datasource.FakeDataSource('data')
        node.register_service(data)

    engine = services[harness.ENGINE_SERVICE_NAME]
    api = services['api']
    return {'node': node, 'engine': engine, 'data': data, 'api': api}
def setup_config(with_fake_datasource=True, node_id='testnode',
                 same_partition_as_node=None, api=True, policy=True,
                 datasources=True):
    """Set up a DseNode with the requested services for testing.

    :param with_fake_datasource: register a FakeDataSource named 'data'
    :param node_id: id for the new DSE node
    :param same_partition_as_node: when given, create the node in the
        same partition as this existing node instead of a new one
    :param api: create the API services
    :param policy: create the policy engine and policy library services
    :param datasources: create the datasource manager service
    :returns: dict of service objects keyed by 'node', 'engine',
        'library', 'data', 'api', 'ds_manager'; entries for services
        that were not requested are None
    """
    config.set_config_defaults()
    # Load the fake driver.
    cfg.CONF.set_override('drivers',
                          ['congress.tests.fake_datasource.FakeDataSource'])

    if same_partition_as_node is None:
        node = helper.make_dsenode_new_partition(node_id)
    else:
        node = helper.make_dsenode_same_partition(same_partition_as_node,
                                                  node_id)
    if datasources:
        cfg.CONF.set_override('datasources', True)

    # Patch out the periodic worker so tests don't spawn real timers.
    with mock.patch.object(periodics, 'PeriodicWorker', autospec=True):
        services = harness.create2(existing_node=node, policy_engine=policy,
                                   api=api, datasources=datasources)
    data = None
    if with_fake_datasource:
        data = fake_datasource.FakeDataSource('data')
        # FIXME(ekcs): this is a hack to prevent the synchronizer from
        # attempting to delete this DSD because it's not in DB
        data.type = 'no_sync_datasource_driver'
        node.register_service(data)

    engine_service = None
    library_service = None
    api_service = None
    # Initialize to None so the return below doesn't raise NameError
    # when datasources is falsy (previously ds_manager was unbound).
    ds_manager = None
    if policy:
        engine_service = services[api_base.ENGINE_SERVICE_ID]
        library_service = services[api_base.LIBRARY_SERVICE_ID]
    if api:
        api_service = services['api']
    if datasources:
        ds_manager = services['ds_manager']

    return {'node': node, 'engine': engine_service,
            'library': library_service, 'data': data, 'api': api_service,
            'ds_manager': ds_manager}
def setUp(self):
    """Create a one-off DSE partition loaded with the performance
    datasource driver and stash the standard service handles."""
    super(TestDsePerformance, self).setUp()
    driver_path = ('congress.tests.datasources.performance_datasource_driver'
                   '.PerformanceTestDriver')
    cfg.CONF.set_override('drivers', [driver_path])
    self.cage = helper.make_dsenode_new_partition("perf")
    harness.create2(existing_node=self.cage)
    # Resolve each API service object by its registered 'api-<name>' id.
    self.api = {}
    for short_name in ('policy', 'rule', 'table', 'row', 'datasource',
                       'status', 'schema'):
        self.api[short_name] = self.cage.service_object('api-' + short_name)
    self.engine = self.cage.service_object(api_base.ENGINE_SERVICE_ID)
def congress_app_factory(global_conf, **local_conf):
    """Paste app factory that builds the Congress API application.

    Service flags arrive JSON-encoded in ``global_conf['flags']``
    because paste global_conf values must be simple strings.
    """
    # global_conf only accepts an iteratable value as its dict value
    flags = json.loads(global_conf['flags'])
    services = harness.create2(
        node_id=global_conf['node_id'],
        bus_id=global_conf['bus_id'],
        policy_engine=flags['policy_engine'],
        api=flags['api'],
        datasources=flags['datasources'])
    return application.ApiApplication(services['api_service'])
def setup_config(with_fake_datasource=True, node_id='testnode',
                 same_partition_as_node=None, api=True, policy=True,
                 datasources=True, with_fake_json_ingester=False):
    """Set up a DseNode with the requested services for testing.

    :param with_fake_datasource: register a FakeDataSource named 'data'
    :param node_id: id for the new DSE node
    :param same_partition_as_node: when given, create the node in the
        same partition as this existing node instead of a new one
    :param api: create the API services
    :param policy: create the policy engine and policy library services
    :param datasources: create the datasource manager service
    :param with_fake_json_ingester: register a FakeJsonIngester service
    :returns: dict of service objects keyed by 'node', 'engine',
        'library', 'data', 'api', 'ds_manager', 'json_ingester';
        entries for services that were not requested are None
    """
    config.set_config_defaults()
    # Load the fake driver.
    cfg.CONF.set_override(
        'drivers', ['congress.tests.fake_datasource.FakeDataSource'])

    if same_partition_as_node is None:
        node = helper.make_dsenode_new_partition(node_id)
    else:
        node = helper.make_dsenode_same_partition(
            same_partition_as_node, node_id)
    if datasources:
        cfg.CONF.set_override('datasources', True)

    # Patch out the periodic worker so tests don't spawn real timers.
    with mock.patch.object(periodics, 'PeriodicWorker', autospec=True):
        services = harness.create2(
            existing_node=node, policy_engine=policy, api=api,
            datasources=datasources)
    data = None
    if with_fake_datasource:
        data = fake_datasource.FakeDataSource('data')
        # FIXME(ekcs): this is a hack to prevent the synchronizer from
        # attempting to delete this DSD because it's not in DB
        data.type = 'no_sync_datasource_driver'
        node.register_service(data)

    ingester = None
    if with_fake_json_ingester:
        ingester = fake_datasource.FakeJsonIngester()
        node.register_service(ingester)

    engine_service = None
    library_service = None
    api_service = None
    # Initialize to None so the return below doesn't raise NameError
    # when datasources is falsy (previously ds_manager was unbound).
    ds_manager = None
    if policy:
        engine_service = services[api_base.ENGINE_SERVICE_ID]
        library_service = services[api_base.LIBRARY_SERVICE_ID]
    if api:
        api_service = services['api']
    if datasources:
        ds_manager = services['ds_manager']
    return {'node': node,
            'engine': engine_service,
            'library': library_service,
            'data': data,
            'api': api_service,
            'ds_manager': ds_manager,
            'json_ingester': ingester}