def setUp(self):
    """Create a DSE cage hosting the vm-placement engine and a fake datasource."""
    super(TestSetPolicy, self).setUp()
    self.cage = d6cage.d6Cage()
    vmplace_cfg = {"module": "congress/policy_engines/vm_placement.py"}
    fake_cfg = {"poll_time": 0,
                "module": "congress/tests/fake_datasource.py"}
    root = helper.root_path()
    harness.load_data_service("vmplace", vmplace_cfg, self.cage, root, 1)
    harness.load_data_service("fake", fake_cfg, self.cage, root, 2)
    self.vmplace = self.cage.service_object("vmplace")
    self.vmplace.debug_mode()
    self.fake = self.cage.service_object("fake")
def setUp(self):
    """Build a cage with the fake datasource driver and a RowModel under test."""
    super(TestRowModel, self).setUp()
    # Restrict configured drivers to the fake test datasource.
    cfg.CONF.set_override(
        'drivers', ['congress.tests.fake_datasource.FakeDataSource'])
    self.cage = harness.create(helper.root_path())
    self.datasource_mgr = datasource_manager.DataSourceManager
    self.datasource_mgr.validate_configured_drivers()
    req = {
        'driver': 'fake_datasource',
        'name': 'fake_datasource',
        'config': {'auth_url': 'foo',
                   'username': '******',
                   'password': '******',
                   'tenant_name': 'foo'},
    }
    self.datasource_mgr.add_datasource(req)
    self.datasource = self.cage.getservice(name='fake_datasource',
                                           type_='datasource_driver')
    self.engine = self.cage.service_object('engine')
    self.api_rule = self.cage.service_object('api-rule')
    self.policy_model = self.cage.service_object('api-policy')
    self.row_model = row_model.RowModel(
        "row_model", {}, policy_engine=self.engine,
        datasource_mgr=self.datasource_mgr)
def setUp(self):
    """Build a cage with the fake datasource driver and a TableModel under test."""
    super(TestTableModel, self).setUp()
    # Restrict configured drivers to the fake test datasource.
    cfg.CONF.set_override(
        'drivers', ['congress.tests.fake_datasource.FakeDataSource'])
    # NOTE(masa): this set of tests, tests to deeply. We don't have
    # any tests currently testing cage. Once we do we should mock out
    # cage so we don't have to create one here.
    self.cage = harness.create(helper.root_path())
    self.ds_mgr = datasource_manager.DataSourceManager
    self.ds_mgr.validate_configured_drivers()
    req = {
        'driver': 'fake_datasource',
        'name': 'fake_datasource',
        'config': {'auth_url': 'foo',
                   'username': '******',
                   'password': '******',
                   'tenant_name': 'foo'},
    }
    self.datasource = self.ds_mgr.add_datasource(req)
    self.engine = self.cage.service_object('engine')
    self.api_rule = self.cage.service_object('api-rule')
    self.table_model = table_model.TableModel(
        "table_model", {}, policy_engine=self.engine,
        datasource_mgr=self.ds_mgr)
def start_pe(self, num, port):
    """Start policy-engine node ``num`` listening on ``port``; return a client.

    Launches congress_server.py as a subprocess (killed automatically at
    test cleanup) and polls its HTTP API until it answers 200.  On startup
    failure the captured process output is logged before re-raising.
    """
    # Capture files named after the PE instance, so leftovers in /tmp are
    # attributable to a specific node/port.
    self.outfiles[num] = tempfile.NamedTemporaryFile(
        mode='a+', suffix='.out',
        prefix='congress-pe%d-%d-' % (num, port), dir='/tmp')
    self.errfiles[num] = tempfile.NamedTemporaryFile(
        mode='a+', suffix='.err',
        prefix='congress-pe%d-%d-' % (num, port), dir='/tmp')
    args = [
        sys.executable, 'congress/server/congress_server.py',
        '--node-id', 'node_%d' % num, '--api', '--policy-engine',
        '--config-file', 'congress/tests/etc/congress.conf.test.ha_pe%d' % num
    ]
    # NOTE(review): stderr is pointed at the .out file (both streams end up
    # together for the error log below), leaving self.errfiles[num] created
    # but never written here -- confirm whether the .err file is still needed.
    pe = subprocess.Popen(args, stdout=self.outfiles[num],
                          stderr=self.outfiles[num],
                          cwd=helper.root_path())
    self.addCleanup(pe.kill)
    # Re-bind `pe` from the Popen handle to an HTTP client for the server.
    pe = self.client(port)
    try:
        # Retry until the API responds 200, i.e. the PE finished starting.
        helper.retry_check_function_return_value(
            lambda: pe.get().status_code, 200)
    except tenacity.RetryError:
        out = self.read_output_file(self.outfiles[num])
        LOG.error('PE%d failed to start. Process output:\n%s' % (num, out))
        raise
    return pe
def start_pe(self, num, port):
    """Spawn policy-engine node ``num`` on ``port`` and return an API client.

    The congress_server.py subprocess is registered for kill at cleanup;
    the method blocks (with retries) until the server's HTTP API returns
    200, logging the captured process output if startup fails.
    """
    # Per-instance capture files in /tmp, named by node number and port.
    self.outfiles[num] = tempfile.NamedTemporaryFile(
        mode='a+', suffix='.out',
        prefix='congress-pe%d-%d-' % (num, port), dir='/tmp')
    self.errfiles[num] = tempfile.NamedTemporaryFile(
        mode='a+', suffix='.err',
        prefix='congress-pe%d-%d-' % (num, port), dir='/tmp')
    args = [sys.executable, 'congress/server/congress_server.py',
            '--node-id', 'node_%d' % num, '--api', '--policy-engine',
            '--config-file',
            'congress/tests/etc/congress.conf.test.ha_pe%d' % num]
    # NOTE(review): stderr goes to the .out file (so the failure log below
    # captures both streams); self.errfiles[num] is created but unused here
    # -- confirm whether it can be dropped.
    pe = subprocess.Popen(args, stdout=self.outfiles[num],
                          stderr=self.outfiles[num],
                          cwd=helper.root_path())
    self.addCleanup(pe.kill)
    # `pe` is re-bound from the subprocess handle to an HTTP client.
    pe = self.client(port)
    try:
        # Poll until the API answers 200, meaning the PE is up.
        helper.retry_check_function_return_value(
            lambda: pe.get().status_code, 200)
    except tenacity.RetryError:
        out = self.read_output_file(self.outfiles[num])
        LOG.error('PE%d failed to start. Process output:\n%s' % (num, out))
        raise
    return pe
def setUp(self):
    """Build a cage hosting the benchmark datasource; cache handles on self.

    Wires up the engine and all API services, waits until the rule API and
    the engine are cross-subscribed for 'policy-update', and registers this
    test class as a push subscriber on the benchmark table so published
    updates flow into datasource.dataPath.
    """
    super(BenchmarkDatasource, self).setUp()
    # Only the benchmark driver is loaded; poll_time=0 disables polling.
    config = {'benchmark': {
        'module': helper.data_module_path('benchmark_driver.py'),
        'poll_time': 0}}
    cage = harness.create(helper.root_path(), None, config)
    engine = cage.service_object('engine')
    api = {'policy': cage.service_object('api-policy'),
           'rule': cage.service_object('api-rule'),
           'table': cage.service_object('api-table'),
           'row': cage.service_object('api-row'),
           'datasource': cage.service_object('api-datasource'),
           'status': cage.service_object('api-status'),
           'schema': cage.service_object('api-schema')}
    # Subscription setup is asynchronous; retry until both sides appear.
    helper.retry_check_subscriptions(engine, [(api['rule'].name,
                                               'policy-update')])
    helper.retry_check_subscribers(api['rule'], [(engine.name,
                                                  'policy-update')])
    self.assertTrue('benchmark' in cage.services)
    datasource = cage.service_object('benchmark')
    table_name = datasource.BENCHTABLE
    # The datasource must start empty so benchmarks measure from scratch.
    self.assertEqual(datasource.state, {})
    # add a subscriber to ensure the updates end up in datasource.dataPath
    pubdata = datasource.pubdata.setdefault(table_name,
                                            dataobj.pubData(table_name))
    pubdata.addsubscriber(self.__class__.__name__, "push", "")
    self.assertTrue(datasource.pubdata[table_name])
    self.cage = cage
    self.engine = engine
    self.api = api
    self.table_name = table_name
    self.datasource = datasource
def setUp(self):
    """Configure the fake driver and create a cage with a state path."""
    super(TestDataSourceManager, self).setUp()
    fake_driver = 'congress.tests.fake_datasource.FakeDataSource'
    cfg.CONF.set_override('drivers', [fake_driver])
    self.datasource_mgr = datasource_manager.DataSourceManager
    self.datasource_mgr.validate_configured_drivers()
    self.cage = harness.create(helper.root_path(), helper.state_path())
def setUp(self):
    """Configure the fake driver and create the test cage."""
    super(TestDataSourceManager, self).setUp()
    fake_driver = 'congress.tests.fake_datasource.FakeDataSource'
    cfg.CONF.set_override('drivers', [fake_driver])
    self.datasource_mgr = datasource_manager.DataSourceManager
    self.datasource_mgr.validate_configured_drivers()
    self.cage = harness.create(helper.root_path())
def setUp(self):
    """Create a cage and cache the engine plus every API service on self."""
    super(TestDsePerformance, self).setUp()
    self.cage = harness.create(helper.root_path(), config_override={})
    # All API services follow the 'api-<name>' registration convention.
    service_names = ('policy', 'rule', 'table', 'row',
                     'datasource', 'status', 'schema')
    self.api = {name: self.cage.service_object('api-' + name)
                for name in service_names}
    self.engine = self.cage.service_object('engine')
def setUp(self):
    """Create a cage and collect the engine and API service handles."""
    super(TestDsePerformance, self).setUp()
    self.cage = harness.create(helper.root_path(), config_override={})
    # Each API service is registered under an 'api-<name>' key.
    self.api = {}
    for name in ("policy", "rule", "table", "row",
                 "datasource", "status", "schema"):
        self.api[name] = self.cage.service_object("api-" + name)
    self.engine = self.cage.service_object("engine")
def setUp(self):
    """Create the DSE cage and load the vm-placement engine plus a fake datasource."""
    super(TestSetPolicy, self).setUp()
    self.cage = d6cage.d6Cage()
    services = {
        'vmplace': {'module': "congress/policy_engines/vm_placement.py"},
        'fake': {'poll_time': 0,
                 'module': "congress/tests/fake_datasource.py"},
    }
    harness.load_data_service("vmplace", services['vmplace'], self.cage,
                              helper.root_path(), 1)
    harness.load_data_service("fake", services['fake'], self.cage,
                              helper.root_path(), 2)
    self.vmplace = self.cage.service_object('vmplace')
    self.vmplace.debug_mode()
    self.fake = self.cage.service_object('fake')
def setUp(self):
    """Stand up a cage with the fake driver and a RuleModel bound to its engine."""
    super(TestRuleModel, self).setUp()
    # Restrict configured drivers to the fake test datasource.
    fake_driver = 'congress.tests.fake_datasource.FakeDataSource'
    cfg.CONF.set_override('drivers', [fake_driver])
    self.cage = harness.create(helper.root_path())
    self.engine = self.cage.service_object('engine')
    self.rule_model = rule_model.RuleModel(
        "rule_model", {}, policy_engine=self.engine)
    self.context = {'policy_id': 'action'}
    self._add_test_rule()
def setUp(self):
    """Stand up a cage with the fake driver and a PolicyModel under test."""
    super(TestPolicyModel, self).setUp()
    # Restrict configured drivers to the fake test datasource.
    fake_driver = 'congress.tests.fake_datasource.FakeDataSource'
    cfg.CONF.set_override('drivers', [fake_driver])
    self.cage = harness.create(helper.root_path())
    self.engine = self.cage.service_object('engine')
    self.rule_api = self.cage.service_object('api-rule')
    self.policy_model = policy_model.PolicyModel(
        "policy_model", {}, policy_engine=self.engine)
    # Remember the pre-existing policies so tests can diff against them.
    self.initial_policies = set(self.engine.policy_names())
    self._add_test_policy()
def setUp(self): """Setup tests that use multiple mock neutron instances.""" super(TestCongress, self).setUp() # create neutron mock and tell cage to use that mock # https://code.google.com/p/pymox/wiki/MoxDocumentation mock_factory = mox.Mox() neutron_mock = mock_factory.CreateMock( neutronclient.v2_0.client.Client) neutron_mock2 = mock_factory.CreateMock( neutronclient.v2_0.client.Client) override = {} override['neutron'] = {'client': neutron_mock, 'poll_time': 0} override['neutron2'] = {'client': neutron_mock2, 'poll_time': 0} override['nova'] = {'poll_time': 0} cage = harness.create(helper.root_path(), helper.state_path(), helper.datasource_config_path(), override) engine = cage.service_object('engine') api = {'policy': cage.service_object('api-policy'), 'rule': cage.service_object('api-rule'), 'table': cage.service_object('api-table'), 'row': cage.service_object('api-row'), 'datasource': cage.service_object('api-datasource'), 'status': cage.service_object('api-status'), 'schema': cage.service_object('api-schema')} # Turn off schema checking engine.module_schema = None # initialize neutron_mocks network1 = test_neutron.network_response port_response = test_neutron.port_response router_response = test_neutron.router_response sg_group_response = test_neutron.security_group_response neutron_mock.list_networks().InAnyOrder().AndReturn(network1) neutron_mock.list_ports().InAnyOrder().AndReturn(port_response) neutron_mock.list_routers().InAnyOrder().AndReturn(router_response) neutron_mock.list_security_groups().InAnyOrder().AndReturn( sg_group_response) neutron_mock2.list_networks().InAnyOrder().AndReturn(network1) neutron_mock2.list_ports().InAnyOrder().AndReturn(port_response) neutron_mock2.list_routers().InAnyOrder().AndReturn(router_response) neutron_mock2.list_security_groups().InAnyOrder().AndReturn( sg_group_response) mock_factory.ReplayAll() self.cage = cage self.engine = engine self.api = api
def setUp(self):
    """Create a cage (with state path) and cache engine plus API handles."""
    super(TestDsePerformance, self).setUp()
    self.cage = harness.create(helper.root_path(), helper.state_path(),
                               config_override={})
    # API services are registered under 'api-<name>'.
    self.api = {name: self.cage.service_object('api-' + name)
                for name in ('policy', 'rule', 'table', 'row',
                             'datasource', 'status', 'schema')}
    self.engine = self.cage.service_object('engine')
def setUp(self):
    """Create a cage (with state path) hosting the benchmark datasource.

    Waits until the engine and rule API are cross-subscribed for
    'policy-update', then registers this test class as a push subscriber on
    the benchmark table so published updates reach datasource.dataPath.
    """
    super(BenchmarkDatasource, self).setUp()
    # Only the benchmark driver is loaded; poll_time=0 disables polling.
    config = {
        'benchmark': {
            'module': helper.data_module_path('benchmark_driver.py'),
            'poll_time': 0
        }
    }
    cage = harness.create(helper.root_path(), helper.state_path(), None,
                          config)
    engine = cage.service_object('engine')
    api = {
        'policy': cage.service_object('api-policy'),
        'rule': cage.service_object('api-rule'),
        'table': cage.service_object('api-table'),
        'row': cage.service_object('api-row'),
        'datasource': cage.service_object('api-datasource'),
        'status': cage.service_object('api-status'),
        'schema': cage.service_object('api-schema')
    }
    # Subscription setup is asynchronous; retry until both sides appear.
    helper.retry_check_subscriptions(engine, [(api['rule'].name,
                                               'policy-update')])
    helper.retry_check_subscribers(api['rule'], [(engine.name,
                                                  'policy-update')])
    self.assertTrue('benchmark' in cage.services)
    datasource = cage.service_object('benchmark')
    table_name = datasource.BENCHTABLE
    # The datasource must start empty so benchmarks measure from scratch.
    self.assertEqual(datasource.state, {})
    # add a subscriber to ensure the updates end up in datasource.dataPath
    pubdata = datasource.pubdata.setdefault(table_name,
                                            dataobj.pubData(table_name))
    pubdata.addsubscriber(self.__class__.__name__, "push", "")
    self.assertTrue(datasource.pubdata[table_name])
    self.cage = cage
    self.engine = engine
    self.api = api
    self.table_name = table_name
    self.datasource = datasource
def create_services(self):
    """Return (engine, rule_api, policy_api) for the configured architecture."""
    if cfg.CONF.distributed_architecture:
        msg_cfg = helper.generate_messaging_config()
        # TODO(masa): following initializing DseNode will be just a fake
        # until API model and Policy Engine support rpc. After these
        # support rpc, pub/sub and so on, replaced with each class.
        nodes = [dse_node.DseNode(msg_cfg, svc, [])
                 for svc in ('engine', 'api-rule', 'policy_model')]
        for node in nodes:
            node.start()
        engine, rule_api, policy_api = nodes
        return engine, rule_api, policy_api
    # Single-process architecture: pull services out of a cage instead.
    cage = harness.create(helper.root_path())
    engine = cage.service_object('engine')
    rule_api = cage.service_object('api-rule')
    policy_api = policy_model.PolicyModel(
        "policy_model", {}, policy_engine=engine)
    return engine, rule_api, policy_api
def setUp(self):
    """Register the fake driver and build a DatasourceDriverModel to test."""
    super(TestDriverModel, self).setUp()
    fake_driver = 'congress.tests.fake_datasource.FakeDataSource'
    cfg.CONF.set_override('drivers', [fake_driver])
    self.cage = harness.create(helper.root_path())
    self.datasource_mgr = datasource_manager.DataSourceManager
    self.datasource_mgr.validate_configured_drivers()
    req = {
        'driver': 'fake_datasource',
        'name': 'fake_datasource',
        'config': {'auth_url': 'foo',
                   'username': '******',
                   'password': '******',
                   'tenant_name': 'foo'},
    }
    self.datasource = self.datasource_mgr.add_datasource(req)
    self.api_system = self.cage.service_object('api-system')
    self.driver_model = driver_model.DatasourceDriverModel(
        "driver-model", {}, datasource_mgr=self.datasource_mgr)
def setUp(self):
    """Build a cage with the fake driver and a StatusModel under test."""
    super(TestStatusModel, self).setUp()
    # Restrict configured drivers to the fake test datasource.
    fake_driver = 'congress.tests.fake_datasource.FakeDataSource'
    cfg.CONF.set_override('drivers', [fake_driver])
    # NOTE(arosen): this set of tests, tests to deeply. We don't have
    # any tests currently testing cage. Once we do we should mock out
    # cage so we don't have to create one here.
    self.cage = harness.create(helper.root_path(), helper.state_path())
    self.datasource_mgr = datasource_manager.DataSourceManager
    self.datasource_mgr.validate_configured_drivers()
    req = {
        'driver': 'fake_datasource',
        'name': 'fake_datasource',
        'config': {'auth_url': 'foo',
                   'username': '******',
                   'password': '******',
                   'tenant_name': 'foo'},
    }
    self.datasource = self.datasource_mgr.add_datasource(req)
    engine = self.cage.service_object('engine')
    self.status_model = status_model.StatusModel(
        "status_schema", {}, policy_engine=engine)
def setUp(self): """Setup tests that use multiple mock neutron instances.""" super(TestCongress, self).setUp() # create neutron mock and tell cage to use that mock # https://code.google.com/p/pymox/wiki/MoxDocumentation mock_factory = mox.Mox() neutron_mock = mock_factory.CreateMock( neutronclient.v2_0.client.Client) neutron_mock2 = mock_factory.CreateMock( neutronclient.v2_0.client.Client) override = {} override['neutron'] = {'poll_time': 0} override['neutron2'] = {'poll_time': 0} override['nova'] = {'poll_time': 0} cage = harness.create(helper.root_path(), helper.state_path(), helper.datasource_config_path(), override) engine = cage.service_object('engine') api = { 'policy': cage.service_object('api-policy'), 'rule': cage.service_object('api-rule'), 'table': cage.service_object('api-table'), 'row': cage.service_object('api-row'), 'datasource': cage.service_object('api-datasource'), 'status': cage.service_object('api-status'), 'schema': cage.service_object('api-schema') } # monkey patch cage.service_object('neutron').neutron = neutron_mock cage.service_object('neutron2').neutron = neutron_mock2 # delete all policies that aren't builtin, so we have clean slate names = set(engine.policy_names()) - engine.builtin_policy_names for name in names: try: api['policy'].delete_item(name, {}) except KeyError: pass # Turn off schema checking engine.module_schema = None # initialize neutron_mocks network1 = test_neutron.network_response port_response = test_neutron.port_response router_response = test_neutron.router_response sg_group_response = test_neutron.security_group_response neutron_mock.list_networks().InAnyOrder().AndReturn(network1) neutron_mock.list_ports().InAnyOrder().AndReturn(port_response) neutron_mock.list_routers().InAnyOrder().AndReturn(router_response) neutron_mock.list_security_groups().InAnyOrder().AndReturn( sg_group_response) neutron_mock2.list_networks().InAnyOrder().AndReturn(network1) neutron_mock2.list_ports().InAnyOrder().AndReturn(port_response) 
neutron_mock2.list_routers().InAnyOrder().AndReturn(router_response) neutron_mock2.list_security_groups().InAnyOrder().AndReturn( sg_group_response) mock_factory.ReplayAll() self.cage = cage self.engine = engine self.api = api
def setUp(self): """Setup tests that use multiple mock neutron instances.""" super(TestCongress, self).setUp() # create neutron mock and tell cage to use that mock # https://code.google.com/p/pymox/wiki/MoxDocumentation mock_factory = mox.Mox() neutron_mock = mock_factory.CreateMock( neutronclient.v2_0.client.Client) neutron_mock2 = mock_factory.CreateMock( neutronclient.v2_0.client.Client) config_override = { 'neutron2': { 'username': '******', 'tenant_name': 'demo', 'password': '******', 'auth_url': 'http://127.0.0.1:5000/v2.0', 'module': 'datasources/neutron_driver.py' }, 'nova': { 'username': '******', 'tenant_name': 'demo', 'password': '******', 'auth_url': 'http://127.0.0.1:5000/v2.0', 'module': 'datasources/nova_driver.py' }, 'neutron': { 'username': '******', 'tenant_name': 'demo', 'password': '******', 'auth_url': 'http://127.0.0.1:5000/v2.0', 'module': 'datasources/neutron_driver.py' } } cage = harness.create(helper.root_path(), helper.state_path(), config_override) # Disable synchronizer because the this test creates # datasources without also inserting them into the database. # The synchronizer would delete these datasources. 
cage.service_object('synchronizer').set_poll_time(0) engine = cage.service_object('engine') api = { 'policy': cage.service_object('api-policy'), 'rule': cage.service_object('api-rule'), 'table': cage.service_object('api-table'), 'row': cage.service_object('api-row'), 'datasource': cage.service_object('api-datasource'), 'status': cage.service_object('api-status'), 'schema': cage.service_object('api-schema') } config = { 'username': '******', 'auth_url': 'http://127.0.0.1:5000/v2.0', 'tenant_name': 'demo', 'password': '******', 'module': 'datasources/neutron_driver.py', 'poll_time': 0 } # FIXME(arosen): remove all this code # monkey patch engine.create_policy('neutron') engine.create_policy('neutron2') engine.create_policy('nova') harness.load_data_service('neutron', config, cage, os.path.join(helper.root_path(), "congress"), 1) service = cage.service_object('neutron') engine.set_schema('neutron', service.get_schema()) harness.load_data_service('neutron2', config, cage, os.path.join(helper.root_path(), "congress"), 2) engine.set_schema('neutron2', service.get_schema()) config['module'] = 'datasources/nova_driver.py' harness.load_data_service('nova', config, cage, os.path.join(helper.root_path(), "congress"), 3) engine.set_schema('nova', service.get_schema()) cage.service_object('neutron').neutron = neutron_mock cage.service_object('neutron2').neutron = neutron_mock2 # delete all policies that aren't builtin, so we have clean slate names = set(engine.policy_names()) - engine.builtin_policy_names for name in names: try: api['policy'].delete_item(name, {}) except KeyError: pass # Turn off schema checking engine.module_schema = None # initialize neutron_mocks network1 = test_neutron.network_response port_response = test_neutron.port_response router_response = test_neutron.router_response sg_group_response = test_neutron.security_group_response neutron_mock.list_networks().InAnyOrder().AndReturn(network1) neutron_mock.list_ports().InAnyOrder().AndReturn(port_response) 
neutron_mock.list_routers().InAnyOrder().AndReturn(router_response) neutron_mock.list_security_groups().InAnyOrder().AndReturn( sg_group_response) neutron_mock2.list_networks().InAnyOrder().AndReturn(network1) neutron_mock2.list_ports().InAnyOrder().AndReturn(port_response) neutron_mock2.list_routers().InAnyOrder().AndReturn(router_response) neutron_mock2.list_security_groups().InAnyOrder().AndReturn( sg_group_response) mock_factory.ReplayAll() self.cage = cage self.engine = engine self.api = api
def setUp(self): """Setup tests that use multiple mock neutron instances.""" super(TestCongress, self).setUp() # create neutron mock and tell cage to use that mock # https://code.google.com/p/pymox/wiki/MoxDocumentation mock_factory = mox.Mox() neutron_mock = mock_factory.CreateMock( neutronclient.v2_0.client.Client) neutron_mock2 = mock_factory.CreateMock( neutronclient.v2_0.client.Client) config_override = {'neutron2': {'username': '******', 'tenant_name': 'demo', 'password': '******', 'auth_url': 'http://127.0.0.1:5000/v2.0', 'module': 'datasources/neutron_driver.py'}, 'nova': {'username': '******', 'tenant_name': 'demo', 'password': '******', 'auth_url': 'http://127.0.0.1:5000/v2.0', 'module': 'datasources/nova_driver.py'}, 'neutron': {'username': '******', 'tenant_name': 'demo', 'password': '******', 'auth_url': 'http://127.0.0.1:5000/v2.0', 'module': 'datasources/neutron_driver.py'}} cage = harness.create(helper.root_path(), config_override) # Disable synchronizer because the this test creates # datasources without also inserting them into the database. # The synchronizer would delete these datasources. 
cage.service_object('synchronizer').set_poll_time(0) engine = cage.service_object('engine') api = {'policy': cage.service_object('api-policy'), 'rule': cage.service_object('api-rule'), 'table': cage.service_object('api-table'), 'row': cage.service_object('api-row'), 'datasource': cage.service_object('api-datasource'), 'status': cage.service_object('api-status'), 'schema': cage.service_object('api-schema')} config = {'username': '******', 'auth_url': 'http://127.0.0.1:5000/v2.0', 'tenant_name': 'demo', 'password': '******', 'module': 'datasources/neutron_driver.py', 'poll_time': 0} # FIXME(arosen): remove all this code # monkey patch engine.create_policy('neutron', kind=datalog_base.DATASOURCE_POLICY_TYPE) engine.create_policy('neutron2', kind=datalog_base.DATASOURCE_POLICY_TYPE) engine.create_policy('nova', kind=datalog_base.DATASOURCE_POLICY_TYPE) harness.load_data_service( 'neutron', config, cage, os.path.join(helper.root_path(), "congress"), 1) service = cage.service_object('neutron') engine.set_schema('neutron', service.get_schema()) harness.load_data_service( 'neutron2', config, cage, os.path.join(helper.root_path(), "congress"), 2) engine.set_schema('neutron2', service.get_schema()) config['module'] = 'datasources/nova_driver.py' harness.load_data_service( 'nova', config, cage, os.path.join(helper.root_path(), "congress"), 3) engine.set_schema('nova', service.get_schema()) cage.service_object('neutron').neutron = neutron_mock cage.service_object('neutron2').neutron = neutron_mock2 # delete all policies that aren't builtin, so we have clean slate names = set(engine.policy_names()) - engine.builtin_policy_names for name in names: try: api['policy'].delete_item(name, {}) except KeyError: pass # Turn off schema checking engine.module_schema = None # initialize neutron_mocks network1 = test_neutron.network_response port_response = test_neutron.port_response router_response = test_neutron.router_response sg_group_response = test_neutron.security_group_response 
neutron_mock.list_networks().InAnyOrder().AndReturn(network1) neutron_mock.list_ports().InAnyOrder().AndReturn(port_response) neutron_mock.list_routers().InAnyOrder().AndReturn(router_response) neutron_mock.list_security_groups().InAnyOrder().AndReturn( sg_group_response) neutron_mock2.list_networks().InAnyOrder().AndReturn(network1) neutron_mock2.list_ports().InAnyOrder().AndReturn(port_response) neutron_mock2.list_routers().InAnyOrder().AndReturn(router_response) neutron_mock2.list_security_groups().InAnyOrder().AndReturn( sg_group_response) mock_factory.ReplayAll() self.cage = cage self.engine = engine self.api = api