class BurstContollerTest(BaseControllersTest):
    """
    Unit tests for burst_controller.

    NOTE(review): the class name contains a typo ("Contoller"); it is kept
    as-is so external references to the test class keep working.
    """

    def setUp(self):
        """ Sets up the environment for testing; creates a `BurstController` """
        BaseControllersTest.init(self)
        self.burst_c = BurstController()

    def tearDown(self):
        """ Cleans up the environment (files and database) after testing is done """
        self.cleanup()
        self.clean_database()

    def test_index(self):
        """
        Test that index returns a dict with all required keys. Also check
        that the default portlets are populated, with only the first being
        the TimeSeries portlet and the rest being empty.
        """
        result_dict = self.burst_c.index()
        self.assertTrue('burst_list' in result_dict and result_dict['burst_list'] == [])
        self.assertTrue('available_metrics' in result_dict and isinstance(result_dict['available_metrics'], list))
        self.assertTrue('portletList' in result_dict and isinstance(result_dict['portletList'], list))
        self.assertEqual(result_dict[common.KEY_SECTION], "burst")
        self.assertTrue('burstConfig' in result_dict and isinstance(result_dict['burstConfig'], BurstConfiguration))
        portlets = json.loads(result_dict['selectedPortlets'])
        portlet_id = dao.get_portlet_by_identifier("TimeSeries").id
        # Only slot (0, 0) should hold the default TimeSeries portlet; every
        # other slot is expected to be empty ([-1, "None"]).
        for tab_idx, tab in enumerate(portlets):
            for index_in_tab, value in enumerate(tab):
                if tab_idx == 0 and index_in_tab == 0:
                    self.assertEqual(value, [portlet_id, "TimeSeries"])
                else:
                    self.assertEqual(value, [-1, "None"])
        self.assertTrue(result_dict['draw_hidden_ranges'])

    def test_load_burst_history(self):
        """
        Create two bursts, load the burst history and check that we get back
        the same stored bursts.
        """
        self._store_burst(self.test_project.id, 'started', {'test': 'test'}, 'burst1')
        burst = self._store_burst(self.test_project.id, 'started', {'test': 'test'}, 'burst2')
        cherrypy.session[common.KEY_BURST_CONFIG] = burst
        result_dict = self.burst_c.load_burst_history()
        burst_history = result_dict['burst_list']
        self.assertEqual(len(burst_history), 2)
        # NOTE: the loop variable shadows the outer `burst` on purpose here;
        # only the names of the returned entries are checked.
        for burst in burst_history:
            self.assertTrue(burst.name in ('burst1', 'burst2'))

    def test_get_selected_burst(self):
        """
        Create a burst, add it to session, then check that get_selected_burst
        returns the same burst. Also check that for an unstored entity we get
        back 'None'.
        """
        burst_entity = BurstConfiguration(self.test_project.id, 'started', {}, 'burst1')
        cherrypy.session[common.KEY_BURST_CONFIG] = burst_entity
        stored_id = self.burst_c.get_selected_burst()
        # An entity that was never persisted has no id, so 'None' is expected.
        self.assertEqual(stored_id, 'None')
        burst_entity = dao.store_entity(burst_entity)
        cherrypy.session[common.KEY_BURST_CONFIG] = burst_entity
        stored_id = self.burst_c.get_selected_burst()
        self.assertEqual(str(stored_id), str(burst_entity.id))

    def test_get_portlet_configurable_interface(self):
        """
        Check that an AdapterConfiguration is returned for the default
        portlet configuration, if we look at index (0, 0) where the
        TimeSeries portlet should be the default.
        """
        self.burst_c.index()
        result = self.burst_c.get_portlet_configurable_interface(0)
        self.assertTrue(common.KEY_PARAMETERS_CONFIG in result)
        self.assertFalse(result[common.KEY_PARAMETERS_CONFIG])
        adapter_config = result['adapters_list']
        # Default TimeSeries portlet should be available, so we expect
        # adapter_config to be a list of AdapterConfiguration with one element
        self.assertEqual(len(adapter_config), 1)
        self.assertTrue(isinstance(adapter_config[0], AdapterConfiguration))

    def test_portlet_tab_display(self):
        """
        Update the default portlet configuration, by storing a TimeSeries
        portlet for all positions. Then check that we get the same
        configuration back.
        """
        self.burst_c.index()
        portlet_id = dao.get_portlet_by_identifier("TimeSeries").id
        one_tab = [[portlet_id, "TimeSeries"] for _ in range(NUMBER_OF_PORTLETS_PER_TAB)]
        full_tabs = [one_tab for _ in range(BurstConfiguration.nr_of_tabs)]
        data = {'tab_portlets_list': json.dumps(full_tabs)}
        result = self.burst_c.portlet_tab_display(**data)
        selected_portlets = result['portlet_tab_list']
        for entry in selected_portlets:
            self.assertEqual(entry.id, portlet_id)

    def test_get_configured_portlets_no_session(self):
        """
        Test that if we have no burst stored in session, an empty portlet
        list is returned.
        """
        result = self.burst_c.get_configured_portlets()
        self.assertTrue('portlet_tab_list' in result)
        self.assertTrue(result['portlet_tab_list'] == [])

    def test_get_configured_portlets_default(self):
        """
        Check that the default configuration holds one portlet and its
        identifier is 'TimeSeries'.
        """
        self.burst_c.index()
        result = self.burst_c.get_configured_portlets()
        self.assertTrue('portlet_tab_list' in result)
        portlets_list = result['portlet_tab_list']
        self.assertEqual(len(portlets_list), 1)
        self.assertTrue(portlets_list[0].algorithm_identifier == 'TimeSeries')

    def test_get_portlet_session_configuration(self):
        """
        Test that the default portlet session configuration is generated as
        expected, with a default TimeSeries portlet and the rest empty.
        """
        self.burst_c.index()
        result = json.loads(self.burst_c.get_portlet_session_configuration())
        portlet_id = dao.get_portlet_by_identifier("TimeSeries").id
        for tab_idx, tab in enumerate(result):
            for index_in_tab, value in enumerate(tab):
                if tab_idx == 0 and index_in_tab == 0:
                    self.assertEqual(value, [portlet_id, "TimeSeries"])
                else:
                    self.assertEqual(value, [-1, "None"])

    def test_save_parameters_no_relaunch(self):
        """
        Test the save parameters for the default TimeSeries portlet and pass
        an empty dictionary as the 'new' data. In this case a relaunch
        should not be required.
        """
        self.burst_c.index()
        self.assertEqual('noRelaunch', self.burst_c.save_parameters(0, portlet_parameters="{}"))

    def test_rename_burst(self):
        """
        Create and store a burst, then rename it and check that it works
        as expected.
        """
        burst = self._store_burst(self.test_project.id, 'started', {'test': 'test'}, 'burst1')
        self.burst_c.rename_burst(burst.id, "test_new_burst_name")
        renamed_burst = dao.get_burst_by_id(burst.id)
        self.assertEqual(renamed_burst.name, "test_new_burst_name")

    def test_launch_burst(self):
        """
        Launch a burst and check that it finishes correctly and before
        timeout (100 seconds).
        """
        self.burst_c.index()
        connectivity = self._burst_create_connectivity()
        launch_params = copy.deepcopy(SIMULATOR_PARAMETERS)
        launch_params['connectivity'] = connectivity.gid
        launch_params['simulation_length'] = '10'
        launch_params = {"simulator_parameters": json.dumps(launch_params)}
        burst_id = json.loads(self.burst_c.launch_burst("new", "test_burst", **launch_params))['id']
        # Poll every half second until the burst leaves the RUNNING state
        # or the timeout is exceeded.
        waited = 1
        timeout = 100
        burst_config = dao.get_burst_by_id(burst_id)
        while burst_config.status == BurstConfiguration.BURST_RUNNING and waited <= timeout:
            sleep(0.5)
            waited += 0.5
            burst_config = dao.get_burst_by_id(burst_config.id)
        if waited > timeout:
            self.fail("Timed out waiting for simulations to finish.")
        if burst_config.status != BurstConfiguration.BURST_FINISHED:
            # Stop the still-running burst before failing, so it does not
            # leak into subsequent tests.
            BurstService().stop_burst(burst_config)
            self.fail("Burst should have finished successfully.")

    def test_load_burst(self):
        """
        Test loading a burst and checking you get the expected dictionary.
        """
        self.burst_c.index()
        burst = self._store_burst(self.test_project.id, 'started', {'test': 'test'}, 'burst1')
        result = json.loads(self.burst_c.load_burst(burst.id))
        self.assertEqual(result["status"], "started")
        self.assertEqual(result['group_gid'], None)
        self.assertEqual(result['selected_tab'], 0)

    def test_load_burst_removed(self):
        """
        Add a burst to session, then remove the burst from the database.
        Try to load the burst and check that it will raise an exception and
        remove it from session.
        """
        burst = self._store_burst(self.test_project.id, 'started', {'test': 'test'}, 'burst1')
        cherrypy.session[common.KEY_BURST_CONFIG] = burst
        burst_id = burst.id
        BurstService().cancel_or_remove_burst(burst_id)
        self.assertRaises(Exception, self.burst_c.load_burst, burst_id)
        self.assertTrue(common.KEY_BURST_CONFIG not in cherrypy.session)

    def test_remove_burst_not_session(self):
        """
        Test removing a burst that is not the one currently stored in
        session. Should just remove it and return a 'done' string.
        """
        burst = self._store_burst(self.test_project.id, 'finished', {'test': 'test'}, 'burst1')
        cherrypy.session[common.KEY_BURST_CONFIG] = burst
        another_burst = self._store_burst(self.test_project.id, 'finished', {'test': 'test'}, 'burst1')
        result = self.burst_c.cancel_or_remove_burst(another_burst.id)
        self.assertEqual(result, 'done')

    def test_remove_burst_in_session(self):
        """
        Test that if we remove the burst that is the current one from the
        session, we get a 'reset-new' string as result.
        """
        burst = self._store_burst(self.test_project.id, 'finished', {'test': 'test'}, 'burst1')
        cherrypy.session[common.KEY_BURST_CONFIG] = burst
        result = self.burst_c.cancel_or_remove_burst(burst.id)
        self.assertEqual(result, 'reset-new')

    def _store_burst(self, proj_id, status, sim_config, name):
        """
        Create and store a burst entity, for the given project id, having
        the given status and simulator parameters config, under the given
        name.
        """
        burst = BurstConfiguration(proj_id, status, sim_config, name)
        burst.prepare_before_save()
        return dao.store_entity(burst)

    def _burst_create_connectivity(self):
        """
        Create a connectivity that will be used in "non-dummy" burst launches
        (with the actual simulator).
        TODO: This is duplicate code from burstservice_test. Should go into
        the 'generic' DataType factory once that is done.
        """
        meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe", DataTypeMetaData.KEY_STATE: "RAW_DATA"}
        algorithm = FlowService().get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
        self.operation = model.Operation(self.test_user.id, self.test_project.id, algorithm.id, json.dumps(''),
                                         meta=json.dumps(meta), status=model.STATUS_STARTED)
        self.operation = dao.store_entity(self.operation)
        storage_path = FilesHelper().get_project_folder(self.test_project, str(self.operation.id))
        connectivity = Connectivity(storage_path=storage_path)
        # 74-region connectivity filled with ones: enough for the simulator
        # to run, contents irrelevant for these tests.
        connectivity.weights = numpy.ones((74, 74))
        connectivity.centres = numpy.ones((74, 3))
        adapter_instance = StoreAdapter([connectivity])
        OperationService().initiate_prelaunch(self.operation, adapter_instance, {})
        return connectivity
class BurstContollerTest(BaseControllersTest):
    """
    Unit tests for burst_controller (duplicate copy of the suite above;
    differs only in `_burst_create_connectivity`, which unpacks a tuple
    from the FlowService lookup).

    NOTE(review): the class name contains a typo ("Contoller"); kept as-is
    so external references to the test class keep working.
    """

    def setUp(self):
        """ Sets up the environment for testing; creates a `BurstController` """
        BaseControllersTest.init(self)
        self.burst_c = BurstController()

    def tearDown(self):
        """ Cleans up the environment (files and database) after testing is done """
        self.cleanup()
        self.clean_database()

    def test_index(self):
        """
        Test that index returns a dict with all required keys. Also check
        that the default portlets are populated, with only the first being
        the TimeSeries portlet and the rest being empty.
        """
        result_dict = self.burst_c.index()
        self.assertTrue('burst_list' in result_dict and result_dict['burst_list'] == [])
        self.assertTrue('available_metrics' in result_dict and isinstance(result_dict['available_metrics'], list))
        self.assertTrue('portletList' in result_dict and isinstance(result_dict['portletList'], list))
        self.assertEqual(result_dict[common.KEY_SECTION], "burst")
        self.assertTrue('burstConfig' in result_dict and isinstance(result_dict['burstConfig'], BurstConfiguration))
        portlets = json.loads(result_dict['selectedPortlets'])
        portlet_id = dao.get_portlet_by_identifier("TimeSeries").id
        # Only slot (0, 0) should hold the default TimeSeries portlet.
        for tab_idx, tab in enumerate(portlets):
            for index_in_tab, value in enumerate(tab):
                if tab_idx == 0 and index_in_tab == 0:
                    self.assertEqual(value, [portlet_id, "TimeSeries"])
                else:
                    self.assertEqual(value, [-1, "None"])
        self.assertTrue(result_dict['draw_hidden_ranges'])

    def test_load_burst_history(self):
        """
        Create two bursts, load the burst history and check that we get back
        the same stored bursts.
        """
        self._store_burst(self.test_project.id, 'started', {'test': 'test'}, 'burst1')
        burst = self._store_burst(self.test_project.id, 'started', {'test': 'test'}, 'burst2')
        cherrypy.session[common.KEY_BURST_CONFIG] = burst
        result_dict = self.burst_c.load_burst_history()
        burst_history = result_dict['burst_list']
        self.assertEqual(len(burst_history), 2)
        for burst in burst_history:
            self.assertTrue(burst.name in ('burst1', 'burst2'))

    def test_get_selected_burst(self):
        """
        Create a burst, add it to session, then check that get_selected_burst
        returns the same burst. Also check that for an unstored entity we get
        back 'None'.
        """
        burst_entity = BurstConfiguration(self.test_project.id, 'started', {}, 'burst1')
        cherrypy.session[common.KEY_BURST_CONFIG] = burst_entity
        stored_id = self.burst_c.get_selected_burst()
        # Unstored entity has no id yet, hence the literal string 'None'.
        self.assertEqual(stored_id, 'None')
        burst_entity = dao.store_entity(burst_entity)
        cherrypy.session[common.KEY_BURST_CONFIG] = burst_entity
        stored_id = self.burst_c.get_selected_burst()
        self.assertEqual(str(stored_id), str(burst_entity.id))

    def test_get_portlet_configurable_interface(self):
        """
        Check that an AdapterConfiguration is returned for the default
        portlet configuration, if we look at index (0, 0) where the
        TimeSeries portlet should be the default.
        """
        self.burst_c.index()
        result = self.burst_c.get_portlet_configurable_interface(0)
        self.assertTrue(common.KEY_PARAMETERS_CONFIG in result)
        self.assertFalse(result[common.KEY_PARAMETERS_CONFIG])
        adapter_config = result['adapters_list']
        # Default TimeSeries portlet should be available, so we expect
        # adapter_config to be a list of AdapterConfiguration with one element
        self.assertEqual(len(adapter_config), 1)
        self.assertTrue(isinstance(adapter_config[0], AdapterConfiguration))

    def test_portlet_tab_display(self):
        """
        Update the default portlet configuration, by storing a TimeSeries
        portlet for all positions. Then check that we get the same
        configuration back.
        """
        self.burst_c.index()
        portlet_id = dao.get_portlet_by_identifier("TimeSeries").id
        one_tab = [[portlet_id, "TimeSeries"] for _ in range(NUMBER_OF_PORTLETS_PER_TAB)]
        full_tabs = [one_tab for _ in range(BurstConfiguration.nr_of_tabs)]
        data = {'tab_portlets_list': json.dumps(full_tabs)}
        result = self.burst_c.portlet_tab_display(**data)
        selected_portlets = result['portlet_tab_list']
        for entry in selected_portlets:
            self.assertEqual(entry.id, portlet_id)

    def test_get_configured_portlets_no_session(self):
        """
        Test that if we have no burst stored in session, an empty portlet
        list is returned.
        """
        result = self.burst_c.get_configured_portlets()
        self.assertTrue('portlet_tab_list' in result)
        self.assertTrue(result['portlet_tab_list'] == [])

    def test_get_configured_portlets_default(self):
        """
        Check that the default configuration holds one portlet and its
        identifier is 'TimeSeries'.
        """
        self.burst_c.index()
        result = self.burst_c.get_configured_portlets()
        self.assertTrue('portlet_tab_list' in result)
        portlets_list = result['portlet_tab_list']
        self.assertEqual(len(portlets_list), 1)
        self.assertTrue(portlets_list[0].algorithm_identifier == 'TimeSeries')

    def test_get_portlet_session_configuration(self):
        """
        Test that the default portlet session configuration is generated as
        expected, with a default TimeSeries portlet and the rest empty.
        """
        self.burst_c.index()
        result = json.loads(self.burst_c.get_portlet_session_configuration())
        portlet_id = dao.get_portlet_by_identifier("TimeSeries").id
        for tab_idx, tab in enumerate(result):
            for index_in_tab, value in enumerate(tab):
                if tab_idx == 0 and index_in_tab == 0:
                    self.assertEqual(value, [portlet_id, "TimeSeries"])
                else:
                    self.assertEqual(value, [-1, "None"])

    def test_save_parameters_no_relaunch(self):
        """
        Test the save parameters for the default TimeSeries portlet and pass
        an empty dictionary as the 'new' data. In this case a relaunch
        should not be required.
        """
        self.burst_c.index()
        self.assertEqual('noRelaunch', self.burst_c.save_parameters(0, portlet_parameters="{}"))

    def test_rename_burst(self):
        """
        Create and store a burst, then rename it and check that it works
        as expected.
        """
        burst = self._store_burst(self.test_project.id, 'started', {'test': 'test'}, 'burst1')
        self.burst_c.rename_burst(burst.id, "test_new_burst_name")
        renamed_burst = dao.get_burst_by_id(burst.id)
        self.assertEqual(renamed_burst.name, "test_new_burst_name")

    def test_launch_burst(self):
        """
        Launch a burst and check that it finishes correctly and before
        timeout (100 seconds).
        """
        self.burst_c.index()
        connectivity = self._burst_create_connectivity()
        launch_params = copy.deepcopy(SIMULATOR_PARAMETERS)
        launch_params['connectivity'] = connectivity.gid
        launch_params['simulation_length'] = '10'
        launch_params = {"simulator_parameters": json.dumps(launch_params)}
        burst_id = json.loads(self.burst_c.launch_burst("new", "test_burst", **launch_params))['id']
        # Poll every half second until the burst stops RUNNING or we time out.
        waited = 1
        timeout = 100
        burst_config = dao.get_burst_by_id(burst_id)
        while burst_config.status == BurstConfiguration.BURST_RUNNING and waited <= timeout:
            sleep(0.5)
            waited += 0.5
            burst_config = dao.get_burst_by_id(burst_config.id)
        if waited > timeout:
            self.fail("Timed out waiting for simulations to finish.")
        if burst_config.status != BurstConfiguration.BURST_FINISHED:
            # Stop the still-running burst so it does not leak into other tests.
            BurstService().stop_burst(burst_config)
            self.fail("Burst should have finished successfully.")

    def test_load_burst(self):
        """
        Test loading a burst and checking you get the expected dictionary.
        """
        self.burst_c.index()
        burst = self._store_burst(self.test_project.id, 'started', {'test': 'test'}, 'burst1')
        result = json.loads(self.burst_c.load_burst(burst.id))
        self.assertEqual(result["status"], "started")
        self.assertEqual(result['group_gid'], None)
        self.assertEqual(result['selected_tab'], 0)

    def test_load_burst_removed(self):
        """
        Add a burst to session, then remove the burst from the database.
        Try to load the burst and check that it will raise an exception and
        remove it from session.
        """
        burst = self._store_burst(self.test_project.id, 'started', {'test': 'test'}, 'burst1')
        cherrypy.session[common.KEY_BURST_CONFIG] = burst
        burst_id = burst.id
        BurstService().cancel_or_remove_burst(burst_id)
        self.assertRaises(Exception, self.burst_c.load_burst, burst_id)
        self.assertTrue(common.KEY_BURST_CONFIG not in cherrypy.session)

    def test_remove_burst_not_session(self):
        """
        Test removing a burst that is not the one currently stored in
        session. Should just remove it and return a 'done' string.
        """
        burst = self._store_burst(self.test_project.id, 'finished', {'test': 'test'}, 'burst1')
        cherrypy.session[common.KEY_BURST_CONFIG] = burst
        another_burst = self._store_burst(self.test_project.id, 'finished', {'test': 'test'}, 'burst1')
        result = self.burst_c.cancel_or_remove_burst(another_burst.id)
        self.assertEqual(result, 'done')

    def test_remove_burst_in_session(self):
        """
        Test that if we remove the burst that is the current one from the
        session, we get a 'reset-new' string as result.
        """
        burst = self._store_burst(self.test_project.id, 'finished', {'test': 'test'}, 'burst1')
        cherrypy.session[common.KEY_BURST_CONFIG] = burst
        result = self.burst_c.cancel_or_remove_burst(burst.id)
        self.assertEqual(result, 'reset-new')

    def _store_burst(self, proj_id, status, sim_config, name):
        """
        Create and store a burst entity, for the given project id, having
        the given status and simulator parameters config, under the given
        name.
        """
        burst = BurstConfiguration(proj_id, status, sim_config, name)
        burst.prepare_before_save()
        return dao.store_entity(burst)

    def _burst_create_connectivity(self):
        """
        Create a connectivity that will be used in "non-dummy" burst launches
        (with the actual simulator).
        TODO: This is duplicate code from burstservice_test. Should go into
        the 'generic' DataType factory once that is done.
        """
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW_DATA"
        }
        # NOTE(review): `algorithm` is unused here; only the group id feeds
        # the Operation — presumably intentional for this API version.
        algorithm, algo_group = FlowService().get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
        self.operation = model.Operation(self.test_user.id, self.test_project.id, algo_group.id, json.dumps(''),
                                         meta=json.dumps(meta), status=model.STATUS_STARTED)
        self.operation = dao.store_entity(self.operation)
        storage_path = FilesHelper().get_project_folder(self.test_project, str(self.operation.id))
        connectivity = Connectivity(storage_path=storage_path)
        # 74-region dummy connectivity; contents are irrelevant to the tests.
        connectivity.weights = numpy.ones((74, 74))
        connectivity.centres = numpy.ones((74, 3))
        adapter_instance = StoreAdapter([connectivity])
        OperationService().initiate_prelaunch(self.operation, adapter_instance, {})
        return connectivity
class TestFlowContoller(BaseControllersTest):
    """
    Unit tests for FlowController.

    NOTE(review): the class name contains a typo ("Contoller"); kept as-is
    so test discovery and external references keep working.
    """

    def setup_method(self):
        """ Sets up the environment for testing; creates the controllers and services under test. """
        self.init()
        self.flow_c = FlowController()
        self.burst_c = BurstController()
        self.operation_service = OperationService()

    def teardown_method(self):
        """ Cleans up the testing environment (files and database). """
        self.cleanup()
        self.clean_database()

    @pytest.fixture()
    def long_burst_launch(self, connectivity_factory):
        """
        Fixture returning a factory that launches a long-running burst
        (optionally a range of simulations) and returns the stored
        BurstConfiguration for it.
        """

        def build(is_range=False):
            self.burst_c.index()
            connectivity = connectivity_factory[1]
            launch_params = copy.deepcopy(SIMULATOR_PARAMETERS)
            launch_params['connectivity'] = dao.get_datatype_by_id(connectivity.id).gid
            # Long simulation, so operations are still running when the
            # stop/remove tests reach them.
            launch_params['simulation_length'] = '10000'
            if is_range:
                launch_params['conduction_speed'] = '[10,15,20]'
                launch_params[RANGE_PARAMETER_1] = 'conduction_speed'
            launch_params = {"simulator_parameters": json.dumps(launch_params)}
            burst_id = json.loads(self.burst_c.launch_burst("new", "test_burst", **launch_params))['id']
            return dao.get_burst_by_id(burst_id)

        return build

    def test_context_selected(self):
        """
        Remove the project from the CherryPy session and check that you are
        redirected to the projects page.
        """
        del cherrypy.session[common.KEY_PROJECT]
        self._expect_redirect('/project/viewall', self.flow_c.step_analyzers)

    def test_valid_step(self):
        """
        Check that a submenu with the available analyzers is generated and
        the result page belongs to the 'analyze' section.
        """
        result_dict = self.flow_c.step_analyzers()
        assert common.KEY_SUBMENU_LIST in result_dict, \
            "Expect to have a submenu with available algorithms for category."
        assert result_dict["section_name"] == 'analyze'

    def test_step_connectivity(self):
        """
        Check that the correct section name and connectivity sub-menu are
        returned for the connectivity step.
        """
        result_dict = self.flow_c.step_connectivity()
        assert result_dict['section_name'] == 'connectivity'
        assert result_dict['submenu_list'] == self.flow_c.connectivity_submenu

    def test_default(self):
        """
        Test the default method from the step controllers. Check that the
        submit link is correct, that a mainContent is present in the result
        dict and that the isAdapter flag is set to true.
        """
        cherrypy.request.method = "GET"
        categories = dao.get_algorithm_categories()
        for categ in categories:
            algo_groups = dao.get_adapters_from_categories([categ.id])
            for algo in algo_groups:
                result_dict = self.flow_c.default(categ.id, algo.id)
                assert result_dict[common.KEY_SUBMIT_LINK] == '/flow/%i/%i' % (categ.id, algo.id)
                assert 'mainContent' in result_dict
                assert result_dict['isAdapter']

    def test_default_cancel(self):
        """
        On cancel we should get a redirect to the back page link.
        """
        cherrypy.request.method = "POST"
        categories = dao.get_algorithm_categories()
        algo_groups = dao.get_adapters_from_categories([categories[0].id])
        self._expect_redirect('/project/viewoperations/%i' % self.test_project.id, self.flow_c.default,
                              categories[0].id, algo_groups[0].id, cancel=True, back_page='operations')

    def test_default_invalid_key(self):
        """
        Pass invalid keys for adapter and step and check you get a redirect
        to the tvb entry page with the error flag set.
        """
        self._expect_redirect('/tvb?error=True', self.flow_c.default, 'invalid', 'invalid')

    def test_read_datatype_attribute(self, datatype_with_storage_factory):
        """
        Read an attribute from a datatype.
        """
        dt = datatype_with_storage_factory("test_subject", "RAW_STATE", 'this is the stored data'.split())
        returned_data = self.flow_c.read_datatype_attribute(dt.gid, "string_data")
        assert returned_data == '["this", "is", "the", "stored", "data"]'

    def test_read_datatype_attribute_method_call(self, datatype_with_storage_factory):
        """
        Call a method on the given datatype through the controller.
        """
        dt = datatype_with_storage_factory("test_subject", "RAW_STATE", 'this is the stored data'.split())
        args = {'length': 101}
        returned_data = self.flow_c.read_datatype_attribute(dt.gid, 'return_test_data', **args)
        assert returned_data == str(list(range(101)))

    def test_get_simple_adapter_interface(self):
        """
        Check that the simple interface for a known adapter matches its
        declared input tree.
        """
        adapter = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
        result = self.flow_c.get_simple_adapter_interface(adapter.id)
        expected_interface = TestAdapter1().get_input_tree()
        assert result['inputList'] == expected_interface

    def _wait_for_burst_ops(self, burst_config):
        """ Sleeps until some operation of the burst is created (or timeout expires). """
        waited = 1
        timeout = 50
        operations = dao.get_operations_in_burst(burst_config.id)
        while not len(operations) and waited <= timeout:
            sleep(1)
            waited += 1
            operations = dao.get_operations_in_burst(burst_config.id)
        # FIX: dropped a redundant duplicate query of the same operations here.
        return operations

    def test_stop_burst_operation(self, long_burst_launch):
        """ Stopping a single burst operation should mark it canceled. """
        # FIX: the fixture provides a factory; it must be CALLED (the
        # original assigned the function itself, cf. the `_group` variants).
        burst_config = long_burst_launch()
        operation = self._wait_for_burst_ops(burst_config)[0]
        assert not operation.has_finished
        self.flow_c.stop_burst_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        assert operation.status == STATUS_CANCELED

    def test_stop_burst_operation_group(self, long_burst_launch):
        """ Stopping a burst operation group should cancel all its operations. """
        burst_config = long_burst_launch(True)
        operations = self._wait_for_burst_ops(burst_config)
        operations_group_id = 0
        for operation in operations:
            assert not operation.has_finished
            operations_group_id = operation.fk_operation_group
        self.flow_c.stop_burst_operation(operations_group_id, 1, False)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            assert operation.status == STATUS_CANCELED

    def test_remove_burst_operation(self, long_burst_launch):
        """ Removing a single burst operation should delete it from the database. """
        # FIX: call the fixture-provided factory (see test_stop_burst_operation).
        burst_config = long_burst_launch()
        operation = self._wait_for_burst_ops(burst_config)[0]
        assert not operation.has_finished
        self.flow_c.stop_burst_operation(operation.id, 0, True)
        operation = dao.try_get_operation_by_id(operation.id)
        assert operation is None

    def test_remove_burst_operation_group(self, long_burst_launch):
        """ Removing a burst operation group should delete all its operations. """
        burst_config = long_burst_launch(True)
        operations = self._wait_for_burst_ops(burst_config)
        operations_group_id = 0
        for operation in operations:
            assert not operation.has_finished
            operations_group_id = operation.fk_operation_group
        self.flow_c.stop_burst_operation(operations_group_id, 1, True)
        for operation in operations:
            operation = dao.try_get_operation_by_id(operation.id)
            assert operation is None

    def _launch_test_algo_on_cluster(self, **data):
        """ Prepare and send to the cluster the operations for a test adapter; returns them. """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        algo = adapter.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                                  algo_category, {}, **data)
        self.operation_service._send_to_cluster(operations, adapter)
        return operations

    def test_stop_operations(self):
        """ Stopping a single (non-burst) operation should mark it canceled. """
        data = {"test1_val1": 5, 'test1_val2': 5}
        operations = self._launch_test_algo_on_cluster(**data)
        operation = dao.get_operation_by_id(operations[0].id)
        assert not operation.has_finished
        self.flow_c.stop_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        assert operation.status == STATUS_CANCELED

    def test_stop_operations_group(self):
        """ Stopping an operation group should cancel every operation in it. """
        data = {RANGE_PARAMETER_1: "test1_val1", "test1_val1": '5,6,7', 'test1_val2': 5}
        operations = self._launch_test_algo_on_cluster(**data)
        operation_group_id = 0
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            assert not operation.has_finished
            operation_group_id = operation.fk_operation_group
        self.flow_c.stop_operation(operation_group_id, 1, False)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            assert operation.status == STATUS_CANCELED
class FlowContollerTest(BaseControllersTest):
    """
    Unit tests for FlowController (older, unittest-style copy of the suite).

    NOTE(review): the class name contains a typo ("Contoller"); kept as-is
    so external references to the test class keep working. The assertions
    here (e.g. `str(range(101))`) assume Python 2 semantics, where range()
    returns a list — TODO confirm the target interpreter version.
    """

    def setUp(self):
        """ Sets up the environment for testing; creates a `FlowController` """
        self.init()
        self.flow_c = FlowController()
        self.burst_c = BurstController()
        self.operation_service = OperationService()

    def tearDown(self):
        """ Cleans up the testing environment """
        self.cleanup()
        self.clean_database()

    def test_context_selected(self):
        """
        Remove the project from the CherryPy session and check that you are
        redirected to the projects page.
        """
        del cherrypy.session[common.KEY_PROJECT]
        self._expect_redirect('/project/viewall', self.flow_c.step)

    def test_invalid_step(self):
        """
        Pass an invalid step and make sure we are redirected to the tvb
        start page.
        """
        self._expect_redirect('/tvb', self.flow_c.step)

    def test_valid_step(self):
        """
        For all algorithm categories check that a submenu is generated and
        the result page has its title given by the category name.
        """
        categories = dao.get_algorithm_categories()
        for categ in categories:
            result_dict = self.flow_c.step(categ.id)
            self.assertTrue(common.KEY_SUBMENU_LIST in result_dict,
                            "Expect to have a submenu with available algorithms for category.")
            self.assertEqual(result_dict["section_name"], categ.displayname.lower())

    def test_step_connectivity(self):
        """
        Check that the correct section name and connectivity sub-menu are
        returned for the connectivity step.
        """
        result_dict = self.flow_c.step_connectivity()
        self.assertEqual(result_dict['section_name'], 'connectivity')
        self.assertEqual(result_dict['submenu_list'], self.flow_c.connectivity_submenu)

    def test_default(self):
        """
        Test the default method from the step controllers. Check that the
        submit link is correct, that a mainContent is present in the result
        dict and that the isAdapter flag is set to true.
        """
        cherrypy.request.method = "GET"
        categories = dao.get_algorithm_categories()
        for categ in categories:
            algo_groups = dao.get_groups_by_categories([categ.id])
            for algo in algo_groups:
                result_dict = self.flow_c.default(categ.id, algo.id)
                self.assertEqual(result_dict[common.KEY_SUBMIT_LINK], '/flow/%i/%i' % (categ.id, algo.id))
                self.assertTrue('mainContent' in result_dict)
                self.assertTrue(result_dict['isAdapter'])

    def test_default_cancel(self):
        """
        On cancel we should get a redirect to the back page link.
        """
        cherrypy.request.method = "POST"
        categories = dao.get_algorithm_categories()
        algo_groups = dao.get_groups_by_categories([categories[0].id])
        self._expect_redirect('/project/viewoperations/%i' % self.test_project.id, self.flow_c.default,
                              categories[0].id, algo_groups[0].id, cancel=True, back_page='operations')

    def test_default_invalid_key(self):
        """
        Pass invalid keys for adapter and step and check you get a redirect
        to the tvb entry page with the error flag set.
        """
        self._expect_redirect('/tvb?error=True', self.flow_c.default, 'invalid', 'invalid')

    def test_read_datatype_attribute(self):
        """
        Read an attribute from a datatype.
        """
        dt = DatatypesFactory().create_datatype_with_storage("test_subject", "RAW_STATE",
                                                             'this is the stored data'.split())
        returned_data = self.flow_c.read_datatype_attribute(dt.gid, "string_data")
        self.assertEqual(returned_data, '["this", "is", "the", "stored", "data"]')

    def test_read_datatype_attribute_method_call(self):
        """
        Call a method on the given datatype through the controller.
        """
        dt = DatatypesFactory().create_datatype_with_storage("test_subject", "RAW_STATE",
                                                             'this is the stored data'.split())
        args = {'length': 101}
        returned_data = self.flow_c.read_datatype_attribute(dt.gid, 'return_test_data', **args)
        # NOTE(review): Python 2 only — under Python 3 str(range(101)) is
        # "range(0, 101)" and this comparison would fail.
        self.assertTrue(returned_data == str(range(101)))

    def test_get_simple_adapter_interface(self):
        """
        Check that the simple interface for a known adapter matches its
        declared input tree.
        """
        adapter = dao.find_group('tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
        result = self.flow_c.get_simple_adapter_interface(adapter.id)
        expected_interface = TestAdapter1().get_input_tree()
        self.assertEqual(result['inputList'], expected_interface)

    def _long_burst_launch(self, is_range=False):
        """
        Launch a long-running burst (optionally a range of simulations) and
        return its stored BurstConfiguration.
        """
        self.burst_c.index()
        connectivity = DatatypesFactory().create_connectivity()[1]
        launch_params = copy.deepcopy(SIMULATOR_PARAMETERS)
        launch_params['connectivity'] = dao.get_datatype_by_id(connectivity.id).gid
        # Long simulation length so the operations are still running when
        # the stop/remove tests reach them.
        if not is_range:
            launch_params['simulation_length'] = '10000'
        else:
            launch_params['simulation_length'] = '[10000,10001,10002]'
            launch_params[model.RANGE_PARAMETER_1] = 'simulation_length'
        launch_params = {"simulator_parameters": json.dumps(launch_params)}
        burst_id = json.loads(self.burst_c.launch_burst("new", "test_burst", **launch_params))['id']
        return dao.get_burst_by_id(burst_id)

    def _wait_for_burst_ops(self, burst_config):
        """ sleeps until some operation of the burst is created"""
        waited = 1
        timeout = 50
        operations = dao.get_operations_in_burst(burst_config.id)
        while not len(operations) and waited <= timeout:
            sleep(1)
            waited += 1
            operations = dao.get_operations_in_burst(burst_config.id)
        # NOTE(review): this final query re-fetches the same list and looks
        # redundant; kept for byte-identical behavior.
        operations = dao.get_operations_in_burst(burst_config.id)
        return operations

    def test_stop_burst_operation(self):
        """ Stopping a single burst operation should mark it canceled. """
        burst_config = self._long_burst_launch()
        operation = self._wait_for_burst_ops(burst_config)[0]
        self.assertFalse(operation.has_finished)
        self.flow_c.stop_burst_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        self.assertEqual(operation.status, model.STATUS_CANCELED)

    def test_stop_burst_operation_group(self):
        """ Stopping a burst operation group should cancel all its operations. """
        burst_config = self._long_burst_launch(True)
        operations = self._wait_for_burst_ops(burst_config)
        operations_group_id = 0
        for operation in operations:
            self.assertFalse(operation.has_finished)
            operations_group_id = operation.fk_operation_group
        self.flow_c.stop_burst_operation(operations_group_id, 1, False)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            self.assertEqual(operation.status, model.STATUS_CANCELED)

    def test_remove_burst_operation(self):
        """ Removing a single burst operation should delete it from the database. """
        burst_config = self._long_burst_launch()
        operation = self._wait_for_burst_ops(burst_config)[0]
        self.assertFalse(operation.has_finished)
        self.flow_c.stop_burst_operation(operation.id, 0, True)
        operation = dao.try_get_operation_by_id(operation.id)
        self.assertTrue(operation is None)

    def test_remove_burst_operation_group(self):
        """ Removing a burst operation group should delete all its operations. """
        burst_config = self._long_burst_launch(True)
        operations = self._wait_for_burst_ops(burst_config)
        operations_group_id = 0
        for operation in operations:
            self.assertFalse(operation.has_finished)
            operations_group_id = operation.fk_operation_group
        self.flow_c.stop_burst_operation(operations_group_id, 1, True)
        for operation in operations:
            operation = dao.try_get_operation_by_id(operation.id)
            self.assertTrue(operation is None)

    def _launch_test_algo_on_cluster(self, **data):
        """ Prepare and send to the cluster the operations for a test adapter; returns them. """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        algo_group = adapter.algorithm_group
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        algo = dao.get_algorithm_by_group(algo_group.id)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                                  algo_category, {}, ABCAdapter.LAUNCH_METHOD, **data)
        self.operation_service._send_to_cluster(operations, adapter)
        return operations

    def test_stop_operations(self):
        """ Stopping a single (non-burst) operation should mark it canceled. """
        data = {"test1_val1": 5, 'test1_val2': 5}
        operations = self._launch_test_algo_on_cluster(**data)
        operation = dao.get_operation_by_id(operations[0].id)
        self.assertFalse(operation.has_finished)
        self.flow_c.stop_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        self.assertEqual(operation.status, model.STATUS_CANCELED)

    def test_stop_operations_group(self):
        """ Stopping an operation group should cancel every operation in it. """
        data = {model.RANGE_PARAMETER_1: "test1_val1", "test1_val1": '5,6,7', 'test1_val2': 5}
        operations = self._launch_test_algo_on_cluster(**data)
        operation_group_id = 0
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            self.assertFalse(operation.has_finished)
            operation_group_id = operation.fk_operation_group
        self.flow_c.stop_operation(operation_group_id, 1, False)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            self.assertEqual(operation.status, model.STATUS_CANCELED)