class TestFlowController(BaseControllersTest):
    """ Unit tests for FlowController """

    def setup_method(self):
        """ Sets up the environment for testing; creates a `FlowController` """
        self.init()
        self.flow_c = FlowController()
        self.burst_c = SimulatorController()
        self.operation_service = OperationService()

    def teardown_method(self):
        """ Cleans up the testing environment """
        self.cleanup()
        self.clean_database()

    def test_context_selected(self):
        """
        Remove the project from CherryPy session and check that you are redirected to projects page.
        """
        del cherrypy.session[common.KEY_PROJECT]
        self._expect_redirect('/project/viewall', self.flow_c.step_analyzers)

    def test_valid_step(self):
        """
        For all algorithm categories check that a submenu is generated and the
        result page has its title given by the category name.
        """
        result_dict = self.flow_c.step_analyzers()
        assert common.KEY_SUBMENU_LIST in result_dict, \
            "Expect to have a submenu with available algorithms for category."
        assert result_dict["section_name"] == 'analyze'

    def test_step_connectivity(self):
        """
        Check that the correct section name and connectivity sub-menu are returned for the connectivity step.
        """
        result_dict = self.flow_c.step_connectivity()
        assert result_dict['section_name'] == 'connectivity'
        assert result_dict['submenu_list'] == self.flow_c.connectivity_submenu

    def test_default(self):
        """
        Test default method from step controllers. Check that the submit link is ok, that a mainContent
        is present in result dict and that the isAdapter flag is set to true.
        """
        cherrypy.request.method = "GET"
        categories = dao.get_algorithm_categories()
        for categ in categories:
            # Ignore creators, as those won't go through this flow
            if categ.displayname in [CreateAlgorithmCategoryConfig.category_name]:
                continue
            algo_groups = dao.get_adapters_from_categories([categ.id])
            for algo in algo_groups:
                result_dict = self.flow_c.default(categ.id, algo.id)
                assert result_dict[common.KEY_SUBMIT_LINK] == '/flow/%i/%i' % (categ.id, algo.id)
                assert 'mainContent' in result_dict
                assert result_dict['isAdapter']

    def test_default_cancel(self):
        """
        On cancel we should get a redirect to the back page link.
        """
        cherrypy.request.method = "POST"
        categories = dao.get_algorithm_categories()
        algo_groups = dao.get_adapters_from_categories([categories[0].id])
        self._expect_redirect('/project/viewoperations/%i' % self.test_project.id, self.flow_c.default,
                              categories[0].id, algo_groups[0].id, cancel=True, back_page='operations')

    def test_default_invalid_key(self):
        """
        Pass invalid keys for adapter and step and check that you get redirected to the tvb entry page with error set.
        """
        self._expect_redirect('/tvb?error=True', self.flow_c.default, 'invalid', 'invalid')

    def test_read_datatype_attribute(self, dummy_datatype_index_factory):
        """
        Read an attribute from a datatype.
        """
        dt = dummy_datatype_index_factory(row1='This is stored data')
        dt.subject = "test_subject"
        dt.state = "RAW_STATE"
        returned_data = self.flow_c.read_datatype_attribute(dt.gid, "row1")
        assert returned_data == '"This is stored data"'

    def test_read_datatype_attribute_method_call(self, dummy_datatype_index_factory):
        """
        Call method on given datatype.
        """
        dt = dummy_datatype_index_factory(row1='This is stored data')
        args = {'length': 101}
        returned_data = self.flow_c.read_datatype_attribute(dt.gid, 'return_test_data', **args)
        assert returned_data.replace('"', '') == " ".join(str(x) for x in range(101))

    def test_get_simple_adapter_interface(self, test_adapter_factory):
        algo = test_adapter_factory()
        form = TestAdapter1Form()
        adapter = TestFactory.create_adapter('tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
        adapter.submit_form(form)
        result = self.flow_c.get_simple_adapter_interface(algo.id)
        expected_interface = adapter.get_form()
        found_form = result['adapter_form']['adapter_form']
        assert isinstance(result['adapter_form'], dict)
        assert isinstance(found_form, TestAdapter1Form)
        assert found_form.test1_val1.value == expected_interface.test1_val1.value
        assert found_form.test1_val2.value == expected_interface.test1_val2.value

    def test_stop_burst_operation(self, simulation_launch):
        operation = simulation_launch(self.test_user, self.test_project, 1000)
        assert not operation.has_finished
        self.flow_c.cancel_or_remove_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        assert operation.status == STATUS_CANCELED

    def test_stop_burst_operation_group(self, simulation_launch):
        first_op = simulation_launch(self.test_user, self.test_project, 1000, True)
        operations_group_id = first_op.fk_operation_group
        assert not first_op.has_finished
        self.flow_c.cancel_or_remove_operation(operations_group_id, 1, False)
        operations = dao.get_operations_in_group(operations_group_id)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            assert operation.status == STATUS_CANCELED

    def test_remove_burst_operation(self, simulation_launch):
        operation = simulation_launch(self.test_user, self.test_project, 1000)
        assert not operation.has_finished
        self.flow_c.cancel_or_remove_operation(operation.id, 0, True)
        operation = dao.try_get_operation_by_id(operation.id)
        assert operation is None

    def test_remove_burst_operation_group(self, simulation_launch):
        first_op = simulation_launch(self.test_user, self.test_project, 1000, True)
        operations_group_id = first_op.fk_operation_group
        assert not first_op.has_finished
        self.flow_c.cancel_or_remove_operation(operations_group_id, 1, True)
        operations = dao.get_operations_in_group(operations_group_id)
        for operation in operations:
            operation = dao.try_get_operation_by_id(operation.id)
            assert operation is None

    def _asynch_launch_simple_op(self):
        adapter = TestFactory.create_adapter('tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
        view_model = TestModel()
        view_model.test1_val1 = 5
        view_model.test1_val2 = 6
        algo = adapter.stored_adapter
        operation = self.operation_service.prepare_operation(self.test_user.id, self.test_project, algo,
                                                             view_model=view_model)
        self.operation_service._send_to_cluster(operation, adapter)
        return operation

    def test_stop_operation(self):
        operation = self._asynch_launch_simple_op()
        operation = dao.get_operation_by_id(operation.id)
        assert not operation.has_finished
        self.flow_c.cancel_or_remove_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        assert operation.status == STATUS_CANCELED

    def test_stop_operations_group(self, test_adapter_factory, datatype_group_factory):
        group = datatype_group_factory(status=STATUS_STARTED, store_vm=True)
        operations = dao.get_operations_in_group(group.fk_from_operation)
        operation_group_id = 0
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            assert not operation.has_finished
            operation_group_id = operation.fk_operation_group
        self.flow_c.cancel_or_remove_operation(operation_group_id, 1, False)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            assert operation.status == STATUS_CANCELED
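# The class above relies on pytest fixtures (dummy_datatype_index_factory, simulation_launch,
# test_adapter_factory, datatype_group_factory) that are expected to come from a shared conftest.py.
# The snippet below is a hypothetical, minimal illustration of that factory-fixture pattern only;
# DummyIndex and its fields are placeholders, not the real TVB index API.
import pytest


class DummyIndex(object):
    """ Stand-in for a stored datatype index, used only in this sketch. """

    def __init__(self, **fields):
        self.gid = "dummy-gid"
        self.subject = "John Doe"
        self.state = "RAW_DATA"
        for name, value in fields.items():
            setattr(self, name, value)


@pytest.fixture
def dummy_datatype_index_factory():
    # Factory fixture: the test calls it with whatever attributes it needs, e.g. row1='...'
    def build(**fields):
        return DummyIndex(**fields)

    return build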
class TestFlowContoller(BaseControllersTest):
    """ Unit tests for FlowController """

    def setup_method(self):
        """ Sets up the environment for testing; creates a `FlowController` """
        self.init()
        self.flow_c = FlowController()
        self.burst_c = BurstController()
        self.operation_service = OperationService()

    def teardown_method(self):
        """ Cleans up the testing environment """
        self.cleanup()
        self.clean_database()

    @pytest.fixture()
    def long_burst_launch(self, connectivity_factory):
        def build(is_range=False):
            self.burst_c.index()
            connectivity = connectivity_factory[1]
            launch_params = copy.deepcopy(SIMULATOR_PARAMETERS)
            launch_params['connectivity'] = dao.get_datatype_by_id(connectivity.id).gid
            launch_params['simulation_length'] = '10000'
            if is_range:
                launch_params['conduction_speed'] = '[10,15,20]'
                launch_params[RANGE_PARAMETER_1] = 'conduction_speed'
            launch_params = {"simulator_parameters": json.dumps(launch_params)}
            burst_id = json.loads(self.burst_c.launch_burst("new", "test_burst", **launch_params))['id']
            return dao.get_burst_by_id(burst_id)

        return build

    def test_context_selected(self):
        """
        Remove the project from CherryPy session and check that you are redirected to projects page.
        """
        del cherrypy.session[common.KEY_PROJECT]
        self._expect_redirect('/project/viewall', self.flow_c.step_analyzers)

    def test_valid_step(self):
        """
        For all algorithm categories check that a submenu is generated and the
        result page has its title given by the category name.
        """
        result_dict = self.flow_c.step_analyzers()
        assert common.KEY_SUBMENU_LIST in result_dict, \
            "Expect to have a submenu with available algorithms for category."
        assert result_dict["section_name"] == 'analyze'

    def test_step_connectivity(self):
        """
        Check that the correct section name and connectivity sub-menu are returned for the connectivity step.
        """
        result_dict = self.flow_c.step_connectivity()
        assert result_dict['section_name'] == 'connectivity'
        assert result_dict['submenu_list'] == self.flow_c.connectivity_submenu

    def test_default(self):
        """
        Test default method from step controllers. Check that the submit link is ok, that a mainContent
        is present in result dict and that the isAdapter flag is set to true.
        """
        cherrypy.request.method = "GET"
        categories = dao.get_algorithm_categories()
        for categ in categories:
            algo_groups = dao.get_adapters_from_categories([categ.id])
            for algo in algo_groups:
                result_dict = self.flow_c.default(categ.id, algo.id)
                assert result_dict[common.KEY_SUBMIT_LINK] == '/flow/%i/%i' % (categ.id, algo.id)
                assert 'mainContent' in result_dict
                assert result_dict['isAdapter']

    def test_default_cancel(self):
        """
        On cancel we should get a redirect to the back page link.
        """
        cherrypy.request.method = "POST"
        categories = dao.get_algorithm_categories()
        algo_groups = dao.get_adapters_from_categories([categories[0].id])
        self._expect_redirect('/project/viewoperations/%i' % self.test_project.id, self.flow_c.default,
                              categories[0].id, algo_groups[0].id, cancel=True, back_page='operations')

    def test_default_invalid_key(self):
        """
        Pass invalid keys for adapter and step and check that you get redirected to the tvb entry page with error set.
        """
        self._expect_redirect('/tvb?error=True', self.flow_c.default, 'invalid', 'invalid')

    def test_read_datatype_attribute(self, datatype_with_storage_factory):
        """
        Read an attribute from a datatype.
        """
        dt = datatype_with_storage_factory("test_subject", "RAW_STATE", 'this is the stored data'.split())
        returned_data = self.flow_c.read_datatype_attribute(dt.gid, "string_data")
        assert returned_data == '["this", "is", "the", "stored", "data"]'

    def test_read_datatype_attribute_method_call(self, datatype_with_storage_factory):
        """
        Call method on given datatype.
        """
        dt = datatype_with_storage_factory("test_subject", "RAW_STATE", 'this is the stored data'.split())
        args = {'length': 101}
        returned_data = self.flow_c.read_datatype_attribute(dt.gid, 'return_test_data', **args)
        assert returned_data == str(list(range(101)))

    def test_get_simple_adapter_interface(self):
        adapter = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
        result = self.flow_c.get_simple_adapter_interface(adapter.id)
        expected_interface = TestAdapter1().get_input_tree()
        assert result['inputList'] == expected_interface

    def _wait_for_burst_ops(self, burst_config):
        """ Sleeps until some operation of the burst is created. """
        waited = 1
        timeout = 50
        operations = dao.get_operations_in_burst(burst_config.id)
        while not len(operations) and waited <= timeout:
            sleep(1)
            waited += 1
            operations = dao.get_operations_in_burst(burst_config.id)
        operations = dao.get_operations_in_burst(burst_config.id)
        return operations

    def test_stop_burst_operation(self, long_burst_launch):
        burst_config = long_burst_launch()
        operation = self._wait_for_burst_ops(burst_config)[0]
        assert not operation.has_finished
        self.flow_c.stop_burst_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        assert operation.status == STATUS_CANCELED

    def test_stop_burst_operation_group(self, long_burst_launch):
        burst_config = long_burst_launch(True)
        operations = self._wait_for_burst_ops(burst_config)
        operations_group_id = 0
        for operation in operations:
            assert not operation.has_finished
            operations_group_id = operation.fk_operation_group
        self.flow_c.stop_burst_operation(operations_group_id, 1, False)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            assert operation.status == STATUS_CANCELED

    def test_remove_burst_operation(self, long_burst_launch):
        burst_config = long_burst_launch()
        operation = self._wait_for_burst_ops(burst_config)[0]
        assert not operation.has_finished
        self.flow_c.stop_burst_operation(operation.id, 0, True)
        operation = dao.try_get_operation_by_id(operation.id)
        assert operation is None

    def test_remove_burst_operation_group(self, long_burst_launch):
        burst_config = long_burst_launch(True)
        operations = self._wait_for_burst_ops(burst_config)
        operations_group_id = 0
        for operation in operations:
            assert not operation.has_finished
            operations_group_id = operation.fk_operation_group
        self.flow_c.stop_burst_operation(operations_group_id, 1, True)
        for operation in operations:
            operation = dao.try_get_operation_by_id(operation.id)
            assert operation is None

    def _launch_test_algo_on_cluster(self, **data):
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        algo = adapter.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id,
                                                                  algo, algo_category, {}, **data)
        self.operation_service._send_to_cluster(operations, adapter)
        return operations

    def test_stop_operations(self):
        data = {"test1_val1": 5, 'test1_val2': 5}
        operations = self._launch_test_algo_on_cluster(**data)
        operation = dao.get_operation_by_id(operations[0].id)
        assert not operation.has_finished
        self.flow_c.stop_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        assert operation.status == STATUS_CANCELED

    def test_stop_operations_group(self):
        data = {RANGE_PARAMETER_1: "test1_val1", "test1_val1": '5,6,7', 'test1_val2': 5}
        operations = self._launch_test_algo_on_cluster(**data)
        operation_group_id = 0
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            assert not operation.has_finished
            operation_group_id = operation.fk_operation_group
        self.flow_c.stop_operation(operation_group_id, 1, False)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            assert operation.status == STATUS_CANCELED
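# Both controller test classes above call self._expect_redirect(...), a helper assumed to be defined
# on BaseControllersTest. A minimal sketch of how such a helper can work with CherryPy follows; the
# real implementation may differ. CherryPy signals redirects by raising HTTPRedirect, so the helper
# calls the page method and asserts on the raised exception's target URL.
import cherrypy
import pytest


def expect_redirect(expected_page, method, *args, **kwargs):
    """ Assert that calling `method` redirects to `expected_page` (hypothetical helper). """
    with pytest.raises(cherrypy.HTTPRedirect) as redirect:
        method(*args, **kwargs)
    # HTTPRedirect stores the absolute target URLs in `.urls`; compare only the path suffix.
    assert redirect.value.urls[0].endswith(expected_page)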
class FlowContollerTest(BaseControllersTest):
    """ Unit tests for FlowController """

    def setUp(self):
        """ Sets up the environment for testing; creates a `FlowController` """
        self.init()
        self.flow_c = FlowController()
        self.burst_c = BurstController()
        self.operation_service = OperationService()

    def tearDown(self):
        """ Cleans up the testing environment """
        self.cleanup()
        self.clean_database()

    def test_context_selected(self):
        """
        Remove the project from CherryPy session and check that you are redirected to projects page.
        """
        del cherrypy.session[common.KEY_PROJECT]
        self._expect_redirect('/project/viewall', self.flow_c.step)

    def test_invalid_step(self):
        """
        Pass an invalid step and make sure we are redirected to the tvb start page.
        """
        self._expect_redirect('/tvb', self.flow_c.step)

    def test_valid_step(self):
        """
        For all algorithm categories check that a submenu is generated and the
        result page has its title given by the category name.
        """
        categories = dao.get_algorithm_categories()
        for categ in categories:
            result_dict = self.flow_c.step(categ.id)
            self.assertTrue(common.KEY_SUBMENU_LIST in result_dict,
                            "Expect to have a submenu with available algorithms for category.")
            self.assertEqual(result_dict["section_name"], categ.displayname.lower())

    def test_step_connectivity(self):
        """
        Check that the correct section name and connectivity sub-menu are returned for the connectivity step.
        """
        result_dict = self.flow_c.step_connectivity()
        self.assertEqual(result_dict['section_name'], 'connectivity')
        self.assertEqual(result_dict['submenu_list'], self.flow_c.connectivity_submenu)

    def test_default(self):
        """
        Test default method from step controllers. Check that the submit link is ok, that a mainContent
        is present in result dict and that the isAdapter flag is set to true.
        """
        cherrypy.request.method = "GET"
        categories = dao.get_algorithm_categories()
        for categ in categories:
            algo_groups = dao.get_groups_by_categories([categ.id])
            for algo in algo_groups:
                result_dict = self.flow_c.default(categ.id, algo.id)
                self.assertEqual(result_dict[common.KEY_SUBMIT_LINK], '/flow/%i/%i' % (categ.id, algo.id))
                self.assertTrue('mainContent' in result_dict)
                self.assertTrue(result_dict['isAdapter'])

    def test_default_cancel(self):
        """
        On cancel we should get a redirect to the back page link.
        """
        cherrypy.request.method = "POST"
        categories = dao.get_algorithm_categories()
        algo_groups = dao.get_groups_by_categories([categories[0].id])
        self._expect_redirect('/project/viewoperations/%i' % self.test_project.id, self.flow_c.default,
                              categories[0].id, algo_groups[0].id, cancel=True, back_page='operations')

    def test_default_invalid_key(self):
        """
        Pass invalid keys for adapter and step and check that you get redirected to the tvb entry page with error set.
        """
        self._expect_redirect('/tvb?error=True', self.flow_c.default, 'invalid', 'invalid')

    def test_read_datatype_attribute(self):
        """
        Read an attribute from a datatype.
        """
        dt = DatatypesFactory().create_datatype_with_storage("test_subject", "RAW_STATE",
                                                             'this is the stored data'.split())
        returned_data = self.flow_c.read_datatype_attribute(dt.gid, "string_data")
        self.assertEqual(returned_data, '["this", "is", "the", "stored", "data"]')

    def test_read_datatype_attribute_method_call(self):
        """
        Call method on given datatype.
        """
        dt = DatatypesFactory().create_datatype_with_storage("test_subject", "RAW_STATE",
                                                             'this is the stored data'.split())
        args = {'length': 101}
        returned_data = self.flow_c.read_datatype_attribute(dt.gid, 'return_test_data', **args)
        self.assertTrue(returned_data == str(range(101)))

    def test_get_simple_adapter_interface(self):
        adapter = dao.find_group('tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
        result = self.flow_c.get_simple_adapter_interface(adapter.id)
        expected_interface = TestAdapter1().get_input_tree()
        self.assertEqual(result['inputList'], expected_interface)

    def _long_burst_launch(self, is_range=False):
        self.burst_c.index()
        connectivity = DatatypesFactory().create_connectivity()[1]
        launch_params = copy.deepcopy(SIMULATOR_PARAMETERS)
        launch_params['connectivity'] = dao.get_datatype_by_id(connectivity.id).gid
        if not is_range:
            launch_params['simulation_length'] = '10000'
        else:
            launch_params['simulation_length'] = '[10000,10001,10002]'
            launch_params[model.RANGE_PARAMETER_1] = 'simulation_length'
        launch_params = {"simulator_parameters": json.dumps(launch_params)}
        burst_id = json.loads(self.burst_c.launch_burst("new", "test_burst", **launch_params))['id']
        return dao.get_burst_by_id(burst_id)

    def _wait_for_burst_ops(self, burst_config):
        """ Sleeps until some operation of the burst is created. """
        waited = 1
        timeout = 50
        operations = dao.get_operations_in_burst(burst_config.id)
        while not len(operations) and waited <= timeout:
            sleep(1)
            waited += 1
            operations = dao.get_operations_in_burst(burst_config.id)
        operations = dao.get_operations_in_burst(burst_config.id)
        return operations

    def test_stop_burst_operation(self):
        burst_config = self._long_burst_launch()
        operation = self._wait_for_burst_ops(burst_config)[0]
        self.assertFalse(operation.has_finished)
        self.flow_c.stop_burst_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        self.assertEqual(operation.status, model.STATUS_CANCELED)

    def test_stop_burst_operation_group(self):
        burst_config = self._long_burst_launch(True)
        operations = self._wait_for_burst_ops(burst_config)
        operations_group_id = 0
        for operation in operations:
            self.assertFalse(operation.has_finished)
            operations_group_id = operation.fk_operation_group
        self.flow_c.stop_burst_operation(operations_group_id, 1, False)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            self.assertEqual(operation.status, model.STATUS_CANCELED)

    def test_remove_burst_operation(self):
        burst_config = self._long_burst_launch()
        operation = self._wait_for_burst_ops(burst_config)[0]
        self.assertFalse(operation.has_finished)
        self.flow_c.stop_burst_operation(operation.id, 0, True)
        operation = dao.try_get_operation_by_id(operation.id)
        self.assertTrue(operation is None)

    def test_remove_burst_operation_group(self):
        burst_config = self._long_burst_launch(True)
        operations = self._wait_for_burst_ops(burst_config)
        operations_group_id = 0
        for operation in operations:
            self.assertFalse(operation.has_finished)
            operations_group_id = operation.fk_operation_group
        self.flow_c.stop_burst_operation(operations_group_id, 1, True)
        for operation in operations:
            operation = dao.try_get_operation_by_id(operation.id)
            self.assertTrue(operation is None)

    def _launch_test_algo_on_cluster(self, **data):
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        algo_group = adapter.algorithm_group
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        algo = dao.get_algorithm_by_group(algo_group.id)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id,
                                                                  algo, algo_category, {},
                                                                  ABCAdapter.LAUNCH_METHOD, **data)
        self.operation_service._send_to_cluster(operations, adapter)
        return operations

    def test_stop_operations(self):
        data = {"test1_val1": 5, 'test1_val2': 5}
        operations = self._launch_test_algo_on_cluster(**data)
        operation = dao.get_operation_by_id(operations[0].id)
        self.assertFalse(operation.has_finished)
        self.flow_c.stop_operation(operation.id, 0, False)
        operation = dao.get_operation_by_id(operation.id)
        self.assertEqual(operation.status, model.STATUS_CANCELED)

    def test_stop_operations_group(self):
        data = {model.RANGE_PARAMETER_1: "test1_val1", "test1_val1": '5,6,7', 'test1_val2': 5}
        operations = self._launch_test_algo_on_cluster(**data)
        operation_group_id = 0
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            self.assertFalse(operation.has_finished)
            operation_group_id = operation.fk_operation_group
        self.flow_c.stop_operation(operation_group_id, 1, False)
        for operation in operations:
            operation = dao.get_operation_by_id(operation.id)
            self.assertEqual(operation.status, model.STATUS_CANCELED)
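# The class above is the oldest, unittest-style variant of this suite; the two classes before it are
# the pytest rewrites. The mechanical part of that migration is illustrated below with a generic,
# self-contained example (not TVB-specific): lifecycle hooks are renamed and assertion helpers
# become plain assert statements.
import unittest


class TestOldStyle(unittest.TestCase):
    def setUp(self):
        self.values = [1, 2, 3]

    def test_contains(self):
        self.assertTrue(2 in self.values)
        self.assertEqual(len(self.values), 3)


class TestNewStyle:
    def setup_method(self):
        self.values = [1, 2, 3]

    def test_contains(self):
        assert 2 in self.values
        assert len(self.values) == 3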
class TestOperationService(BaseTestCase):
    """
    Test class for the introspection module. Some tests from here do async launches.
    For those cases Transactional tests won't work.
    """

    def setup_method(self):
        """ Reset the database before each test. """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE

    def teardown_method(self):
        """ Reset the database when test is done. """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()

    def _assert_no_ddti(self):
        count = dao.count_datatypes(self.test_project.id, DummyDataTypeIndex)
        assert 0 == count

    def _assert_stored_ddti(self, expected_cnt=1):
        count = dao.count_datatypes(self.test_project.id, DummyDataTypeIndex)
        assert expected_cnt == count
        datatype = dao.try_load_last_entity_of_type(self.test_project.id, DummyDataTypeIndex)
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        return datatype

    def test_datatypes_groups(self, test_adapter_factory):
        """
        Tests if the dataType group is set correctly on the dataTypes resulted from the same operation group.
        """
        # TODO: re-write this to use groups correctly
        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 0, "There should be no operation"

        algo = test_adapter_factory(TestAdapter3)
        adapter_instance = ABCAdapter.build_adapter(algo)
        data = {model_burst.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        # Create Group of operations
        FlowService().fire_operation(adapter_instance, self.test_user, self.test_project.id)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 1, "Expected one operation group"
        assert all_operations[0][2] == 2, "Expected 2 operations in group"
        operation_group_id = all_operations[0][3]
        assert operation_group_id is not None, "The operation should be part of a group."

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        # Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))
        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"

    def test_initiate_operation(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        test_adapter_factory()
        adapter = TestFactory.create_adapter(module, class_name)
        output = adapter.get_output()
        output_type = output[0].__name__
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        view_model = adapter.get_view_model()()
        view_model.test1_val1 = 5
        view_model.test1_val2 = 5
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)

        group = dao.get_algorithm_by_module(module, class_name)
        assert group.module == 'tvb.tests.framework.adapters.testadapter1', "Wrong data stored."
        assert group.classname == 'TestAdapter1', "Wrong data stored."
        dts, count = dao.get_values_of_datatype(self.test_project.id, DummyDataTypeIndex)
        assert count == 1
        assert len(dts) == 1
        datatype = dao.get_datatype_by_id(dts[0][0])
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        assert datatype.type == output_type, "Wrong data stored."

    def test_delete_dt_free_hdd_space(self, test_adapter_factory, operation_factory):
        """
        Launch an operation, remove the resulting datatype to free disk space, then check that a relaunch succeeds.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self._assert_no_ddti()
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        datatype = self._assert_stored_ddti()

        # Now free some space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        self._assert_no_ddti()
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_two_ops_hdd_with_space(self, test_adapter_factory):
        """
        Launch two operations and give enough available space for user so that both should finish.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = 2 * float(adapter.get_required_disk_size(view_model))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        datatype = self._assert_stored_ddti()

        # Now update the maximum disk size to be the size of the previously resulted datatypes (transform from kB to MB)
        # plus what is estimated to be required from the next one (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(adapter.get_required_disk_size(view_model))
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        self._assert_stored_ddti(2)

    def test_launch_two_ops_hdd_full_space(self):
        """
        Launch two operations and give available space for user so that the first should finish,
        but after the update to the user hdd size the second should not.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = (1 + float(adapter.get_required_disk_size(view_model)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        datatype = self._assert_stored_ddti()

        # Now update the maximum disk size to be less than size of the previously resulted datatypes (transform kB to MB)
        # plus what is estimated to be required from the next one (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                            float(adapter.get_required_disk_size(view_model) - 1)
        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                      model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_operation_hdd_with_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_operation_hdd_with_space_started_ops(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        space_taken_by_started = 100
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        started_operation = model_operation.Operation(self.test_user.id, self.test_project.id,
                                                      adapter.stored_adapter.id, "",
                                                      status=model_operation.STATUS_STARTED,
                                                      estimated_disk_size=space_taken_by_started)
        view_model = adapter.get_view_model()()
        dao.store_entity(started_operation)
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model) + space_taken_by_started)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_operation_hdd_full_space(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model) - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                      model_view=view_model)
        self._assert_no_ddti()

    def test_launch_operation_hdd_full_space_started_ops(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        space_taken_by_started = 100
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        started_operation = model_operation.Operation(self.test_user.id, self.test_project.id,
                                                      adapter.stored_adapter.id, "",
                                                      status=model_operation.STATUS_STARTED,
                                                      estimated_disk_size=space_taken_by_started)
        view_model = adapter.get_view_model()()
        dao.store_entity(started_operation)
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model) + space_taken_by_started - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                      model_view=view_model)
        self._assert_no_ddti()

    def test_stop_operation(self, test_adapter_factory):
        """
        Test that an operation is successfully stopped.
        """
        test_adapter_factory(adapter_class=TestAdapter2)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter2", "TestAdapter2")
        view_model = adapter.get_view_model()()
        view_model.test = 5
        algo = adapter.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project, algo,
                                                                  algo_category, {}, view_model=view_model)
        self.operation_service._send_to_cluster(operations, adapter)
        self.operation_service.stop_operation(operations[0].id)
        operation = dao.get_operation_by_id(operations[0].id)
        assert operation.status == model_operation.STATUS_CANCELED, "Operation should have been canceled!"

    def test_stop_operation_finished(self, test_adapter_factory):
        """
        Test that an operation that is already finished is not changed by the stop operation.
        """
        test_adapter_factory()
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        view_model = adapter.get_view_model()()
        view_model.test1_val1 = 5
        view_model.test1_val2 = 5
        algo = adapter.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project, algo,
                                                                  algo_category, {}, view_model=view_model)
        self.operation_service._send_to_cluster(operations, adapter)
        operation = dao.get_operation_by_id(operations[0].id)
        operation.status = model_operation.STATUS_FINISHED
        dao.store_entity(operation)
        self.operation_service.stop_operation(operations[0].id)
        operation = dao.get_operation_by_id(operations[0].id)
        assert operation.status == model_operation.STATUS_FINISHED, "Operation shouldn't have been canceled!"
class TestOperationService(BaseTestCase):
    """
    Test class for the introspection module. Some tests from here do async launches.
    For those cases Transactional tests won't work.
    """

    def setup_method(self):
        """ Reset the database before each test. """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE

    def teardown_method(self):
        """ Reset the database when test is done. """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()

    def _assert_no_ddti(self):
        count = dao.count_datatypes(self.test_project.id, DummyDataTypeIndex)
        assert 0 == count

    def _assert_stored_ddti(self, expected_cnt=1):
        count = dao.count_datatypes(self.test_project.id, DummyDataTypeIndex)
        assert expected_cnt == count
        datatype = dao.try_load_last_entity_of_type(self.test_project.id, DummyDataTypeIndex)
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        return datatype

    def test_datatypes_groups(self, test_adapter_factory, datatype_group_factory):
        """
        Tests if the dataType group is set correctly on the dataTypes resulted from the same operation group.
        """
        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 0, "There should be no operation"

        dt_group = datatype_group_factory(project=self.test_project)
        model = TestModel()
        test_adapter_factory()
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")

        operations = dao.get_operations_in_group(dt_group.id)
        for op in operations:
            model.gid = uuid.uuid4()
            op_path = StorageInterface().get_project_folder(self.test_project.name, str(op.id))
            op.view_model_gid = model.gid.hex
            op.algorithm = adapter.stored_adapter
            h5.store_view_model(model, op_path)
            dao.store_entity(op)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 2, "Expected two operation groups"
        assert all_operations[0][2] == 6, "Expected 6 operations in one group"
        operation_group_id = all_operations[0][3]
        assert operation_group_id is not None, "The operation should be part of a group."

        self.operation_service.stop_operation(all_operations[1][0])
        self.operation_service.stop_operation(all_operations[1][1])
        # Make sure operations are executed
        self.operation_service.launch_operation(all_operations[1][0], False)
        self.operation_service.launch_operation(all_operations[1][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))
        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"

    def test_initiate_operation(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory()
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        view_model = TestModel()
        view_model.test1_val1 = 5
        view_model.test1_val2 = 5
        adapter.generic_attributes.subject = "Test4242"

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  model_view=view_model)

        dts, count = dao.get_values_of_datatype(self.test_project.id, DummyDataTypeIndex)
        assert count == 1
        assert len(dts) == 1
        datatype = dao.get_datatype_by_id(dts[0][0])
        assert datatype.subject == "Test4242", "Wrong data stored."
        assert datatype.type == adapter.get_output()[0].__name__, "Wrong data stored."

    def test_delete_dt_free_hdd_space(self, test_adapter_factory, operation_factory):
        """
        Launch an operation, remove the resulting datatype to free disk space, then check that a relaunch succeeds.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model))

        self._assert_no_ddti()
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  model_view=view_model)
        datatype = self._assert_stored_ddti()

        # Now free some space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        self._assert_no_ddti()
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_two_ops_hdd_with_space(self, test_adapter_factory):
        """
        Launch two operations and give enough available space for user so that both should finish.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = 2 * float(adapter.get_required_disk_size(view_model))

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  model_view=view_model)
        datatype = self._assert_stored_ddti()

        # Now update the maximum disk size to be the size of the previously resulted datatypes (transform from kB to MB)
        # plus what is estimated to be required from the next one (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(adapter.get_required_disk_size(view_model))
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  model_view=view_model)
        self._assert_stored_ddti(2)

    def test_launch_two_ops_hdd_full_space(self):
        """
        Launch two operations and give available space for user so that the first should finish,
        but after the update to the user hdd size the second should not.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = (1 + float(adapter.get_required_disk_size(view_model)))

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  model_view=view_model)
        datatype = self._assert_stored_ddti()

        # Now update the maximum disk size to be less than size of the previously resulted dts (transform kB to MB)
        # plus what is estimated to be required from the next one (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                            float(adapter.get_required_disk_size(view_model) - 1)
        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                      model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_operation_hdd_with_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model))

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_operation_hdd_with_space_started_ops(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        space_taken_by_started = 100
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        started_operation = model_operation.Operation(None, self.test_user.id, self.test_project.id,
                                                      adapter.stored_adapter.id,
                                                      status=model_operation.STATUS_STARTED,
                                                      estimated_disk_size=space_taken_by_started)
        view_model = adapter.get_view_model()()
        dao.store_entity(started_operation)
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model) + space_taken_by_started)

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_operation_hdd_full_space(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model) - 1)

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                      model_view=view_model)
        self._assert_no_ddti()

    def test_launch_operation_hdd_full_space_started_ops(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        space_taken_by_started = 100
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        started_operation = model_operation.Operation(None, self.test_user.id, self.test_project.id,
                                                      adapter.stored_adapter.id,
                                                      status=model_operation.STATUS_STARTED,
                                                      estimated_disk_size=space_taken_by_started)
        view_model = adapter.get_view_model()()
        dao.store_entity(started_operation)
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model) + space_taken_by_started - 1)

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                      model_view=view_model)
        self._assert_no_ddti()

    def test_stop_operation(self, test_adapter_factory):
        """
        Test that an operation is successfully stopped.
        """
        test_adapter_factory(adapter_class=TestAdapter2)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter2", "TestAdapter2")
        view_model = adapter.get_view_model()()
        view_model.test = 5
        algo = adapter.stored_adapter
        operation = self.operation_service.prepare_operation(self.test_user.id, self.test_project, algo,
                                                             view_model=view_model)
        self.operation_service._send_to_cluster(operation, adapter)
        self.operation_service.stop_operation(operation)
        operation = dao.get_operation_by_id(operation.id)
        assert operation.status == model_operation.STATUS_CANCELED, "Operation should have been canceled!"

    def test_stop_operation_finished(self, test_adapter_factory):
        """
        Test that an operation that is already finished is not changed by the stop operation.
        """
        test_adapter_factory()
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        view_model = adapter.get_view_model()()
        view_model.test1_val1 = 5
        view_model.test1_val2 = 5
        algo = adapter.stored_adapter
        operation = self.operation_service.prepare_operation(self.test_user.id, self.test_project, algo,
                                                             view_model=view_model)
        self.operation_service._send_to_cluster(operation, adapter)
        operation = dao.get_operation_by_id(operation.id)
        operation.status = model_operation.STATUS_FINISHED
        dao.store_entity(operation)
        self.operation_service.stop_operation(operation.id)
        operation = dao.get_operation_by_id(operation.id)
        assert operation.status == model_operation.STATUS_FINISHED, "Operation shouldn't have been canceled!"

    def test_fire_operation(self):
        """
        Test preparation of an adapter and launch mechanism.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        test_user = TestFactory.create_user(username="******")
        test_project = TestFactory.create_project(admin=test_user, name="test_project_fire_sim")

        result = OperationService().fire_operation(adapter, test_user, test_project.id,
                                                   view_model=adapter.get_view_model()())
        assert result.endswith("has finished."), "Operation failed"
class OperationServiceTest(BaseTestCase):
    """
    Test class for the introspection module. Some tests from here do async launches.
    For those cases Transactional tests won't work.
    """

    def setUp(self):
        """ Reset the database before each test. """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE

    def tearDown(self):
        """ Reset the database when test is done. """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()

    def test_datatypes_groups(self):
        """
        Tests if the dataType group is set correctly on the dataTypes resulted from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 0, "There should be no operation")

        algogroup = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
        adapter_instance = flow_service.build_adapter_instance(group)
        data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        # Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 1, "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")

        operation_group_id = all_operations[0][3]
        self.assertNotEquals(operation_group_id, None, "The operation should be part of a group.")

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        # Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")

    def test_initiate_operation(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test1_val1": 5, "test1_val2": 5}
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        res = self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                        tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        self.assertTrue(res.index("has finished.") > 10, "Operation didn't finish")
        group = dao.find_group(module, class_name)
        self.assertEqual(group.module, 'tvb.tests.framework.adapters.testadapter1', "Wrong data stored.")
        self.assertEqual(group.classname, 'TestAdapter1', "Wrong data stored.")
        dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1)
        self.assertEqual(count, 1)
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    def test_delete_dt_free_HDD_space(self):
        """
        Launch an operation, remove the resulting datatype to free disk space, then check that a relaunch succeeds.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 0)
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

        # Now remove the resulted datatype to free the required space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 0)
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    def test_launch_two_ops_HDD_with_space(self):
        """
        Launch two operations and give enough available space for user so that both should finish.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = 2 * float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

        # Now update the maximum disk size to be the size of the previously resulted datatypes (transform from kB to MB)
        # plus what is estimated to be required from the next one (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(adapter.get_required_disk_size(**data))

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 2)
        datatype = dao.get_datatype_by_id(dts[1][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    def test_launch_two_ops_HDD_full_space(self):
        """
        Launch two operations and give available space for user so that the first should finish,
        but after the update to the user hdd size the second should not.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = (1 + float(adapter.get_required_disk_size(**data)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

        # Now update the maximum disk size to be less than size of the previously resulted datatypes (transform kB to MB)
        # plus what is estimated to be required from the next one (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                            float(adapter.get_required_disk_size(**data) - 1)

        self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                          self.test_project.id, adapter, tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)

    def test_launch_operation_HDD_with_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    def test_launch_operation_HDD_with_space_started_ops(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        space_taken_by_started = 100
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        started_operation = model.Operation(self.test_user.id, self.test_project.id, group.id, "",
                                            status=model.STATUS_STARTED,
                                            estimated_disk_size=space_taken_by_started)
        dao.store_entity(started_operation)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    def test_launch_operation_HDD_full_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                          self.test_project.id, adapter, tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 0)

    def test_launch_operation_HDD_full_space_started_ops(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        space_taken_by_started = 100
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        started_operation = model.Operation(self.test_user.id, self.test_project.id, group.id, "",
                                            status=model.STATUS_STARTED,
                                            estimated_disk_size=space_taken_by_started)
        dao.store_entity(started_operation)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                          self.test_project.id, adapter, tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 0)

    def test_stop_operation(self):
        """
        Test that an operation is successfully stopped.
        """
        module = "tvb.tests.framework.adapters.testadapter2"
        class_name = "TestAdapter2"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 5}
        algo_group = adapter.algorithm_group
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        algo = dao.get_algorithm_by_group(algo_group.id)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id,
                                                                  algo, algo_category, {},
                                                                  ABCAdapter.LAUNCH_METHOD, **data)
        self.operation_service._send_to_cluster(operations, adapter)
        self.operation_service.stop_operation(operations[0].id)
        operation = dao.get_operation_by_id(operations[0].id)
        self.assertEqual(operation.status, model.STATUS_CANCELED, "Operation should have been canceled!")

    def test_stop_operation_finished(self):
        """
        Test that an operation that is already finished is not changed by the stop operation.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test1_val1": 5, 'test1_val2': 5}
        algo_group = adapter.algorithm_group
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        algo = dao.get_algorithm_by_group(algo_group.id)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id,
                                                                  algo, algo_category, {},
                                                                  ABCAdapter.LAUNCH_METHOD, **data)
        self.operation_service._send_to_cluster(operations, adapter)
        operation = dao.get_operation_by_id(operations[0].id)
        operation.status = model.STATUS_FINISHED
        dao.store_entity(operation)
        self.operation_service.stop_operation(operations[0].id)
        operation = dao.get_operation_by_id(operations[0].id)
        self.assertEqual(operation.status, model.STATUS_FINISHED, "Operation shouldn't have been canceled!")

    def test_array_from_string(self):
        """
        Simple test for parsing 1d, 2d and 3d arrays from string.
        """
        row = {'description': 'test.', 'default': 'None', 'required': True, 'label': 'test: ',
               'attributes': None, 'quantifier': 'manual', 'elementType': 'float',
               'type': 'array', 'options': None, 'name': 'test'}
        input_data_string = '[ [1 2 3] [4 5 6]]'
        output = string2array(input_data_string, ' ', row['elementType'])
        self.assertEqual(output.shape, (2, 3), "Dimensions not properly parsed")
        for i in output[0]:
            self.assertTrue(i in [1, 2, 3])
        for i in output[1]:
            self.assertTrue(i in [4, 5, 6])

        input_data_string = '[1, 2, 3, 4, 5, 6]'
        output = string2array(input_data_string, ',', row['elementType'])
        self.assertEqual(output.shape, (6,), "Dimensions not properly parsed")
        for i in output:
            self.assertTrue(i in [1, 2, 3, 4, 5, 6])

        input_data_string = '[ [ [1,1], [2, 2] ], [ [3 ,3], [4,4] ] ]'
        output = string2array(input_data_string, ',', row['elementType'])
        self.assertEqual(output.shape, (2, 2, 2), "Wrong dimensions.")
        for i in output[0][0]:
            self.assertTrue(i == 1)
        for i in output[0][1]:
            self.assertTrue(i == 2)
        for i in output[1][0]:
            self.assertTrue(i == 3)
        for i in output[1][1]:
            self.assertTrue(i == 4)

        row = {'description': 'test.', 'default': 'None', 'required': True, 'label': 'test: ',
               'attributes': None, 'quantifier': 'manual', 'elementType': 'str',
               'type': 'array', 'options': None, 'name': 'test'}
        input_data_string = '[1, 2, 3, 4, 5, 6]'
        output = string2array(input_data_string, ',', row['elementType'])
        for i in output:
            self.assertTrue(i in [1, 2, 3, 4, 5, 6])

    def test_wrong_array_from_string(self):
        """
        Test that parsing an array from a wrong input string raises the expected exception.
        """
        row = {'description': 'test.', 'default': 'None', 'required': True, 'label': 'test: ',
               'attributes': None, 'quantifier': 'manual', 'elementType': 'float',
               'type': 'array', 'options': None, 'name': 'test'}
        input_data_string = '[ [1,2 3] [4,5,6]]'
        self.assertRaises(ValueError, string2array, input_data_string, ',', row['elementType'])
        input_data_string = '[ [1,2,wrong], [4, 5, 6]]'
        self.assertRaises(ValueError, string2array, input_data_string, ',', row['elementType'])

        row = {'description': 'test.', 'default': 'None', 'required': True, 'label': 'test: ',
               'attributes': None, 'quantifier': 'manual', 'elementType': 'str',
               'type': 'array', 'options': None, 'name': 'test'}
        output = string2array(input_data_string, ',', row['elementType'])
        self.assertEqual(output.shape, (2, 3))
        self.assertEqual(output[0][2], 'wrong', 'String data not converted properly')
        input_data_string = '[ [1,2 3] [4,5,6]]'
        output = string2array(input_data_string, ',', row['elementType'])
        self.assertEqual(output[0][1], '2 3')

    def test_reduce_dimension_component(self):
        """
        This method tests if the data passed to the launch method of the NDimensionArrayAdapter
        adapter is correct. The passed data should be a list of arrays with one dimension.
""" inserted_count = FlowService().get_available_datatypes(self.test_project.id, "tvb.datatypes.arrays.MappedArray")[1] self.assertEqual(inserted_count, 0, "Expected to find no data.") #create an operation algorithm_id = FlowService().get_algorithm_by_module_and_class('tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter')[0].id operation = model.Operation(self.test_user.id, self.test_project.id, algorithm_id, 'test params', meta=json.dumps({DataTypeMetaData.KEY_STATE: "RAW_DATA"}), status=model.STATUS_FINISHED, method_name=ABCAdapter.LAUNCH_METHOD) operation = dao.store_entity(operation) #save the array wrapper in DB adapter_instance = NDimensionArrayAdapter() PARAMS = {} self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) inserted_data = FlowService().get_available_datatypes(self.test_project.id, "tvb.datatypes.arrays.MappedArray")[0] self.assertEqual(len(inserted_data), 1, "Problems when inserting data") gid = inserted_data[0][2] entity = dao.get_datatype_by_gid(gid) #from the 3D array do not select any array PARAMS = {"python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": "requiredDim_1", "input_data_dimensions_1": "", "input_data_dimensions_2": ""} try: self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) self.fail("Test should not pass. The resulted array should be a 1D array.") except Exception: # OK, do nothing; we were expecting to produce a 1D array pass #from the 3D array select only a 1D array first_dim = [gid + '_1_0', 'requiredDim_1'] PARAMS = {"python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": first_dim, "input_data_dimensions_1": gid + "_2_1"} self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) expected_result = entity.array_data[:, 0, 1] actual_result = adapter_instance.launch_param self.assertEqual(len(actual_result), len(expected_result), "Not the same size for results!") self.assertTrue(numpy.equal(actual_result, expected_result).all()) #from the 3D array select a 2D array first_dim = [gid + '_1_0', gid + '_1_1', 'requiredDim_2'] PARAMS = {"python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": first_dim, "input_data_dimensions_1": gid + "_2_1"} self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) expected_result = entity.array_data[slice(0, None), [0, 1], 1] actual_result = adapter_instance.launch_param self.assertEqual(len(actual_result), len(expected_result), "Not the same size for results!") self.assertTrue(numpy.equal(actual_result, expected_result).all()) #from 3D array select 1D array by applying SUM function on the first #dimension and average function on the second dimension PARAMS = {"python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": ["requiredDim_1", "func_sum"], "input_data_dimensions_1": "func_average", "input_data_dimensions_2": ""} self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) aux = numpy.sum(entity.array_data, axis=0) expected_result = numpy.average(aux, axis=0) actual_result = adapter_instance.launch_param self.assertEqual(len(actual_result), len(expected_result), "Not the same size of results!") self.assertTrue(numpy.equal(actual_result, expected_result).all()) #from 3D array select a 2D array and apply op. 
on the second dimension PARAMS = {"python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": ["requiredDim_2", "func_sum", "expected_shape_x,512", "operations_x,>"], "input_data_dimensions_1": "", "input_data_dimensions_2": ""} try: self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) self.fail("Test should not pass! The second dimension of the array should be >512.") except Exception: # OK, do nothing; pass
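# --- Illustrative sketch (not part of the original test module) ---------------------------------
# The *_HDD_* tests above all exercise one quota rule: an operation may only be launched when its
# estimated disk usage, plus the space already claimed by STARTED operations, still fits under
# TvbProfile.current.MAX_DISK_SPACE; otherwise NoMemoryAvailableException is expected. A minimal
# sketch of that rule, assuming this simple accounting (the real check lives inside
# OperationService.initiate_operation and may convert units, as the kB/MB comments above hint):
def _launch_would_exceed_quota(required_disk_size, space_taken_by_started, max_disk_space):
    """Return True when a new launch should be refused for lack of disk space (sketch only)."""
    return required_disk_size + space_taken_by_started > max_disk_space
# Example: with MAX_DISK_SPACE set to get_required_disk_size(**data) - 1 and no started
# operations, the sketch returns True, mirroring the NoMemoryAvailableException asserted in
# test_launch_operation_HDD_full_space.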
class TestOperationService(BaseTestCase): """ Test class for the introspection module. Some tests from here do async launches. For those cases Transactional tests won't work. TODO: this is still to be refactored, for being huge, with duplicates and many irrelevant checks """ def setup_method(self): """ Reset the database before each test. """ self.clean_database() initialize_storage() self.test_user = TestFactory.create_user() self.test_project = TestFactory.create_project(self.test_user) self.operation_service = OperationService() self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE def teardown_method(self): """ Reset the database when test is done. """ TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size self.clean_database() def _assert_no_dt2(self): count = dao.count_datatypes(self.test_project.id, Datatype2) assert 0 == count def _assert_stored_dt2(self, expected_cnt=1): count = dao.count_datatypes(self.test_project.id, Datatype2) assert expected_cnt == count datatype = dao.try_load_last_entity_of_type(self.test_project.id, Datatype2) assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored." return datatype def test_datatypes_groups(self): """ Tests if the dataType group is set correct on the dataTypes resulted from the same operation group. """ flow_service = FlowService() all_operations = dao.get_filtered_operations(self.test_project.id, None) assert len(all_operations) == 0, "There should be no operation" adapter_instance = TestFactory.create_adapter('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3') data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]} ## Create Group of operations flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data) all_operations = dao.get_filtered_operations(self.test_project.id, None) assert len(all_operations) == 1, "Expected one operation group" assert all_operations[0][2] == 2, "Expected 2 operations in group" operation_group_id = all_operations[0][3] assert operation_group_id != None, "The operation should be part of a group." self.operation_service.stop_operation(all_operations[0][0]) self.operation_service.stop_operation(all_operations[0][1]) ## Make sure operations are executed self.operation_service.launch_operation(all_operations[0][0], False) self.operation_service.launch_operation(all_operations[0][1], False) resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id) assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)) dt = dao.get_datatype_by_id(resulted_datatypes[0].id) datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id) assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect" def test_initiate_operation(self): """ Test the actual operation flow by executing a test adapter. """ module = "tvb.tests.framework.adapters.testadapter1" class_name = "TestAdapter1" adapter = TestFactory.create_adapter(module, class_name) output = adapter.get_output() output_type = output[0].__name__ data = {"test1_val1": 5, "test1_val2": 5} tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") res = self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) assert res.index("has finished.") > 10, "Operation didn't finish" group = dao.get_algorithm_by_module(module, class_name) assert group.module == 'tvb.tests.framework.adapters.testadapter1', "Wrong data stored." 
assert group.classname == 'TestAdapter1', "Wrong data stored." dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1) assert count == 1 assert len(dts) == 1 datatype = dao.get_datatype_by_id(dts[0][0]) assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored." assert datatype.type == output_type, "Wrong data stored." def test_delete_dt_free_HDD_space(self): """ Launch two operations and give enough available space for user so that both should finish. """ adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data)) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self._assert_no_dt2() self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) datatype = self._assert_stored_dt2() # Now free some space and relaunch ProjectService().remove_datatype(self.test_project.id, datatype.gid) self._assert_no_dt2() self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) self._assert_stored_dt2() def test_launch_two_ops_HDD_with_space(self): """ Launch two operations and give enough available space for user so that both should finish. """ adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = 2 * float(adapter.get_required_disk_size(**data)) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) datatype = self._assert_stored_dt2() #Now update the maximum disk size to be the size of the previously resulted datatypes (transform from kB to MB) #plus what is estimated to be required from the next one (transform from B to MB) TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(adapter.get_required_disk_size(**data)) self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) self._assert_stored_dt2(2) def test_launch_two_ops_HDD_full_space(self): """ Launch two operations and give available space for user so that the first should finish, but after the update to the user hdd size the second should not. """ adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = (1 + float(adapter.get_required_disk_size(**data))) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) datatype = self._assert_stored_dt2() #Now update the maximum disk size to be less than size of the previously resulted datatypes (transform kB to MB) #plus what is estimated to be required from the next one (transform from B to MB) TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \ float(adapter.get_required_disk_size(**data) - 1) with pytest.raises(NoMemoryAvailableException): self.operation_service.initiate_operation(self.test_user,self.test_project.id, adapter, tmp_folder, **data) self._assert_stored_dt2() def test_launch_operation_HDD_with_space(self): """ Test the actual operation flow by executing a test adapter. 
""" adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data)) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) self._assert_stored_dt2() def test_launch_operation_HDD_with_space_started_ops(self): """ Test the actual operation flow by executing a test adapter. """ space_taken_by_started = 100 adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") started_operation = model.Operation(self.test_user.id, self.test_project.id, adapter.stored_adapter.id, "", status=model.STATUS_STARTED, estimated_disk_size=space_taken_by_started) dao.store_entity(started_operation) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) self._assert_stored_dt2() def test_launch_operation_HDD_full_space(self): """ Test the actual operation flow by executing a test adapter. """ adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) - 1) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") with pytest.raises(NoMemoryAvailableException): self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) self._assert_no_dt2() def test_launch_operation_HDD_full_space_started_ops(self): """ Test the actual operation flow by executing a test adapter. """ space_taken_by_started = 100 adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") started_operation = model.Operation(self.test_user.id, self.test_project.id, adapter.stored_adapter.id, "", status=model.STATUS_STARTED, estimated_disk_size=space_taken_by_started) dao.store_entity(started_operation) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started - 1) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") with pytest.raises(NoMemoryAvailableException): self.operation_service.initiate_operation(self.test_user,self.test_project.id, adapter, tmp_folder, **data) self._assert_no_dt2() def test_stop_operation(self): """ Test that an operation is successfully stopped. """ adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter2", "TestAdapter2") data = {"test": 5} algo = adapter.stored_adapter algo_category = dao.get_category_by_id(algo.fk_category) operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo, algo_category, {}, **data) self.operation_service._send_to_cluster(operations, adapter) self.operation_service.stop_operation(operations[0].id) operation = dao.get_operation_by_id(operations[0].id) assert operation.status, model.STATUS_CANCELED == "Operation should have been canceled!" def test_stop_operation_finished(self): """ Test that an operation that is already finished is not changed by the stop operation. 
""" adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1") data = {"test1_val1": 5, 'test1_val2': 5} algo = adapter.stored_adapter algo_category = dao.get_category_by_id(algo.fk_category) operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo, algo_category, {}, **data) self.operation_service._send_to_cluster(operations, adapter) operation = dao.get_operation_by_id(operations[0].id) operation.status = model.STATUS_FINISHED dao.store_entity(operation) self.operation_service.stop_operation(operations[0].id) operation = dao.get_operation_by_id(operations[0].id) assert operation.status, model.STATUS_FINISHED == "Operation shouldn't have been canceled!" def test_array_from_string(self): """ Simple test for parse array on 1d, 2d and 3d array. """ row = {'description': 'test.', 'default': 'None', 'required': True, 'label': 'test: ', 'attributes': None, 'elementType': 'float', 'type': 'array', 'options': None, 'name': 'test'} input_data_string = '[ [1 2 3] [4 5 6]]' output = string2array(input_data_string, ' ', row['elementType']) assert output.shape, (2, 3) == "Dimensions not properly parsed" for i in output[0]: assert i in [1, 2, 3] for i in output[1]: assert i in [4, 5, 6] input_data_string = '[1, 2, 3, 4, 5, 6]' output = string2array(input_data_string, ',', row['elementType']) assert output.shape == (6,), "Dimensions not properly parsed" for i in output: assert i in [1, 2, 3, 4, 5, 6] input_data_string = '[ [ [1,1], [2, 2] ], [ [3 ,3], [4,4] ] ]' output = string2array(input_data_string, ',', row['elementType']) assert output.shape == (2, 2, 2), "Wrong dimensions." for i in output[0][0]: assert i == 1 for i in output[0][1]: assert i == 2 for i in output[1][0]: assert i == 3 for i in output[1][1]: assert i == 4 row = {'description': 'test.', 'default': 'None', 'required': True, 'label': 'test: ', 'attributes': None, 'elementType': 'str', 'type': 'array', 'options': None, 'name': 'test'} input_data_string = '[1, 2, 3, 4, 5, 6]' output = string2array(input_data_string, ',', row['elementType']) for i in output: assert i in [1, 2, 3, 4, 5, 6] def test_wrong_array_from_string(self): """Test that parsing an array from string is throwing the expected exception when wrong input string""" row = {'description': 'test.', 'default': 'None', 'required': True, 'label': 'test: ', 'attributes': None, 'elementType': 'float', 'type': 'array', 'options': None, 'name': 'test'} input_data_string = '[ [1,2 3] [4,5,6]]' with pytest.raises(ValueError): string2array(input_data_string, ',', row['elementType']) input_data_string = '[ [1,2,wrong], [4, 5, 6]]' with pytest.raises(ValueError): string2array(input_data_string, ',', row['elementType']) row = {'description': 'test.', 'default': 'None', 'required': True, 'label': 'test: ', 'attributes': None, 'elementType': 'str', 'type': 'array', 'options': None, 'name': 'test'} output = string2array(input_data_string, ',', row['elementType']) assert output.shape == (2, 3) assert output[0][2] == 'wrong', 'String data not converted properly' input_data_string = '[ [1,2 3] [4,5,6]]' output = string2array(input_data_string, ',', row['elementType']) assert output[0][1] == '2 3' def test_reduce_dimension_component(self): """ This method tests if the data passed to the launch method of the NDimensionArrayAdapter adapter is correct. The passed data should be a list of arrays with one dimension. 
""" inserted_count = FlowService().get_available_datatypes(self.test_project.id, "tvb.datatypes.arrays.MappedArray")[1] assert inserted_count == 0, "Expected to find no data." #create an operation algorithm_id = FlowService().get_algorithm_by_module_and_class('tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter').id operation = model.Operation(self.test_user.id, self.test_project.id, algorithm_id, 'test params', meta=json.dumps({DataTypeMetaData.KEY_STATE: "RAW_DATA"}), status=model.STATUS_FINISHED) operation = dao.store_entity(operation) #save the array wrapper in DB adapter_instance = NDimensionArrayAdapter() PARAMS = {} self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) inserted_data = FlowService().get_available_datatypes(self.test_project.id, "tvb.datatypes.arrays.MappedArray")[0] assert len(inserted_data) == 1, "Problems when inserting data" gid = inserted_data[0][2] entity = dao.get_datatype_by_gid(gid) #from the 3D array do not select any array PARAMS = {"python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": "requiredDim_1", "input_data_dimensions_1": "", "input_data_dimensions_2": ""} try: self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) raise AssertionError("Test should not pass. The resulted array should be a 1D array.") except Exception: # OK, do nothing; we were expecting to produce a 1D array pass #from the 3D array select only a 1D array first_dim = [gid + '_1_0', 'requiredDim_1'] PARAMS = {"python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": first_dim, "input_data_dimensions_1": gid + "_2_1"} self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) expected_result = entity.array_data[:, 0, 1] actual_result = adapter_instance.launch_param assert len(actual_result) == len(expected_result), "Not the same size for results!" assert numpy.equal(actual_result, expected_result).all() #from the 3D array select a 2D array first_dim = [gid + '_1_0', gid + '_1_1', 'requiredDim_2'] PARAMS = {"python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": first_dim, "input_data_dimensions_1": gid + "_2_1"} self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) expected_result = entity.array_data[slice(0, None), [0, 1], 1] actual_result = adapter_instance.launch_param assert len(actual_result) == len(expected_result), "Not the same size for results!" assert numpy.equal(actual_result, expected_result).all() #from 3D array select 1D array by applying SUM function on the first #dimension and average function on the second dimension PARAMS = {"python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": ["requiredDim_1", "func_sum"], "input_data_dimensions_1": "func_average", "input_data_dimensions_2": ""} self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) aux = numpy.sum(entity.array_data, axis=0) expected_result = numpy.average(aux, axis=0) actual_result = adapter_instance.launch_param assert len(actual_result) == len(expected_result), "Not the same size of results!" assert numpy.equal(actual_result, expected_result).all() #from 3D array select a 2D array and apply op. 
on the second dimension PARAMS = {"python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": ["requiredDim_2", "func_sum", "expected_shape_x,512", "operations_x,>"], "input_data_dimensions_1": "", "input_data_dimensions_2": ""} try: self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) raise AssertionError("Test should not pass! The second dimension of the array should be >512.") except Exception: # OK, do nothing; pass
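# --- Illustrative sketch (not part of the original test module) ---------------------------------
# test_reduce_dimension_component above checks that the "input_data_dimensions_*" parameters are
# turned into ordinary numpy indexing on the stored 3D array. The encoding used by the test is
# "<gid>_<dimension>_<index>" plus markers such as "requiredDim_2" and "func_sum"; how
# NDimensionArrayAdapter parses them is not reproduced here. A small self-contained check of the
# expected numpy result for the 2D selection case:
import numpy

_demo = numpy.arange(2 * 3 * 4).reshape((2, 3, 4))
# keep dimension 0 whole, pick indices 0 and 1 on dimension 1 and index 1 on dimension 2 --
# the same selection the test encodes as ['<gid>_1_0', '<gid>_1_1', 'requiredDim_2'] / '<gid>_2_1'
_expected_2d = _demo[slice(0, None), [0, 1], 1]
assert _expected_2d.shape == (2, 2)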
class OperationServiceTest(BaseTestCase): """ Test class for the introspection module. Some tests from here do async launches. For those cases Transactional tests won't work. TODO: this is still to be refactored, for being huge, with duplicates and many irrelevant checks """ def setUp(self): """ Reset the database before each test. """ self.clean_database() initialize_storage() self.test_user = TestFactory.create_user() self.test_project = TestFactory.create_project(self.test_user) self.operation_service = OperationService() self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE def tearDown(self): """ Reset the database when test is done. """ TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size self.clean_database() def _assert_no_dt2(self): count = dao.count_datatypes(self.test_project.id, Datatype2) self.assertEqual(0, count) def _assert_stored_dt2(self, expected_cnt=1): count = dao.count_datatypes(self.test_project.id, Datatype2) self.assertEqual(expected_cnt, count) datatype = dao.try_load_last_entity_of_type(self.test_project.id, Datatype2) self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.") return datatype def test_datatypes_groups(self): """ Tests if the dataType group is set correct on the dataTypes resulted from the same operation group. """ flow_service = FlowService() all_operations = dao.get_filtered_operations(self.test_project.id, None) self.assertEqual(len(all_operations), 0, "There should be no operation") algogroup = dao.find_group("tvb.tests.framework.adapters.testadapter3", "TestAdapter3") group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup) adapter_instance = flow_service.build_adapter_instance(group) data = {model.RANGE_PARAMETER_1: "param_5", "param_5": [1, 2]} ## Create Group of operations flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data) all_operations = dao.get_filtered_operations(self.test_project.id, None) self.assertEqual(len(all_operations), 1, "Expected one operation group") self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group") operation_group_id = all_operations[0][3] self.assertNotEquals(operation_group_id, None, "The operation should be part of a group.") self.operation_service.stop_operation(all_operations[0][0]) self.operation_service.stop_operation(all_operations[0][1]) ## Make sure operations are executed self.operation_service.launch_operation(all_operations[0][0], False) self.operation_service.launch_operation(all_operations[0][1], False) resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id) self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))) dt = dao.get_datatype_by_id(resulted_datatypes[0].id) datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id) self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect") def test_initiate_operation(self): """ Test the actual operation flow by executing a test adapter. 
""" module = "tvb.tests.framework.adapters.testadapter1" class_name = "TestAdapter1" group = dao.find_group(module, class_name) adapter = FlowService().build_adapter_instance(group) output = adapter.get_output() output_type = output[0].__name__ data = {"test1_val1": 5, "test1_val2": 5} tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") res = self.operation_service.initiate_operation( self.test_user, self.test_project.id, adapter, tmp_folder, **data ) self.assertTrue(res.index("has finished.") > 10, "Operation didn't finish") group = dao.find_group(module, class_name) self.assertEqual(group.module, "tvb.tests.framework.adapters.testadapter1", "Wrong data stored.") self.assertEqual(group.classname, "TestAdapter1", "Wrong data stored.") dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1) self.assertEqual(count, 1) self.assertEqual(len(dts), 1) datatype = dao.get_datatype_by_id(dts[0][0]) self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.") self.assertEqual(datatype.type, output_type, "Wrong data stored.") def test_delete_dt_free_HDD_space(self): """ Launch two operations and give enough available space for user so that both should finish. """ module = "tvb.tests.framework.adapters.testadapter3" class_name = "TestAdapterHDDRequired" group = dao.find_group(module, class_name) adapter = FlowService().build_adapter_instance(group) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data)) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self._assert_no_dt2() self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) datatype = self._assert_stored_dt2() # Now free some space and relaunch ProjectService().remove_datatype(self.test_project.id, datatype.gid) self._assert_no_dt2() self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) self._assert_stored_dt2() def test_launch_two_ops_HDD_with_space(self): """ Launch two operations and give enough available space for user so that both should finish. """ module = "tvb.tests.framework.adapters.testadapter3" class_name = "TestAdapterHDDRequired" group = dao.find_group(module, class_name) adapter = FlowService().build_adapter_instance(group) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = 2 * float(adapter.get_required_disk_size(**data)) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) datatype = self._assert_stored_dt2() # Now update the maximum disk size to be the size of the previously resulted datatypes (transform from kB to MB) # plus what is estimated to be required from the next one (transform from B to MB) TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(adapter.get_required_disk_size(**data)) self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) self._assert_stored_dt2(2) def test_launch_two_ops_HDD_full_space(self): """ Launch two operations and give available space for user so that the first should finish, but after the update to the user hdd size the second should not. 
""" module = "tvb.tests.framework.adapters.testadapter3" class_name = "TestAdapterHDDRequired" group = dao.find_group(module, class_name) adapter = FlowService().build_adapter_instance(group) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = 1 + float(adapter.get_required_disk_size(**data)) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) datatype = self._assert_stored_dt2() # Now update the maximum disk size to be less than size of the previously resulted datatypes (transform kB to MB) # plus what is estimated to be required from the next one (transform from B to MB) TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + float( adapter.get_required_disk_size(**data) - 1 ) self.assertRaises( NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user, self.test_project.id, adapter, tmp_folder, **data ) self._assert_stored_dt2() def test_launch_operation_HDD_with_space(self): """ Test the actual operation flow by executing a test adapter. """ module = "tvb.tests.framework.adapters.testadapter3" class_name = "TestAdapterHDDRequired" group = dao.find_group(module, class_name) adapter = FlowService().build_adapter_instance(group) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data)) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) self._assert_stored_dt2() def test_launch_operation_HDD_with_space_started_ops(self): """ Test the actual operation flow by executing a test adapter. """ space_taken_by_started = 100 module = "tvb.tests.framework.adapters.testadapter3" class_name = "TestAdapterHDDRequired" group = dao.find_group(module, class_name) started_operation = model.Operation( self.test_user.id, self.test_project.id, group.id, "", status=model.STATUS_STARTED, estimated_disk_size=space_taken_by_started, ) dao.store_entity(started_operation) adapter = FlowService().build_adapter_instance(group) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) self._assert_stored_dt2() def test_launch_operation_HDD_full_space(self): """ Test the actual operation flow by executing a test adapter. """ module = "tvb.tests.framework.adapters.testadapter3" class_name = "TestAdapterHDDRequired" group = dao.find_group(module, class_name) adapter = FlowService().build_adapter_instance(group) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) - 1) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.assertRaises( NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user, self.test_project.id, adapter, tmp_folder, **data ) self._assert_no_dt2() def test_launch_operation_HDD_full_space_started_ops(self): """ Test the actual operation flow by executing a test adapter. 
""" space_taken_by_started = 100 module = "tvb.tests.framework.adapters.testadapter3" class_name = "TestAdapterHDDRequired" group = dao.find_group(module, class_name) started_operation = model.Operation( self.test_user.id, self.test_project.id, group.id, "", status=model.STATUS_STARTED, estimated_disk_size=space_taken_by_started, ) dao.store_entity(started_operation) adapter = FlowService().build_adapter_instance(group) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started - 1) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.assertRaises( NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user, self.test_project.id, adapter, tmp_folder, **data ) self._assert_no_dt2() def test_stop_operation(self): """ Test that an operation is successfully stopped. """ module = "tvb.tests.framework.adapters.testadapter2" class_name = "TestAdapter2" group = dao.find_group(module, class_name) adapter = FlowService().build_adapter_instance(group) data = {"test": 5} algo_group = adapter.algorithm_group algo_category = dao.get_category_by_id(algo_group.fk_category) algo = dao.get_algorithm_by_group(algo_group.id) operations, _ = self.operation_service.prepare_operations( self.test_user.id, self.test_project.id, algo, algo_category, {}, **data ) self.operation_service._send_to_cluster(operations, adapter) self.operation_service.stop_operation(operations[0].id) operation = dao.get_operation_by_id(operations[0].id) self.assertEqual(operation.status, model.STATUS_CANCELED, "Operation should have been canceled!") def test_stop_operation_finished(self): """ Test that an operation that is already finished is not changed by the stop operation. """ module = "tvb.tests.framework.adapters.testadapter1" class_name = "TestAdapter1" group = dao.find_group(module, class_name) adapter = FlowService().build_adapter_instance(group) data = {"test1_val1": 5, "test1_val2": 5} algo_group = adapter.algorithm_group algo_category = dao.get_category_by_id(algo_group.fk_category) algo = dao.get_algorithm_by_group(algo_group.id) operations, _ = self.operation_service.prepare_operations( self.test_user.id, self.test_project.id, algo, algo_category, {}, **data ) self.operation_service._send_to_cluster(operations, adapter) operation = dao.get_operation_by_id(operations[0].id) operation.status = model.STATUS_FINISHED dao.store_entity(operation) self.operation_service.stop_operation(operations[0].id) operation = dao.get_operation_by_id(operations[0].id) self.assertEqual(operation.status, model.STATUS_FINISHED, "Operation shouldn't have been canceled!") def test_array_from_string(self): """ Simple test for parse array on 1d, 2d and 3d array. 
""" row = { "description": "test.", "default": "None", "required": True, "label": "test: ", "attributes": None, "quantifier": "manual", "elementType": "float", "type": "array", "options": None, "name": "test", } input_data_string = "[ [1 2 3] [4 5 6]]" output = string2array(input_data_string, " ", row["elementType"]) self.assertEqual(output.shape, (2, 3), "Dimensions not properly parsed") for i in output[0]: self.assertTrue(i in [1, 2, 3]) for i in output[1]: self.assertTrue(i in [4, 5, 6]) input_data_string = "[1, 2, 3, 4, 5, 6]" output = string2array(input_data_string, ",", row["elementType"]) self.assertEqual(output.shape, (6,), "Dimensions not properly parsed") for i in output: self.assertTrue(i in [1, 2, 3, 4, 5, 6]) input_data_string = "[ [ [1,1], [2, 2] ], [ [3 ,3], [4,4] ] ]" output = string2array(input_data_string, ",", row["elementType"]) self.assertEqual(output.shape, (2, 2, 2), "Wrong dimensions.") for i in output[0][0]: self.assertTrue(i == 1) for i in output[0][1]: self.assertTrue(i == 2) for i in output[1][0]: self.assertTrue(i == 3) for i in output[1][1]: self.assertTrue(i == 4) row = { "description": "test.", "default": "None", "required": True, "label": "test: ", "attributes": None, "quantifier": "manual", "elementType": "str", "type": "array", "options": None, "name": "test", } input_data_string = "[1, 2, 3, 4, 5, 6]" output = string2array(input_data_string, ",", row["elementType"]) for i in output: self.assertTrue(i in [1, 2, 3, 4, 5, 6]) def test_wrong_array_from_string(self): """Test that parsing an array from string is throwing the expected exception when wrong input string""" row = { "description": "test.", "default": "None", "required": True, "label": "test: ", "attributes": None, "quantifier": "manual", "elementType": "float", "type": "array", "options": None, "name": "test", } input_data_string = "[ [1,2 3] [4,5,6]]" self.assertRaises(ValueError, string2array, input_data_string, ",", row["elementType"]) input_data_string = "[ [1,2,wrong], [4, 5, 6]]" self.assertRaises(ValueError, string2array, input_data_string, ",", row["elementType"]) row = { "description": "test.", "default": "None", "required": True, "label": "test: ", "attributes": None, "quantifier": "manual", "elementType": "str", "type": "array", "options": None, "name": "test", } output = string2array(input_data_string, ",", row["elementType"]) self.assertEqual(output.shape, (2, 3)) self.assertEqual(output[0][2], "wrong", "String data not converted properly") input_data_string = "[ [1,2 3] [4,5,6]]" output = string2array(input_data_string, ",", row["elementType"]) self.assertEqual(output[0][1], "2 3") def test_reduce_dimension_component(self): """ This method tests if the data passed to the launch method of the NDimensionArrayAdapter adapter is correct. The passed data should be a list of arrays with one dimension. 
""" inserted_count = FlowService().get_available_datatypes( self.test_project.id, "tvb.datatypes.arrays.MappedArray" )[1] self.assertEqual(inserted_count, 0, "Expected to find no data.") # create an operation algorithm_id = ( FlowService() .get_algorithm_by_module_and_class( "tvb.tests.framework.adapters.ndimensionarrayadapter", "NDimensionArrayAdapter" )[0] .id ) operation = model.Operation( self.test_user.id, self.test_project.id, algorithm_id, "test params", meta=json.dumps({DataTypeMetaData.KEY_STATE: "RAW_DATA"}), status=model.STATUS_FINISHED, ) operation = dao.store_entity(operation) # save the array wrapper in DB adapter_instance = NDimensionArrayAdapter() PARAMS = {} self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) inserted_data = FlowService().get_available_datatypes(self.test_project.id, "tvb.datatypes.arrays.MappedArray")[ 0 ] self.assertEqual(len(inserted_data), 1, "Problems when inserting data") gid = inserted_data[0][2] entity = dao.get_datatype_by_gid(gid) # from the 3D array do not select any array PARAMS = { "python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": "requiredDim_1", "input_data_dimensions_1": "", "input_data_dimensions_2": "", } try: self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) self.fail("Test should not pass. The resulted array should be a 1D array.") except Exception: # OK, do nothing; we were expecting to produce a 1D array pass # from the 3D array select only a 1D array first_dim = [gid + "_1_0", "requiredDim_1"] PARAMS = { "python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": first_dim, "input_data_dimensions_1": gid + "_2_1", } self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) expected_result = entity.array_data[:, 0, 1] actual_result = adapter_instance.launch_param self.assertEqual(len(actual_result), len(expected_result), "Not the same size for results!") self.assertTrue(numpy.equal(actual_result, expected_result).all()) # from the 3D array select a 2D array first_dim = [gid + "_1_0", gid + "_1_1", "requiredDim_2"] PARAMS = { "python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": first_dim, "input_data_dimensions_1": gid + "_2_1", } self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) expected_result = entity.array_data[slice(0, None), [0, 1], 1] actual_result = adapter_instance.launch_param self.assertEqual(len(actual_result), len(expected_result), "Not the same size for results!") self.assertTrue(numpy.equal(actual_result, expected_result).all()) # from 3D array select 1D array by applying SUM function on the first # dimension and average function on the second dimension PARAMS = { "python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": ["requiredDim_1", "func_sum"], "input_data_dimensions_1": "func_average", "input_data_dimensions_2": "", } self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) aux = numpy.sum(entity.array_data, axis=0) expected_result = numpy.average(aux, axis=0) actual_result = adapter_instance.launch_param self.assertEqual(len(actual_result), len(expected_result), "Not the same size of results!") self.assertTrue(numpy.equal(actual_result, expected_result).all()) # from 3D array select a 2D array and apply op. 
on the second dimension PARAMS = { "python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": ["requiredDim_2", "func_sum", "expected_shape_x,512", "operations_x,>"], "input_data_dimensions_1": "", "input_data_dimensions_2": "", } try: self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) self.fail("Test should not pass! The second dimension of the array should be >512.") except Exception: # OK, do nothing; pass
class TestOperationService(BaseTestCase): """ Test class for the introspection module. Some tests from here do async launches. For those cases Transactional tests won't work. TODO: this is still to be refactored, for being huge, with duplicates and many irrelevant checks """ def setup_method(self): """ Reset the database before each test. """ self.clean_database() initialize_storage() self.test_user = TestFactory.create_user() self.test_project = TestFactory.create_project(self.test_user) self.operation_service = OperationService() self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE def teardown_method(self): """ Reset the database when test is done. """ TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size self.clean_database() def _assert_no_dt2(self): count = dao.count_datatypes(self.test_project.id, DummyDataTypeIndex) assert 0 == count def _assert_stored_dt2(self, expected_cnt=1): count = dao.count_datatypes(self.test_project.id, Datatype2) assert expected_cnt == count datatype = dao.try_load_last_entity_of_type(self.test_project.id, Datatype2) assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored." return datatype def test_datatypes_groups(self, test_adapter_factory): """ Tests if the dataType group is set correct on the dataTypes resulted from the same operation group. """ all_operations = dao.get_filtered_operations(self.test_project.id, None) assert len(all_operations) == 0, "There should be no operation" test_adapter_factory(TestAdapter3) algo = dao.get_algorithm_by_module( 'tvb.tests.framework.adapters.testadapter3', 'TestAdapter3') adapter_instance = ABCAdapter.build_adapter(algo) data = {model_burst.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]} ## Create Group of operations FlowService().fire_operation(adapter_instance, self.test_user, self.test_project.id, **data) all_operations = dao.get_filtered_operations(self.test_project.id, None) assert len(all_operations) == 1, "Expected one operation group" assert all_operations[0][2] == 2, "Expected 2 operations in group" operation_group_id = all_operations[0][3] assert operation_group_id != None, "The operation should be part of a group." self.operation_service.stop_operation(all_operations[0][0]) self.operation_service.stop_operation(all_operations[0][1]) ## Make sure operations are executed self.operation_service.launch_operation(all_operations[0][0], False) self.operation_service.launch_operation(all_operations[0][1], False) resulted_datatypes = dao.get_datatype_in_group( operation_group_id=operation_group_id) assert len( resulted_datatypes) >= 2, "Expected at least 2, but: " + str( len(resulted_datatypes)) dt = dao.get_datatype_by_id(resulted_datatypes[0].id) datatype_group = dao.get_datatypegroup_by_op_group_id( operation_group_id) assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect" def test_initiate_operation(self, test_adapter_factory): """ Test the actual operation flow by executing a test adapter. 
""" module = "tvb.tests.framework.adapters.testadapter1" class_name = "TestAdapter1" test_adapter_factory() adapter = TestFactory.create_adapter(module, class_name) output = adapter.get_output() output_type = output[0].__name__ data = {"test1_val1": 5, "test1_val2": 5} tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) group = dao.get_algorithm_by_module(module, class_name) assert group.module == 'tvb.tests.framework.adapters.testadapter1', "Wrong data stored." assert group.classname == 'TestAdapter1', "Wrong data stored." dts, count = dao.get_values_of_datatype(self.test_project.id, DummyDataTypeIndex) assert count == 1 assert len(dts) == 1 datatype = dao.get_datatype_by_id(dts[0][0]) assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored." assert datatype.type == output_type, "Wrong data stored." def test_delete_dt_free_hdd_space(self, test_adapter_factory, operation_factory): """ Launch two operations and give enough available space for user so that both should finish. """ test_adapter_factory(adapter_class=TestAdapterHDDRequired) adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float( adapter.get_required_disk_size(**data)) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self._assert_no_dt2() self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) datatype = self._assert_stored_dt2() # Now free some space and relaunch ProjectService().remove_datatype(self.test_project.id, datatype.gid) self._assert_no_dt2() self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) self._assert_stored_dt2() def test_launch_two_ops_hdd_with_space(self): """ Launch two operations and give enough available space for user so that both should finish. """ adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = 2 * float( adapter.get_required_disk_size(**data)) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) datatype = self._assert_stored_dt2() #Now update the maximum disk size to be the size of the previously resulted datatypes (transform from kB to MB) #plus what is estimated to be required from the next one (transform from B to MB) TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float( adapter.get_required_disk_size(**data)) self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) self._assert_stored_dt2(2) def test_launch_two_ops_hdd_full_space(self): """ Launch two operations and give available space for user so that the first should finish, but after the update to the user hdd size the second should not. 
""" adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = ( 1 + float(adapter.get_required_disk_size(**data))) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) datatype = self._assert_stored_dt2() #Now update the maximum disk size to be less than size of the previously resulted datatypes (transform kB to MB) #plus what is estimated to be required from the next one (transform from B to MB) TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \ float(adapter.get_required_disk_size(**data) - 1) with pytest.raises(NoMemoryAvailableException): self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) self._assert_stored_dt2() def test_launch_operation_hdd_with_space(self): """ Test the actual operation flow by executing a test adapter. """ adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float( adapter.get_required_disk_size(**data)) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) self._assert_stored_dt2() def test_launch_operation_hdd_with_space_started_ops( self, test_adapter_factory): """ Test the actual operation flow by executing a test adapter. """ test_adapter_factory(adapter_class=TestAdapterHDDRequired) space_taken_by_started = 100 adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") form = TestAdapterHDDRequiredForm() form.fill_from_post({'_test': "100"}) adapter.submit_form(form) started_operation = model_operation.Operation( self.test_user.id, self.test_project.id, adapter.stored_adapter.id, "", status=model_operation.STATUS_STARTED, estimated_disk_size=space_taken_by_started) dao.store_entity(started_operation) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float( adapter.get_required_disk_size(**data) + space_taken_by_started) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) self._assert_stored_dt2() def test_launch_operation_hdd_full_space(self, test_adapter_factory): """ Test the actual operation flow by executing a test adapter. """ test_adapter_factory(adapter_class=TestAdapterHDDRequired) adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") form = TestAdapterHDDRequiredForm() adapter.submit_form(form) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float( adapter.get_required_disk_size(**data) - 1) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") with pytest.raises(NoMemoryAvailableException): self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) self._assert_no_dt2() def test_launch_operation_hdd_full_space_started_ops( self, test_adapter_factory): """ Test the actual operation flow by executing a test adapter. 
""" test_adapter_factory(adapter_class=TestAdapterHDDRequired) space_taken_by_started = 100 adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") form = TestAdapterHDDRequiredForm() adapter.submit_form(form) started_operation = model_operation.Operation( self.test_user.id, self.test_project.id, adapter.stored_adapter.id, "", status=model_operation.STATUS_STARTED, estimated_disk_size=space_taken_by_started) dao.store_entity(started_operation) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float( adapter.get_required_disk_size(**data) + space_taken_by_started - 1) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") with pytest.raises(NoMemoryAvailableException): self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) self._assert_no_dt2() def test_stop_operation(self, test_adapter_factory): """ Test that an operation is successfully stopped. """ test_adapter_factory(adapter_class=TestAdapter2) adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter2", "TestAdapter2") data = {"test": 5} algo = adapter.stored_adapter algo_category = dao.get_category_by_id(algo.fk_category) operations, _ = self.operation_service.prepare_operations( self.test_user.id, self.test_project, algo, algo_category, {}, **data) self.operation_service._send_to_cluster(operations, adapter) self.operation_service.stop_operation(operations[0].id) operation = dao.get_operation_by_id(operations[0].id) assert operation.status, model_operation.STATUS_CANCELED == "Operation should have been canceled!" def test_stop_operation_finished(self, test_adapter_factory): """ Test that an operation that is already finished is not changed by the stop operation. """ test_adapter_factory() adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter1", "TestAdapter1") data = {"test1_val1": 5, 'test1_val2': 5} algo = adapter.stored_adapter algo_category = dao.get_category_by_id(algo.fk_category) operations, _ = self.operation_service.prepare_operations( self.test_user.id, self.test_project, algo, algo_category, {}, **data) self.operation_service._send_to_cluster(operations, adapter) operation = dao.get_operation_by_id(operations[0].id) operation.status = model_operation.STATUS_FINISHED dao.store_entity(operation) self.operation_service.stop_operation(operations[0].id) operation = dao.get_operation_by_id(operations[0].id) assert operation.status, model_operation.STATUS_FINISHED == "Operation shouldn't have been canceled!" def test_array_from_string(self): """ Simple test for parse array on 1d, 2d and 3d array. """ row = { 'description': 'test.', 'default': 'None', 'required': True, 'label': 'test: ', 'attributes': None, 'elementType': 'float', 'type': 'array', 'options': None, 'name': 'test' } input_data_string = '[ [1 2 3] [4 5 6]]' output = string2array(input_data_string, ' ', row['elementType']) assert output.shape, (2, 3) == "Dimensions not properly parsed" for i in output[0]: assert i in [1, 2, 3] for i in output[1]: assert i in [4, 5, 6] input_data_string = '[1, 2, 3, 4, 5, 6]' output = string2array(input_data_string, ',', row['elementType']) assert output.shape == (6, ), "Dimensions not properly parsed" for i in output: assert i in [1, 2, 3, 4, 5, 6] input_data_string = '[ [ [1,1], [2, 2] ], [ [3 ,3], [4,4] ] ]' output = string2array(input_data_string, ',', row['elementType']) assert output.shape == (2, 2, 2), "Wrong dimensions." 
    def test_stop_operation(self, test_adapter_factory):
        """
        Test that an operation is successfully stopped.
        """
        test_adapter_factory(adapter_class=TestAdapter2)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter2",
                                             "TestAdapter2")
        data = {"test": 5}
        algo = adapter.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project,
                                                                  algo, algo_category, {}, **data)
        self.operation_service._send_to_cluster(operations, adapter)
        self.operation_service.stop_operation(operations[0].id)

        operation = dao.get_operation_by_id(operations[0].id)
        assert operation.status == model_operation.STATUS_CANCELED, "Operation should have been canceled!"

    def test_stop_operation_finished(self, test_adapter_factory):
        """
        Test that an operation which has already finished is not changed by stop_operation.
        """
        test_adapter_factory()
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1",
                                             "TestAdapter1")
        data = {"test1_val1": 5, 'test1_val2': 5}
        algo = adapter.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project,
                                                                  algo, algo_category, {}, **data)
        self.operation_service._send_to_cluster(operations, adapter)
        operation = dao.get_operation_by_id(operations[0].id)
        operation.status = model_operation.STATUS_FINISHED
        dao.store_entity(operation)
        self.operation_service.stop_operation(operations[0].id)

        operation = dao.get_operation_by_id(operations[0].id)
        assert operation.status == model_operation.STATUS_FINISHED, "Operation shouldn't have been canceled!"

    def test_array_from_string(self):
        """
        Simple test for parsing 1D, 2D and 3D arrays from string.
        """
        row = {'description': 'test.', 'default': 'None', 'required': True,
               'label': 'test: ', 'attributes': None, 'elementType': 'float',
               'type': 'array', 'options': None, 'name': 'test'}

        input_data_string = '[ [1 2 3] [4 5 6]]'
        output = string2array(input_data_string, ' ', row['elementType'])
        assert output.shape == (2, 3), "Dimensions not properly parsed"
        for i in output[0]:
            assert i in [1, 2, 3]
        for i in output[1]:
            assert i in [4, 5, 6]

        input_data_string = '[1, 2, 3, 4, 5, 6]'
        output = string2array(input_data_string, ',', row['elementType'])
        assert output.shape == (6,), "Dimensions not properly parsed"
        for i in output:
            assert i in [1, 2, 3, 4, 5, 6]

        input_data_string = '[ [ [1,1], [2, 2] ], [ [3 ,3], [4,4] ] ]'
        output = string2array(input_data_string, ',', row['elementType'])
        assert output.shape == (2, 2, 2), "Wrong dimensions."
        for i in output[0][0]:
            assert i == 1
        for i in output[0][1]:
            assert i == 2
        for i in output[1][0]:
            assert i == 3
        for i in output[1][1]:
            assert i == 4

        row = {'description': 'test.', 'default': 'None', 'required': True,
               'label': 'test: ', 'attributes': None, 'elementType': 'str',
               'type': 'array', 'options': None, 'name': 'test'}
        input_data_string = '[1, 2, 3, 4, 5, 6]'
        output = string2array(input_data_string, ',', row['elementType'])
        for i in output:
            assert i in [1, 2, 3, 4, 5, 6]

    def test_wrong_array_from_string(self):
        """
        Test that parsing an array from a malformed string raises the expected exception.
        """
        row = {'description': 'test.', 'default': 'None', 'required': True,
               'label': 'test: ', 'attributes': None, 'elementType': 'float',
               'type': 'array', 'options': None, 'name': 'test'}

        input_data_string = '[ [1,2 3] [4,5,6]]'
        with pytest.raises(ValueError):
            string2array(input_data_string, ',', row['elementType'])

        input_data_string = '[ [1,2,wrong], [4, 5, 6]]'
        with pytest.raises(ValueError):
            string2array(input_data_string, ',', row['elementType'])

        row = {'description': 'test.', 'default': 'None', 'required': True,
               'label': 'test: ', 'attributes': None, 'elementType': 'str',
               'type': 'array', 'options': None, 'name': 'test'}
        output = string2array(input_data_string, ',', row['elementType'])
        assert output.shape == (2, 3)
        assert output[0][2] == 'wrong', 'String data not converted properly'

        input_data_string = '[ [1,2 3] [4,5,6]]'
        output = string2array(input_data_string, ',', row['elementType'])
        assert output[0][1] == '2 3'
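    # For readers unfamiliar with string2array, the helper below sketches roughly how a
    # bracketed 2D literal such as '[ [1 2 3] [4 5 6]]' can be parsed. It is NOT the
    # implementation under test (string2array also handles 1D/3D input, element types
    # and error reporting); it only illustrates the shape and content asserted above.
    @staticmethod
    def _parse_2d_array_sketch(input_string, separator=' '):
        """
        Illustrative only: extract the innermost bracket groups and split each one on
        the given separator, yielding a float numpy array of shape (rows, cols).
        """
        import re
        rows = re.findall(r'\[([^\[\]]+)\]', input_string)
        return numpy.array([[float(value) for value in row.split(separator) if value.strip()]
                            for row in rows])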
    def test_reduce_dimension_component(self):
        """
        This method tests if the data passed to the launch method of the
        NDimensionArrayAdapter adapter is correct. The passed data should be a list
        of arrays with one dimension.
        """
        inserted_count = FlowService().get_available_datatypes(self.test_project.id,
                                                               "tvb.datatypes.arrays.MappedArray")[1]
        assert inserted_count == 0, "Expected to find no data."

        # Create an operation.
        algorithm_id = FlowService().get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter').id
        operation = model_operation.Operation(self.test_user.id, self.test_project.id, algorithm_id,
                                              'test params',
                                              meta=json.dumps({DataTypeMetaData.KEY_STATE: "RAW_DATA"}),
                                              status=model_operation.STATUS_FINISHED)
        operation = dao.store_entity(operation)

        # Save the array wrapper in DB.
        adapter_instance = NDimensionArrayAdapter()
        PARAMS = {}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        inserted_data = FlowService().get_available_datatypes(self.test_project.id,
                                                              "tvb.datatypes.arrays.MappedArray")[0]
        assert len(inserted_data) == 1, "Problems when inserting data"
        gid = inserted_data[0][2]
        entity = dao.get_datatype_by_gid(gid)

        # From the 3D array do not select any array.
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": "requiredDim_1",
                  "input_data_dimensions_1": "",
                  "input_data_dimensions_2": ""}
        try:
            self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
            raise AssertionError("Test should not pass. The resulting array should be a 1D array.")
        except Exception:
            # OK, do nothing; the exception was expected, since no 1D array could be produced.
            pass

        # From the 3D array select only a 1D array.
        first_dim = [gid + '_1_0', 'requiredDim_1']
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": first_dim,
                  "input_data_dimensions_1": gid + "_2_1"}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        expected_result = entity.array_data[:, 0, 1]
        actual_result = adapter_instance.launch_param
        assert len(actual_result) == len(expected_result), "Not the same size for results!"
        assert numpy.equal(actual_result, expected_result).all()

        # From the 3D array select a 2D array.
        first_dim = [gid + '_1_0', gid + '_1_1', 'requiredDim_2']
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": first_dim,
                  "input_data_dimensions_1": gid + "_2_1"}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        expected_result = entity.array_data[slice(0, None), [0, 1], 1]
        actual_result = adapter_instance.launch_param
        assert len(actual_result) == len(expected_result), "Not the same size for results!"
        assert numpy.equal(actual_result, expected_result).all()

        # From the 3D array select a 1D array by applying the SUM function on the first
        # dimension and the AVERAGE function on the second dimension.
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": ["requiredDim_1", "func_sum"],
                  "input_data_dimensions_1": "func_average",
                  "input_data_dimensions_2": ""}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        aux = numpy.sum(entity.array_data, axis=0)
        expected_result = numpy.average(aux, axis=0)
        actual_result = adapter_instance.launch_param
        assert len(actual_result) == len(expected_result), "Not the same size for results!"
        assert numpy.equal(actual_result, expected_result).all()

        # From the 3D array select a 2D array and apply an operation on the second dimension.
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": ["requiredDim_2", "func_sum",
                                              "expected_shape_x,512", "operations_x,>"],
                  "input_data_dimensions_1": "",
                  "input_data_dimensions_2": ""}
        try:
            self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
            raise AssertionError("Test should not pass! The second dimension of the array should be > 512.")
        except Exception:
            # OK, do nothing; the exception was expected.
            pass
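    # Note on the reduce_dimension PARAMS convention used in the test above (inferred
    # from this test's own assertions, not from separate adapter documentation):
    #   * "<gid>_<dim>_<idx>" appears to select index <idx> along dimension <dim>,
    #   * "requiredDim_N" asks for an N-dimensional result,
    #   * "func_<name>" applies the corresponding numpy reduction along that dimension.
    # The numpy equivalents asserted above are, roughly:
    #   array_data[:, 0, 1]                                     -> 1D selection
    #   array_data[:, [0, 1], 1]                                -> 2D selection
    #   numpy.average(numpy.sum(array_data, axis=0), axis=0)    -> chained reductions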