def cancel_all_operations(self):
    """
    To make sure that no running operations are left, which could make some other test
    started afterwards fail, cancel all operations after each test.
    """
    LOGGER.info("Stopping all operations.")
    op_service = OperationService()
    operations = self.get_all_entities(model.Operation)
    for operation in operations:
        op_service.stop_operation(operation.id)

def cancel_all_operations(self):
    """
    To make sure that no running operations are left, which could make some other test
    started afterwards fail, cancel all operations after each test.
    """
    LOGGER.info("Stopping all operations.")
    op_service = OperationService()
    operations = self.get_all_entities(Operation)
    for operation in operations:
        try:
            op_service.stop_operation(operation.id)
        except Exception:
            # Ignore operations left in an inconsistent state by other unit-tests
            pass

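# The third variant above adds best-effort semantics: a failure to stop one operation
# must not abort the whole cleanup sweep. A self-contained sketch of that pattern
# follows; `stop_fn` and the plain list of ids are hypothetical stand-ins for
# OperationService.stop_operation and the entities loaded above.
import logging

SKETCH_LOGGER = logging.getLogger(__name__)

def stop_all_best_effort(operation_ids, stop_fn):
    """Try to stop every operation; log and continue on individual failures."""
    for op_id in operation_ids:
        try:
            stop_fn(op_id)
        except Exception:
            # One bad record must not prevent the remaining operations from stopping
            SKETCH_LOGGER.exception("Could not stop operation %s", op_id)

# Example: stop_all_best_effort([1, 2, 3], lambda op_id: None)
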
def cancel_or_remove_operation(self, operation_id, is_group, remove_after_stop=False):
    """
    Stop the operation given by operation_id. If is_group is true stop all the operations
    from that group.
    """
    # Load the burst before any removal, to have its data in memory here
    burst_config = BurstService.get_burst_for_operation_id(operation_id, is_group)
    if burst_config is not None:
        self.burst_service.mark_burst_finished(burst_config, BurstConfiguration.BURST_CANCELED,
                                               store_h5_file=False)
        # Spin until the group burst is no longer flagged as pending
        while GROUP_BURST_PENDING.get(burst_config.id, False):
            pass
        GROUP_BURST_PENDING.pop(burst_config.id, False)

    result = OperationService.stop_operation(operation_id, is_group, remove_after_stop)
    if remove_after_stop:
        current_burst = self.context.burst_config
        if (current_burst is not None and burst_config is not None
                and current_burst.id == burst_config.id
                and ((current_burst.fk_simulation == operation_id and not is_group)
                     or (current_burst.fk_operation_group == operation_id and is_group))):
            self.reset_simulator_configuration()
        if burst_config is not None:
            burst_config = BurstService.load_burst_configuration(burst_config.id)
            if burst_config:
                BurstService.remove_burst_configuration(burst_config.id)
    return result

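# The variant above spin-waits on GROUP_BURST_PENDING with `while ...: pass`, which
# keeps a core busy until the flag clears. A hedged alternative sketch (a hypothetical
# helper, not TVB code) polls with a sleep and a timeout instead:
import time

def wait_until_cleared(pending_flags, key, timeout=10.0, interval=0.05):
    """Poll `pending_flags` until `key` is no longer truthy or the timeout expires;
    returns True when the flag cleared in time."""
    deadline = time.monotonic() + timeout
    while pending_flags.get(key, False):
        if time.monotonic() >= deadline:
            return False
        time.sleep(interval)
    return True
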
def stop_operation(self, operation_id, is_group, remove_after_stop=False):
    """
    Stop the operation given by operation_id. If is_group is true stop all the operations
    from that group.
    """
    operation_service = OperationService()
    result = False
    if int(is_group) == 0:
        result = operation_service.stop_operation(operation_id)
        if remove_after_stop:
            ProjectService().remove_operation(operation_id)
    else:
        op_group = ProjectService.get_operation_group_by_id(operation_id)
        operations_in_group = ProjectService.get_operations_in_group(op_group)
        for operation in operations_in_group:
            tmp_res = operation_service.stop_operation(operation.id)
            if remove_after_stop:
                ProjectService().remove_operation(operation.id)
            result = result or tmp_res
    return result

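# The dispatch above has one shape: a plain id is stopped directly, while a group id is
# expanded into its member operations, each stopped in turn, and the per-member results
# OR-ed together. A dependency-free sketch of that shape; `stop_single` and
# `expand_group` are hypothetical stand-ins for the OperationService / ProjectService
# calls (the remove_after_stop branch is omitted for brevity):

def stop_one_or_group(operation_id, is_group, stop_single, expand_group):
    """Return True if at least one stop call reported success."""
    if int(is_group) == 0:
        return stop_single(operation_id)
    result = False
    for member_id in expand_group(operation_id):
        result = stop_single(member_id) or result
    return result

# Example: stop_one_or_group(4, True, lambda i: i % 2 == 0, lambda g: [3, 4, 5])
# returns True, because member 4 reports a successful stop.
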
def cancel_or_remove_operation(self, operation_id, is_group, remove_after_stop=False):
    """
    Stop the operation given by operation_id. If is_group is true stop all the operations
    from that group.
    """
    operation_id = int(operation_id)
    is_group = int(is_group) != 0
    # Load the burst before any removal, to have its data in memory here
    burst_config = BurstService.get_burst_for_operation_id(operation_id)
    result = OperationService.stop_operation(operation_id, is_group, remove_after_stop)
    if remove_after_stop:
        current_burst = common.get_from_session(common.KEY_BURST_CONFIG)
        if current_burst is not None and burst_config is not None and current_burst.id == burst_config.id:
            common.remove_from_session(common.KEY_BURST_CONFIG)
            common.add2session(common.KEY_BURST_CONFIG, BurstConfiguration(burst_config.project.id))
    return result

def cancel_or_remove_operation(self, operation_id, is_group, remove_after_stop=False):
    """
    Stop the operation given by operation_id. If is_group is true stop all the operations
    from that group.
    """
    # Load the burst before any removal, to have its data in memory here
    burst_config = BurstService.get_burst_for_operation_id(operation_id, is_group)
    result = OperationService.stop_operation(operation_id, is_group, remove_after_stop)
    if remove_after_stop:
        current_burst = self.context.burst_config
        if (current_burst is not None and burst_config is not None
                and current_burst.id == burst_config.id
                and ((current_burst.fk_simulation == operation_id and not is_group)
                     or (current_burst.fk_operation_group == operation_id and is_group))):
            self.reset_simulator_configuration()
    return result

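# The variant above (like the first cancel_or_remove_operation) guards the simulator
# reset with one predicate: the burst currently loaded in the context must be the very
# burst whose single run (fk_simulation) or whole group (fk_operation_group) was just
# removed. A self-contained restatement of that predicate, with a toy Burst type
# standing in for the real BurstConfiguration:
from collections import namedtuple

Burst = namedtuple('Burst', 'id fk_simulation fk_operation_group')

def burst_owns_operation(current_burst, stopped_burst, operation_id, is_group):
    """True when the currently displayed burst is the one being removed."""
    if current_burst is None or stopped_burst is None:
        return False
    if current_burst.id != stopped_burst.id:
        return False
    if is_group:
        return current_burst.fk_operation_group == operation_id
    return current_burst.fk_simulation == operation_id

# Example: the displayed burst was launched by single operation 7, so it must reset.
b = Burst(id=1, fk_simulation=7, fk_operation_group=None)
assert burst_owns_operation(b, b, 7, is_group=False)
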
class TestOperationService(BaseTestCase):
    """
    Test class for the introspection module. Some tests from here do async launches.
    For those cases Transactional tests won't work.
    """

    def setup_method(self):
        """
        Reset the database before each test.
        """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE

    def teardown_method(self):
        """
        Reset the database when the test is done.
        """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()

    def _assert_no_ddti(self):
        count = dao.count_datatypes(self.test_project.id, DummyDataTypeIndex)
        assert 0 == count

    def _assert_stored_ddti(self, expected_cnt=1):
        count = dao.count_datatypes(self.test_project.id, DummyDataTypeIndex)
        assert expected_cnt == count
        datatype = dao.try_load_last_entity_of_type(self.test_project.id, DummyDataTypeIndex)
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        return datatype

    def test_datatypes_groups(self, test_adapter_factory):
        """
        Tests if the dataType group is set correctly on the dataTypes resulting from the same operation group.
        """
        # TODO: re-write this to use groups correctly
        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 0, "There should be no operation"

        algo = test_adapter_factory(TestAdapter3)
        adapter_instance = ABCAdapter.build_adapter(algo)
        data = {model_burst.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        FlowService().fire_operation(adapter_instance, self.test_user, self.test_project.id)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 1, "Expected one operation group"
        assert all_operations[0][2] == 2, "Expected 2 operations in group"
        operation_group_id = all_operations[0][3]
        assert operation_group_id is not None, "The operation should be part of a group."

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"

    def test_initiate_operation(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        test_adapter_factory()
        adapter = TestFactory.create_adapter(module, class_name)
        output = adapter.get_output()
        output_type = output[0].__name__
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        view_model = adapter.get_view_model()()
        view_model.test1_val1 = 5
        view_model.test1_val2 = 5
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)

        group = dao.get_algorithm_by_module(module, class_name)
        assert group.module == 'tvb.tests.framework.adapters.testadapter1', "Wrong data stored."
        assert group.classname == 'TestAdapter1', "Wrong data stored."

        dts, count = dao.get_values_of_datatype(self.test_project.id, DummyDataTypeIndex)
        assert count == 1
        assert len(dts) == 1
        datatype = dao.get_datatype_by_id(dts[0][0])
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        assert datatype.type == output_type, "Wrong data stored."

    def test_delete_dt_free_hdd_space(self, test_adapter_factory, operation_factory):
        """
        Launch two operations and give the user enough available space so that both should finish.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self._assert_no_ddti()
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        datatype = self._assert_stored_ddti()

        # Now free some space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        self._assert_no_ddti()
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_two_ops_hdd_with_space(self, test_adapter_factory):
        """
        Launch two operations and give the user enough available space so that both should finish.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = 2 * float(adapter.get_required_disk_size(view_model))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        datatype = self._assert_stored_ddti()

        # Now update the maximum disk size to be the size of the previously resulted datatypes
        # (transform from kB to MB) plus what is estimated to be required by the next one
        # (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(
            adapter.get_required_disk_size(view_model))

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        self._assert_stored_ddti(2)

    def test_launch_two_ops_hdd_full_space(self):
        """
        Launch two operations and give the user just enough space so that the first should finish,
        but, after the update to the user's HDD size, the second should not.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = (1 + float(adapter.get_required_disk_size(view_model)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        datatype = self._assert_stored_ddti()

        # Now update the maximum disk size to be less than the size of the previously resulted
        # datatypes (transform kB to MB) plus what is estimated to be required by the next one
        # (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                            float(adapter.get_required_disk_size(view_model) - 1)

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                      model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_operation_hdd_with_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_operation_hdd_with_space_started_ops(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        space_taken_by_started = 100
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        started_operation = model_operation.Operation(self.test_user.id, self.test_project.id,
                                                      adapter.stored_adapter.id, "",
                                                      status=model_operation.STATUS_STARTED,
                                                      estimated_disk_size=space_taken_by_started)
        view_model = adapter.get_view_model()()
        dao.store_entity(started_operation)
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model) + space_taken_by_started)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_operation_hdd_full_space(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model) - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                      model_view=view_model)
        self._assert_no_ddti()

    def test_launch_operation_hdd_full_space_started_ops(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        space_taken_by_started = 100
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        started_operation = model_operation.Operation(self.test_user.id, self.test_project.id,
                                                      adapter.stored_adapter.id, "",
                                                      status=model_operation.STATUS_STARTED,
                                                      estimated_disk_size=space_taken_by_started)
        view_model = adapter.get_view_model()()
        dao.store_entity(started_operation)
        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(view_model) + space_taken_by_started - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                      model_view=view_model)
        self._assert_no_ddti()

    def test_stop_operation(self, test_adapter_factory):
        """
        Test that an operation is successfully stopped.
        """
        test_adapter_factory(adapter_class=TestAdapter2)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter2", "TestAdapter2")
        view_model = adapter.get_view_model()()
        view_model.test = 5
        algo = adapter.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project, algo,
                                                                  algo_category, {}, view_model=view_model)

        self.operation_service._send_to_cluster(operations, adapter)
        self.operation_service.stop_operation(operations[0].id)

        operation = dao.get_operation_by_id(operations[0].id)
        assert operation.status == model_operation.STATUS_CANCELED, "Operation should have been canceled!"

    def test_stop_operation_finished(self, test_adapter_factory):
        """
        Test that an operation that is already finished is not changed by the stop operation.
        """
        test_adapter_factory()
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        view_model = adapter.get_view_model()()
        view_model.test1_val1 = 5
        view_model.test1_val2 = 5
        algo = adapter.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project, algo,
                                                                  algo_category, {}, view_model=view_model)
        self.operation_service._send_to_cluster(operations, adapter)

        operation = dao.get_operation_by_id(operations[0].id)
        operation.status = model_operation.STATUS_FINISHED
        dao.store_entity(operation)

        self.operation_service.stop_operation(operations[0].id)

        operation = dao.get_operation_by_id(operations[0].id)
        assert operation.status == model_operation.STATUS_FINISHED, "Operation shouldn't have been canceled!"

class TestOperationService(BaseTestCase):
    """
    Test class for the introspection module. Some tests from here do async launches.
    For those cases Transactional tests won't work.
    """

    def setup_method(self):
        """
        Reset the database before each test.
        """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE

    def teardown_method(self):
        """
        Reset the database when the test is done.
        """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()

    def _assert_no_ddti(self):
        count = dao.count_datatypes(self.test_project.id, DummyDataTypeIndex)
        assert 0 == count

    def _assert_stored_ddti(self, expected_cnt=1):
        count = dao.count_datatypes(self.test_project.id, DummyDataTypeIndex)
        assert expected_cnt == count
        datatype = dao.try_load_last_entity_of_type(self.test_project.id, DummyDataTypeIndex)
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        return datatype

    def test_datatypes_groups(self, test_adapter_factory, datatype_group_factory):
        """
        Tests if the dataType group is set correctly on the dataTypes resulting from the same operation group.
        """
        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 0, "There should be no operation"

        dt_group = datatype_group_factory(project=self.test_project)
        model = TestModel()
        test_adapter_factory()
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")

        operations = dao.get_operations_in_group(dt_group.id)
        for op in operations:
            model.gid = uuid.uuid4()
            op_path = StorageInterface().get_project_folder(self.test_project.name, str(op.id))
            op.view_model_gid = model.gid.hex
            op.algorithm = adapter.stored_adapter
            h5.store_view_model(model, op_path)
            dao.store_entity(op)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 2, "Expected two operation groups"
        assert all_operations[0][2] == 6, "Expected 6 operations in one group"
        operation_group_id = all_operations[0][3]
        assert operation_group_id is not None, "The operation should be part of a group."

        self.operation_service.stop_operation(all_operations[1][0])
        self.operation_service.stop_operation(all_operations[1][1])
        # Make sure operations are executed
        self.operation_service.launch_operation(all_operations[1][0], False)
        self.operation_service.launch_operation(all_operations[1][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"

    def test_initiate_operation(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory()
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        view_model = TestModel()
        view_model.test1_val1 = 5
        view_model.test1_val2 = 5
        adapter.generic_attributes.subject = "Test4242"

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  model_view=view_model)

        dts, count = dao.get_values_of_datatype(self.test_project.id, DummyDataTypeIndex)
        assert count == 1
        assert len(dts) == 1
        datatype = dao.get_datatype_by_id(dts[0][0])
        assert datatype.subject == "Test4242", "Wrong data stored."
        assert datatype.type == adapter.get_output()[0].__name__, "Wrong data stored."

    def test_delete_dt_free_hdd_space(self, test_adapter_factory, operation_factory):
        """
        Launch two operations and give the user enough available space so that both should finish.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model))

        self._assert_no_ddti()
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  model_view=view_model)
        datatype = self._assert_stored_ddti()

        # Now free some space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        self._assert_no_ddti()
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_two_ops_hdd_with_space(self, test_adapter_factory):
        """
        Launch two operations and give the user enough available space so that both should finish.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = 2 * float(adapter.get_required_disk_size(view_model))

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  model_view=view_model)
        datatype = self._assert_stored_ddti()

        # Now update the maximum disk size to be the size of the previously resulted datatypes
        # (transform from kB to MB) plus what is estimated to be required by the next one
        # (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(
            adapter.get_required_disk_size(view_model))

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  model_view=view_model)
        self._assert_stored_ddti(2)

    def test_launch_two_ops_hdd_full_space(self):
        """
        Launch two operations and give the user just enough space so that the first should finish,
        but, after the update to the user's HDD size, the second should not.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = (1 + float(adapter.get_required_disk_size(view_model)))

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  model_view=view_model)
        datatype = self._assert_stored_ddti()

        # Now update the maximum disk size to be less than the size of the previously resulted dts
        # (transform kB to MB) plus what is estimated to be required by the next one (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                            float(adapter.get_required_disk_size(view_model) - 1)

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                      model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_operation_hdd_with_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model))

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_operation_hdd_with_space_started_ops(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        space_taken_by_started = 100
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        started_operation = model_operation.Operation(None, self.test_user.id, self.test_project.id,
                                                      adapter.stored_adapter.id,
                                                      status=model_operation.STATUS_STARTED,
                                                      estimated_disk_size=space_taken_by_started)
        view_model = adapter.get_view_model()()
        dao.store_entity(started_operation)
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model) + space_taken_by_started)

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_operation_hdd_full_space(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model) - 1)

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                      model_view=view_model)
        self._assert_no_ddti()

    def test_launch_operation_hdd_full_space_started_ops(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        space_taken_by_started = 100
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        started_operation = model_operation.Operation(None, self.test_user.id, self.test_project.id,
                                                      adapter.stored_adapter.id,
                                                      status=model_operation.STATUS_STARTED,
                                                      estimated_disk_size=space_taken_by_started)
        view_model = adapter.get_view_model()()
        dao.store_entity(started_operation)
        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(view_model) + space_taken_by_started - 1)

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                      model_view=view_model)
        self._assert_no_ddti()

    def test_stop_operation(self, test_adapter_factory):
        """
        Test that an operation is successfully stopped.
        """
        test_adapter_factory(adapter_class=TestAdapter2)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter2", "TestAdapter2")
        view_model = adapter.get_view_model()()
        view_model.test = 5
        algo = adapter.stored_adapter
        operation = self.operation_service.prepare_operation(self.test_user.id, self.test_project, algo,
                                                             view_model=view_model)

        self.operation_service._send_to_cluster(operation, adapter)
        self.operation_service.stop_operation(operation)

        operation = dao.get_operation_by_id(operation.id)
        assert operation.status == model_operation.STATUS_CANCELED, "Operation should have been canceled!"

    def test_stop_operation_finished(self, test_adapter_factory):
        """
        Test that an operation that is already finished is not changed by the stop operation.
        """
        test_adapter_factory()
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        view_model = adapter.get_view_model()()
        view_model.test1_val1 = 5
        view_model.test1_val2 = 5
        algo = adapter.stored_adapter
        operation = self.operation_service.prepare_operation(self.test_user.id, self.test_project, algo,
                                                             view_model=view_model)
        self.operation_service._send_to_cluster(operation, adapter)

        operation = dao.get_operation_by_id(operation.id)
        operation.status = model_operation.STATUS_FINISHED
        dao.store_entity(operation)

        self.operation_service.stop_operation(operation.id)

        operation = dao.get_operation_by_id(operation.id)
        assert operation.status == model_operation.STATUS_FINISHED, "Operation shouldn't have been canceled!"

    def test_fire_operation(self):
        """
        Test preparation of an adapter and its launch mechanism.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        test_user = TestFactory.create_user(username="******")
        test_project = TestFactory.create_project(admin=test_user, name="test_project_fire_sim")

        result = OperationService().fire_operation(adapter, test_user, test_project.id,
                                                   view_model=adapter.get_view_model()())
        assert result.endswith("has finished."), "Operation failed"

class OperationServiceTest(BaseTestCase):
    """
    Test class for the introspection module. Some tests from here do async launches.
    For those cases Transactional tests won't work.
    """

    def setUp(self):
        """
        Reset the database before each test.
        """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE

    def tearDown(self):
        """
        Reset the database when the test is done.
        """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()

    def test_datatypes_groups(self):
        """
        Tests if the dataType group is set correctly on the dataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 0, "There should be no operation")

        algogroup = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
        adapter_instance = flow_service.build_adapter_instance(group)
        data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 1, "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")
        operation_group_id = all_operations[0][3]
        self.assertNotEquals(operation_group_id, None, "The operation should be part of a group.")

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")

    def test_initiate_operation(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test1_val1": 5, "test1_val2": 5}
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        res = self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder,
                                                        method_name=ABCAdapter.LAUNCH_METHOD, **data)
        self.assertTrue(res.index("has finished.") > 10, "Operation didn't finish")
        group = dao.find_group(module, class_name)
        self.assertEqual(group.module, 'tvb.tests.framework.adapters.testadapter1', "Wrong data stored.")
        self.assertEqual(group.classname, 'TestAdapter1', "Wrong data stored.")
        dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1)
        self.assertEqual(count, 1)
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    def test_delete_dt_free_HDD_space(self):
        """
        Launch two operations and give the user enough available space so that both should finish.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 0)
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder,
                                                  method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

        # Now free some space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 0)
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder,
                                                  method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    def test_launch_two_ops_HDD_with_space(self):
        """
        Launch two operations and give the user enough available space so that both should finish.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = 2 * float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder,
                                                  method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

        # Now update the maximum disk size to be the size of the previously resulted datatypes
        # (transform from kB to MB) plus what is estimated to be required by the next one
        # (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(adapter.get_required_disk_size(**data))

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder,
                                                  method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 2)
        datatype = dao.get_datatype_by_id(dts[1][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    def test_launch_two_ops_HDD_full_space(self):
        """
        Launch two operations and give the user just enough space so that the first should finish,
        but, after the update to the user's HDD size, the second should not.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = (1 + float(adapter.get_required_disk_size(**data)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder,
                                                  method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

        # Now update the maximum disk size to be less than the size of the previously resulted
        # datatypes (transform kB to MB) plus what is estimated to be required by the next one
        # (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                            float(adapter.get_required_disk_size(**data) - 1)

        self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                          self.test_project.id, adapter, tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)

    def test_launch_operation_HDD_with_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder,
                                                  method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    def test_launch_operation_HDD_with_space_started_ops(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        space_taken_by_started = 100
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        started_operation = model.Operation(self.test_user.id, self.test_project.id, group.id, "",
                                            status=model.STATUS_STARTED,
                                            estimated_disk_size=space_taken_by_started)
        dao.store_entity(started_operation)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder,
                                                  method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    def test_launch_operation_HDD_full_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                          self.test_project.id, adapter, tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 0)

    def test_launch_operation_HDD_full_space_started_ops(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        space_taken_by_started = 100
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        started_operation = model.Operation(self.test_user.id, self.test_project.id, group.id, "",
                                            status=model.STATUS_STARTED,
                                            estimated_disk_size=space_taken_by_started)
        dao.store_entity(started_operation)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                          self.test_project.id, adapter, tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 0)

    def test_stop_operation(self):
        """
        Test that an operation is successfully stopped.
        """
        module = "tvb.tests.framework.adapters.testadapter2"
        class_name = "TestAdapter2"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 5}
        algo_group = adapter.algorithm_group
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        algo = dao.get_algorithm_by_group(algo_group.id)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                                  algo_category, {},
                                                                  ABCAdapter.LAUNCH_METHOD, **data)
        self.operation_service._send_to_cluster(operations, adapter)
        self.operation_service.stop_operation(operations[0].id)

        operation = dao.get_operation_by_id(operations[0].id)
        self.assertEqual(operation.status, model.STATUS_CANCELED, "Operation should have been canceled!")

    def test_stop_operation_finished(self):
        """
        Test that an operation that is already finished is not changed by the stop operation.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test1_val1": 5, 'test1_val2': 5}
        algo_group = adapter.algorithm_group
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        algo = dao.get_algorithm_by_group(algo_group.id)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                                  algo_category, {},
                                                                  ABCAdapter.LAUNCH_METHOD, **data)
        self.operation_service._send_to_cluster(operations, adapter)

        operation = dao.get_operation_by_id(operations[0].id)
        operation.status = model.STATUS_FINISHED
        dao.store_entity(operation)

        self.operation_service.stop_operation(operations[0].id)

        operation = dao.get_operation_by_id(operations[0].id)
        self.assertEqual(operation.status, model.STATUS_FINISHED, "Operation shouldn't have been canceled!")

    def test_array_from_string(self):
        """
        Simple test for parsing 1d, 2d and 3d arrays from strings.
        """
        row = {'description': 'test.', 'default': 'None', 'required': True, 'label': 'test: ',
               'attributes': None, 'quantifier': 'manual', 'elementType': 'float', 'type': 'array',
               'options': None, 'name': 'test'}
        input_data_string = '[ [1 2 3] [4 5 6]]'
        output = string2array(input_data_string, ' ', row['elementType'])
        self.assertEqual(output.shape, (2, 3), "Dimensions not properly parsed")
        for i in output[0]:
            self.assertTrue(i in [1, 2, 3])
        for i in output[1]:
            self.assertTrue(i in [4, 5, 6])

        input_data_string = '[1, 2, 3, 4, 5, 6]'
        output = string2array(input_data_string, ',', row['elementType'])
        self.assertEqual(output.shape, (6,), "Dimensions not properly parsed")
        for i in output:
            self.assertTrue(i in [1, 2, 3, 4, 5, 6])

        input_data_string = '[ [ [1,1], [2, 2] ], [ [3 ,3], [4,4] ] ]'
        output = string2array(input_data_string, ',', row['elementType'])
        self.assertEqual(output.shape, (2, 2, 2), "Wrong dimensions.")
        for i in output[0][0]:
            self.assertTrue(i == 1)
        for i in output[0][1]:
            self.assertTrue(i == 2)
        for i in output[1][0]:
            self.assertTrue(i == 3)
        for i in output[1][1]:
            self.assertTrue(i == 4)

        row = {'description': 'test.', 'default': 'None', 'required': True, 'label': 'test: ',
               'attributes': None, 'quantifier': 'manual', 'elementType': 'str', 'type': 'array',
               'options': None, 'name': 'test'}
        input_data_string = '[1, 2, 3, 4, 5, 6]'
        output = string2array(input_data_string, ',', row['elementType'])
        for i in output:
            self.assertTrue(i in [1, 2, 3, 4, 5, 6])

    def test_wrong_array_from_string(self):
        """Test that parsing an array from a string raises the expected exception for malformed input."""
        row = {'description': 'test.', 'default': 'None', 'required': True, 'label': 'test: ',
               'attributes': None, 'quantifier': 'manual', 'elementType': 'float', 'type': 'array',
               'options': None, 'name': 'test'}
        input_data_string = '[ [1,2 3] [4,5,6]]'
        self.assertRaises(ValueError, string2array, input_data_string, ',', row['elementType'])
        input_data_string = '[ [1,2,wrong], [4, 5, 6]]'
        self.assertRaises(ValueError, string2array, input_data_string, ',', row['elementType'])

        row = {'description': 'test.', 'default': 'None', 'required': True, 'label': 'test: ',
               'attributes': None, 'quantifier': 'manual', 'elementType': 'str', 'type': 'array',
               'options': None, 'name': 'test'}
        output = string2array(input_data_string, ',', row['elementType'])
        self.assertEqual(output.shape, (2, 3))
        self.assertEqual(output[0][2], 'wrong', 'String data not converted properly')
        input_data_string = '[ [1,2 3] [4,5,6]]'
        output = string2array(input_data_string, ',', row['elementType'])
        self.assertEqual(output[0][1], '2 3')

    def test_reduce_dimension_component(self):
        """
        This method tests if the data passed to the launch method of the NDimensionArrayAdapter
        adapter is correct. The passed data should be a list of arrays with one dimension.
        """
        inserted_count = FlowService().get_available_datatypes(self.test_project.id,
                                                               "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(inserted_count, 0, "Expected to find no data.")
        # create an operation
        algorithm_id = FlowService().get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter')[0].id
        operation = model.Operation(self.test_user.id, self.test_project.id, algorithm_id, 'test params',
                                    meta=json.dumps({DataTypeMetaData.KEY_STATE: "RAW_DATA"}),
                                    status=model.STATUS_FINISHED, method_name=ABCAdapter.LAUNCH_METHOD)
        operation = dao.store_entity(operation)
        # save the array wrapper in DB
        adapter_instance = NDimensionArrayAdapter()
        PARAMS = {}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        inserted_data = FlowService().get_available_datatypes(self.test_project.id,
                                                              "tvb.datatypes.arrays.MappedArray")[0]
        self.assertEqual(len(inserted_data), 1, "Problems when inserting data")
        gid = inserted_data[0][2]
        entity = dao.get_datatype_by_gid(gid)

        # from the 3D array do not select any array
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": "requiredDim_1",
                  "input_data_dimensions_1": "",
                  "input_data_dimensions_2": ""}
        try:
            self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
            self.fail("Test should not pass. The resulted array should be a 1D array.")
        except Exception:
            # OK, do nothing; we were expecting to produce a 1D array
            pass

        # from the 3D array select only a 1D array
        first_dim = [gid + '_1_0', 'requiredDim_1']
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": first_dim,
                  "input_data_dimensions_1": gid + "_2_1"}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        expected_result = entity.array_data[:, 0, 1]
        actual_result = adapter_instance.launch_param
        self.assertEqual(len(actual_result), len(expected_result), "Not the same size for results!")
        self.assertTrue(numpy.equal(actual_result, expected_result).all())

        # from the 3D array select a 2D array
        first_dim = [gid + '_1_0', gid + '_1_1', 'requiredDim_2']
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": first_dim,
                  "input_data_dimensions_1": gid + "_2_1"}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        expected_result = entity.array_data[slice(0, None), [0, 1], 1]
        actual_result = adapter_instance.launch_param
        self.assertEqual(len(actual_result), len(expected_result), "Not the same size for results!")
        self.assertTrue(numpy.equal(actual_result, expected_result).all())

        # from the 3D array select a 1D array by applying the SUM function on the first
        # dimension and the average function on the second dimension
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": ["requiredDim_1", "func_sum"],
                  "input_data_dimensions_1": "func_average",
                  "input_data_dimensions_2": ""}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        aux = numpy.sum(entity.array_data, axis=0)
        expected_result = numpy.average(aux, axis=0)
        actual_result = adapter_instance.launch_param
        self.assertEqual(len(actual_result), len(expected_result), "Not the same size of results!")
        self.assertTrue(numpy.equal(actual_result, expected_result).all())

        # from the 3D array select a 2D array and apply an operation on the second dimension
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": ["requiredDim_2", "func_sum",
                                              "expected_shape_x,512", "operations_x,>"],
                  "input_data_dimensions_1": "",
                  "input_data_dimensions_2": ""}
        try:
            self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
            self.fail("Test should not pass! The second dimension of the array should be >512.")
        except Exception:
            # OK, do nothing
            pass

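# The test_array_from_string cases in the class above exercise string2array's handling
# of separators and element types. A minimal sketch of the 1-D case only follows
# (a hypothetical re-implementation for illustration, not TVB's actual parser):
import numpy

def parse_1d_array(text, separator, element_type=float):
    """Strip the surrounding brackets, split on `separator`, and convert every
    non-empty token with `element_type`."""
    body = text.strip().lstrip('[').rstrip(']')
    tokens = [tok for tok in body.split(separator) if tok.strip()]
    return numpy.array([element_type(tok) for tok in tokens])

# Example mirroring the 1-D test input above:
out = parse_1d_array('[1, 2, 3, 4, 5, 6]', ',')
assert out.shape == (6,)
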
class TestOperationService(BaseTestCase):
    """
    Test class for the introspection module. Some tests from here do async launches.
    For those cases Transactional tests won't work.
    TODO: this is still to be refactored, for being huge, with duplicates and many irrelevant checks
    """

    def setup_method(self):
        """
        Reset the database before each test.
        """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE

    def teardown_method(self):
        """
        Reset the database when the test is done.
        """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()

    def _assert_no_dt2(self):
        count = dao.count_datatypes(self.test_project.id, Datatype2)
        assert 0 == count

    def _assert_stored_dt2(self, expected_cnt=1):
        count = dao.count_datatypes(self.test_project.id, Datatype2)
        assert expected_cnt == count
        datatype = dao.try_load_last_entity_of_type(self.test_project.id, Datatype2)
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        return datatype

    def test_datatypes_groups(self):
        """
        Tests if the dataType group is set correctly on the dataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 0, "There should be no operation"

        adapter_instance = TestFactory.create_adapter('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 1, "Expected one operation group"
        assert all_operations[0][2] == 2, "Expected 2 operations in group"
        operation_group_id = all_operations[0][3]
        assert operation_group_id is not None, "The operation should be part of a group."

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"

    def test_initiate_operation(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        adapter = TestFactory.create_adapter(module, class_name)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test1_val1": 5, "test1_val2": 5}
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        res = self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                        tmp_folder, **data)
        assert res.index("has finished.") > 10, "Operation didn't finish"
        group = dao.get_algorithm_by_module(module, class_name)
        assert group.module == 'tvb.tests.framework.adapters.testadapter1', "Wrong data stored."
        assert group.classname == 'TestAdapter1', "Wrong data stored."
        dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1)
        assert count == 1
        assert len(dts) == 1
        datatype = dao.get_datatype_by_id(dts[0][0])
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        assert datatype.type == output_type, "Wrong data stored."

    def test_delete_dt_free_HDD_space(self):
        """
        Launch two operations and give the user enough available space so that both should finish.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now free some space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_stored_dt2()

    def test_launch_two_ops_HDD_with_space(self):
        """
        Launch two operations and give the user enough available space so that both should finish.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = 2 * float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now update the maximum disk size to be the size of the previously resulted datatypes
        # (transform from kB to MB) plus what is estimated to be required by the next one
        # (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(adapter.get_required_disk_size(**data))

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_stored_dt2(2)

    def test_launch_two_ops_HDD_full_space(self):
        """
        Launch two operations and give the user just enough space so that the first should finish,
        but, after the update to the user's HDD size, the second should not.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = (1 + float(adapter.get_required_disk_size(**data)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now update the maximum disk size to be less than the size of the previously resulted
        # datatypes (transform kB to MB) plus what is estimated to be required by the next one
        # (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                            float(adapter.get_required_disk_size(**data) - 1)

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                      tmp_folder, **data)
        self._assert_stored_dt2()

    def test_launch_operation_HDD_with_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_stored_dt2()

    def test_launch_operation_HDD_with_space_started_ops(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        space_taken_by_started = 100
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        started_operation = model.Operation(self.test_user.id, self.test_project.id, adapter.stored_adapter.id, "",
                                            status=model.STATUS_STARTED,
                                            estimated_disk_size=space_taken_by_started)
        dao.store_entity(started_operation)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_stored_dt2()

    def test_launch_operation_HDD_full_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                      tmp_folder, **data)
        self._assert_no_dt2()

    def test_launch_operation_HDD_full_space_started_ops(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        space_taken_by_started = 100
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        started_operation = model.Operation(self.test_user.id, self.test_project.id, adapter.stored_adapter.id, "",
                                            status=model.STATUS_STARTED,
                                            estimated_disk_size=space_taken_by_started)
        dao.store_entity(started_operation)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                      tmp_folder, **data)
        self._assert_no_dt2()

    def test_stop_operation(self):
        """
        Test that an operation is successfully stopped.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter2", "TestAdapter2")
        data = {"test": 5}
        algo = adapter.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                                  algo_category, {}, **data)
        self.operation_service._send_to_cluster(operations, adapter)
        self.operation_service.stop_operation(operations[0].id)

        operation = dao.get_operation_by_id(operations[0].id)
        assert operation.status == model.STATUS_CANCELED, "Operation should have been canceled!"

    def test_stop_operation_finished(self):
        """
        Test that an operation that is already finished is not changed by the stop operation.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        data = {"test1_val1": 5, 'test1_val2': 5}
        algo = adapter.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                                  algo_category, {}, **data)
        self.operation_service._send_to_cluster(operations, adapter)

        operation = dao.get_operation_by_id(operations[0].id)
        operation.status = model.STATUS_FINISHED
        dao.store_entity(operation)

        self.operation_service.stop_operation(operations[0].id)

        operation = dao.get_operation_by_id(operations[0].id)
        assert operation.status == model.STATUS_FINISHED, "Operation shouldn't have been canceled!"

    def test_array_from_string(self):
        """
        Simple test for parsing 1d, 2d and 3d arrays from strings.
        """
        row = {'description': 'test.', 'default': 'None', 'required': True, 'label': 'test: ',
               'attributes': None, 'elementType': 'float', 'type': 'array',
               'options': None, 'name': 'test'}
        input_data_string = '[ [1 2 3] [4 5 6]]'
        output = string2array(input_data_string, ' ', row['elementType'])
        assert output.shape == (2, 3), "Dimensions not properly parsed"
        for i in output[0]:
            assert i in [1, 2, 3]
        for i in output[1]:
            assert i in [4, 5, 6]

        input_data_string = '[1, 2, 3, 4, 5, 6]'
        output = string2array(input_data_string, ',', row['elementType'])
        assert output.shape == (6,), "Dimensions not properly parsed"
        for i in output:
            assert i in [1, 2, 3, 4, 5, 6]

        input_data_string = '[ [ [1,1], [2, 2] ], [ [3 ,3], [4,4] ] ]'
        output = string2array(input_data_string, ',', row['elementType'])
        assert output.shape == (2, 2, 2), "Wrong dimensions."
        for i in output[0][0]:
            assert i == 1
        for i in output[0][1]:
            assert i == 2
        for i in output[1][0]:
            assert i == 3
        for i in output[1][1]:
            assert i == 4

        row = {'description': 'test.', 'default': 'None', 'required': True, 'label': 'test: ',
               'attributes': None, 'elementType': 'str', 'type': 'array',
               'options': None, 'name': 'test'}
        input_data_string = '[1, 2, 3, 4, 5, 6]'
        output = string2array(input_data_string, ',', row['elementType'])
        for i in output:
            assert i in [1, 2, 3, 4, 5, 6]

    def test_wrong_array_from_string(self):
        """Test that parsing an array from a string raises the expected exception for malformed input."""
        row = {'description': 'test.', 'default': 'None', 'required': True, 'label': 'test: ',
               'attributes': None, 'elementType': 'float', 'type': 'array',
               'options': None, 'name': 'test'}
        input_data_string = '[ [1,2 3] [4,5,6]]'
        with pytest.raises(ValueError):
            string2array(input_data_string, ',', row['elementType'])
        input_data_string = '[ [1,2,wrong], [4, 5, 6]]'
        with pytest.raises(ValueError):
            string2array(input_data_string, ',', row['elementType'])

        row = {'description': 'test.', 'default': 'None', 'required': True, 'label': 'test: ',
               'attributes': None, 'elementType': 'str', 'type': 'array',
               'options': None, 'name': 'test'}
        output = string2array(input_data_string, ',', row['elementType'])
        assert output.shape == (2, 3)
        assert output[0][2] == 'wrong', 'String data not converted properly'
        input_data_string = '[ [1,2 3] [4,5,6]]'
        output = string2array(input_data_string, ',', row['elementType'])
        assert output[0][1] == '2 3'

    def test_reduce_dimension_component(self):
        """
        This method tests if the data passed to the launch method of the NDimensionArrayAdapter
        adapter is correct. The passed data should be a list of arrays with one dimension.
        """
""" inserted_count = FlowService().get_available_datatypes(self.test_project.id, "tvb.datatypes.arrays.MappedArray")[1] assert inserted_count == 0, "Expected to find no data." #create an operation algorithm_id = FlowService().get_algorithm_by_module_and_class('tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter').id operation = model.Operation(self.test_user.id, self.test_project.id, algorithm_id, 'test params', meta=json.dumps({DataTypeMetaData.KEY_STATE: "RAW_DATA"}), status=model.STATUS_FINISHED) operation = dao.store_entity(operation) #save the array wrapper in DB adapter_instance = NDimensionArrayAdapter() PARAMS = {} self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) inserted_data = FlowService().get_available_datatypes(self.test_project.id, "tvb.datatypes.arrays.MappedArray")[0] assert len(inserted_data) == 1, "Problems when inserting data" gid = inserted_data[0][2] entity = dao.get_datatype_by_gid(gid) #from the 3D array do not select any array PARAMS = {"python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": "requiredDim_1", "input_data_dimensions_1": "", "input_data_dimensions_2": ""} try: self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) raise AssertionError("Test should not pass. The resulted array should be a 1D array.") except Exception: # OK, do nothing; we were expecting to produce a 1D array pass #from the 3D array select only a 1D array first_dim = [gid + '_1_0', 'requiredDim_1'] PARAMS = {"python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": first_dim, "input_data_dimensions_1": gid + "_2_1"} self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) expected_result = entity.array_data[:, 0, 1] actual_result = adapter_instance.launch_param assert len(actual_result) == len(expected_result), "Not the same size for results!" assert numpy.equal(actual_result, expected_result).all() #from the 3D array select a 2D array first_dim = [gid + '_1_0', gid + '_1_1', 'requiredDim_2'] PARAMS = {"python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": first_dim, "input_data_dimensions_1": gid + "_2_1"} self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) expected_result = entity.array_data[slice(0, None), [0, 1], 1] actual_result = adapter_instance.launch_param assert len(actual_result) == len(expected_result), "Not the same size for results!" assert numpy.equal(actual_result, expected_result).all() #from 3D array select 1D array by applying SUM function on the first #dimension and average function on the second dimension PARAMS = {"python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": ["requiredDim_1", "func_sum"], "input_data_dimensions_1": "func_average", "input_data_dimensions_2": ""} self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) aux = numpy.sum(entity.array_data, axis=0) expected_result = numpy.average(aux, axis=0) actual_result = adapter_instance.launch_param assert len(actual_result) == len(expected_result), "Not the same size of results!" assert numpy.equal(actual_result, expected_result).all() #from 3D array select a 2D array and apply op. 
on the second dimension PARAMS = {"python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": ["requiredDim_2", "func_sum", "expected_shape_x,512", "operations_x,>"], "input_data_dimensions_1": "", "input_data_dimensions_2": ""} try: self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) raise AssertionError("Test should not pass! The second dimension of the array should be >512.") except Exception: # OK, do nothing; pass
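# The stop-operation tests above assert on an operation's status together with a
# failure message; a minimal standalone sketch of that pytest idiom follows. The
# helper name assert_operation_status is illustrative only, not part of TVB:
def assert_operation_status(operation, expected_status):
    # the expected value is the right operand of ==, and the failure message is
    # the second operand of the assert statement (a trailing ", message" after
    # the comparison), never folded into the comparison itself
    assert operation.status == expected_status, \
        "Expected status '%s' but found '%s'" % (expected_status, operation.status)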
class BurstService(object):
    """
    Service layer for Burst related entities.
    """

    def __init__(self):
        self.operation_service = OperationService()
        self.workflow_service = WorkflowService()
        self.logger = get_logger(self.__class__.__module__)
        self.cache_portlet_configurators = {}

    def build_portlet_interface(self, portlet_configuration, project_id):
        """
        From a portlet_id and a project_id, first build the portlet
        entity then get its configurable interface.

        :param portlet_configuration: a portlet configuration entity. It holds at least
            the portlet_id, and in case any default parameters were saved
            they can be rebuilt from the analyzers // visualizer parameters
        :param project_id: the id of the current project

        :returns: the portlet interface will be of the following form::
            [{'interface': adapter_interface,
              'prefix': prefix_for_parameter_names,
              'subalg': {algorithm_field_name: default_algorithm_value},
              'algo_group': algorithm_group,
              'alg_ui_name': displayname},
             ......]
            A list of dictionaries for each adapter that makes up the portlet.
        """
        portlet_configurer = self._get_portlet_configurer(portlet_configuration.portlet_id)
        portlet_interface = portlet_configurer.get_configurable_interface()

        for adapter_conf in portlet_interface:
            interface = adapter_conf.interface
            itree_mngr = InputTreeManager()
            interface = itree_mngr.fill_input_tree_with_options(interface, project_id,
                                                                adapter_conf.stored_adapter.fk_category)
            adapter_conf.interface = itree_mngr.prepare_param_names(interface)

        portlet_configurer.update_default_values(portlet_interface, portlet_configuration)
        portlet_configurer.prefix_adapters_parameters(portlet_interface)

        return portlet_interface

    def _get_portlet_configurer(self, portlet_id):
        if portlet_id not in self.cache_portlet_configurators:
            portlet_entity = dao.get_portlet_by_id(portlet_id)
            if portlet_entity is None:
                raise InvalidPortletConfiguration("No portlet entity located in database with id=%s. " % portlet_id)
            self.cache_portlet_configurators[portlet_id] = PortletConfigurer(portlet_entity)
            self.logger.debug("Recently parsed portlet XML:" + str([portlet_entity]))
        return self.cache_portlet_configurators[portlet_id]

    def update_portlet_configuration(self, portlet_configuration, submited_parameters):
        """
        :param portlet_configuration: the portlet configuration that needs to be updated
        :param submited_parameters: a list of parameters as submitted from the UI. This is a dictionary
            in the form: {'dynamic' : {name:value pairs}, 'static' : {name:value pairs}}

        All names are prefixed with an adapter-specific generated prefix.
        """
        portlet_configurer = self._get_portlet_configurer(portlet_configuration.portlet_id)
        return portlet_configurer.update_portlet_configuration(portlet_configuration, submited_parameters)

    def new_burst_configuration(self, project_id):
        """
        Return a new burst configuration entity with all the default values.
        """
        burst_configuration = model.BurstConfiguration(project_id)
        burst_configuration.selected_tab = 0

        # Now set the default portlets for the specified burst configuration.
        # The default portlets are specified in the __init__.py script from tvb root.
        for tab_idx, value in DEFAULT_PORTLETS.items():
            for sel_idx, portlet_identifier in value.items():
                portlet = BurstService.get_portlet_by_identifier(portlet_identifier)
                if portlet is not None:
                    portlet_configuration = self.new_portlet_configuration(portlet.id, tab_idx, sel_idx,
                                                                           portlet.algorithm_identifier)
                    burst_configuration.set_portlet(tab_idx, sel_idx, portlet_configuration)

        return burst_configuration

    @staticmethod
    def _store_burst_config(burst_config):
        """
        Store a burst configuration entity.
        """
        burst_config.prepare_before_save()
        saved_entity = dao.store_entity(burst_config)
        return saved_entity.id

    @staticmethod
    def get_available_bursts(project_id):
        """
        Return all the bursts for the current project.
        """
        bursts = dao.get_bursts_for_project(project_id, page_size=MAX_BURSTS_DISPLAYED) or []
        for burst in bursts:
            burst.prepare_after_load()
        return bursts

    @staticmethod
    def populate_burst_disk_usage(bursts):
        """
        Adds a disk_usage field to each burst object.
        The disk usage is computed as the sum of the disk sizes of the datatypes generated by a burst.
        """
        sizes = dao.compute_bursts_disk_size([b.id for b in bursts])
        for b in bursts:
            b.disk_size = format_bytes_human(sizes[b.id])

    @staticmethod
    def rename_burst(burst_id, new_name):
        """
        Rename the burst given by burst_id, setting its name to new_name.
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.name = new_name
        dao.store_entity(burst)

    def load_burst(self, burst_id):
        """
        :param burst_id: the id of the burst that should be loaded

        Having this input the method should:
            - load the entity from the DB
            - get all the workflow steps for the saved burst id
            - go through the visualization workflow steps to create the tab
              configuration of the burst using the tab_index and index_in_tab
              fields saved on each workflow_step
        """
        burst = dao.get_burst_by_id(burst_id)
        burst.prepare_after_load()
        burst.reset_tabs()
        burst_workflows = dao.get_workflows_for_burst(burst.id)

        group_gid = None
        if len(burst_workflows) == 1:
            # A simple burst with no range parameters
            burst = self.__populate_tabs_from_workflow(burst, burst_workflows[0])
        elif len(burst_workflows) > 1:
            # A burst with a range of values created multiple workflows, so we need
            # to launch a parameter space exploration with the resulting group
            self.__populate_tabs_from_workflow(burst, burst_workflows[0])
            executed_steps = dao.get_workflow_steps(burst_workflows[0].id)
            operation = dao.get_operation_by_id(executed_steps[0].fk_operation)
            if operation.operation_group:
                workflow_group = dao.get_datatypegroup_by_op_group_id(operation.operation_group.id)
                group_gid = workflow_group.gid
        return burst, group_gid

    @staticmethod
    def __populate_tabs_from_workflow(burst_entity, workflow):
        """
        Given a burst and a workflow, populate the tabs of the burst with the PortletConfigurations
        generated from the steps of the workflow.
        """
        visualizers = dao.get_visualization_steps(workflow.id)
        for entry in visualizers:
            ## For each visualize step, also load all of the analyze steps.
            portlet_cfg = PortletConfiguration(entry.fk_portlet)
            portlet_cfg.set_visualizer(entry)
            analyzers = dao.get_workflow_steps_for_position(entry.fk_workflow, entry.tab_index, entry.index_in_tab)
            portlet_cfg.set_analyzers(analyzers)
            burst_entity.tabs[entry.tab_index].portlets[entry.index_in_tab] = portlet_cfg
        return burst_entity

    def load_tab_configuration(self, burst_entity, op_id):
        """
        Given a burst entity and an operation id, find the workflow to which the op_id
        belongs and then load the burst_entity's tab configuration with those workflow steps.
""" originating_workflow = dao.get_workflow_for_operation_id(op_id) burst_entity = self.__populate_tabs_from_workflow(burst_entity, originating_workflow) return burst_entity def new_portlet_configuration(self, portlet_id, tab_nr=-1, index_in_tab=-1, portlet_name='Default'): """ Return a new portlet configuration entity with default parameters. :param portlet_id: the id of the portlet for which a configuration will be stored :param tab_nr: the index of the currently selected tab :param index_in_tab: the index from the currently selected tab """ portlet_configurer = self._get_portlet_configurer(portlet_id) configuration = portlet_configurer.create_new_portlet_configuration(portlet_name) for wf_step in configuration.analyzers: wf_step.tab_index = tab_nr wf_step.index_in_tab = index_in_tab configuration.visualizer.tab_index = tab_nr configuration.visualizer.index_in_tab = index_in_tab return configuration @staticmethod def get_available_portlets(): """ :returns: a list of all the available portlet entites """ return dao.get_available_portlets() @staticmethod def get_portlet_by_id(portlet_id): """ :returns: the portlet entity with the id =@portlet_id """ return dao.get_portlet_by_id(portlet_id) @staticmethod def get_portlet_by_identifier(portlet_identifier): """ :returns: the portlet entity with the algorithm identifier =@portlet_identifier """ return dao.get_portlet_by_identifier(portlet_identifier) def launch_burst(self, burst_configuration, simulator_index, simulator_id, user_id, launch_mode=LAUNCH_NEW): """ Given a burst configuration and all the necessary data do the actual launch. :param burst_configuration: BurstConfiguration :param simulator_index: the position within the workflows step list that the simulator will take. This is needed so that the rest of the portlet workflow steps know what steps do their dynamic parameters come from. :param simulator_id: the id of the simulator adapter as stored in the DB. It's needed to load the simulator algo group and category that are then passed to the launcher's prepare_operation method. :param user_id: the id of the user that launched this burst :param launch_mode: new/branch/continue """ ## 1. Prepare BurstConfiguration entity if launch_mode == LAUNCH_NEW: ## Fully new entity for new simulation burst_config = burst_configuration.clone() if burst_config.name is None: new_id = dao.get_max_burst_id() + 1 burst_config.name = 'simulation_' + str(new_id) else: ## Branch or Continue simulation burst_config = burst_configuration simulation_state = dao.get_generic_entity(SIMULATION_DATATYPE_MODULE + "." + SIMULATION_DATATYPE_CLASS, burst_config.id, "fk_parent_burst") if simulation_state is None or len(simulation_state) < 1: exc = BurstServiceException("Simulation State not found for %s, " "thus we are unable to branch from it!" % burst_config.name) self.logger.error(exc) raise exc simulation_state = simulation_state[0] burst_config.update_simulation_parameter("simulation_state", simulation_state.gid) burst_config = burst_configuration.clone() count = dao.count_bursts_with_name(burst_config.name, burst_config.fk_project) burst_config.name = burst_config.name + "_" + launch_mode + str(count) ## 2. 
Create Operations and do the actual launch if launch_mode in [LAUNCH_NEW, LAUNCH_BRANCH]: ## New Burst entry in the history burst_id = self._store_burst_config(burst_config) thread = threading.Thread(target=self._async_launch_and_prepare, kwargs={'burst_config': burst_config, 'simulator_index': simulator_index, 'simulator_id': simulator_id, 'user_id': user_id}) thread.start() return burst_id, burst_config.name else: ## Continue simulation ## TODO return burst_config.id, burst_config.name @transactional def _prepare_operations(self, burst_config, simulator_index, simulator_id, user_id): """ Prepare all required operations for burst launch. """ project_id = burst_config.fk_project burst_id = burst_config.id workflow_step_list = [] starting_index = simulator_index + 1 sim_algo = FlowService().get_algorithm_by_identifier(simulator_id) metadata = {DataTypeMetaData.KEY_BURST: burst_id} launch_data = burst_config.get_all_simulator_values()[0] operations, group = self.operation_service.prepare_operations(user_id, project_id, sim_algo, sim_algo.algorithm_category, metadata, **launch_data) group_launched = group is not None if group_launched: starting_index += 1 for tab in burst_config.tabs: for portlet_cfg in tab.portlets: ### For each portlet configuration stored, update the step index ### ### and also change the dynamic parameters step indexes to point ### ### to the simulator outputs. ## if portlet_cfg is not None: analyzers = portlet_cfg.analyzers visualizer = portlet_cfg.visualizer for entry in analyzers: entry.step_index = starting_index self.workflow_service.set_dynamic_step_references(entry, simulator_index) workflow_step_list.append(entry) starting_index += 1 ### Change the dynamic parameters to point to the last adapter from this portlet execution. visualizer.step_visible = False if len(workflow_step_list) > 0 and isinstance(workflow_step_list[-1], model.WorkflowStep): self.workflow_service.set_dynamic_step_references(visualizer, workflow_step_list[-1].step_index) else: self.workflow_service.set_dynamic_step_references(visualizer, simulator_index) ### Only for a single operation have the step of visualization, otherwise is useless. if not group_launched: workflow_step_list.append(visualizer) if group_launched: ### For a group of operations, make sure the metric for PSE view ### is also computed, immediately after the simulation. metric_algo = FlowService().get_algorithm_by_module_and_class(MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS) metric_interface = FlowService().prepare_adapter(project_id, metric_algo) dynamics = {} for entry in metric_interface: # We have a select that should be the dataType and a select multiple with the # required metric algorithms to be evaluated. Only dynamic parameter should be # the select type. 
if entry[KEY_TYPE] == TYPE_SELECT: dynamics[entry[KEY_NAME]] = {WorkflowStepConfiguration.DATATYPE_INDEX_KEY: 0, WorkflowStepConfiguration.STEP_INDEX_KEY: simulator_index} metric_step = model.WorkflowStep(algorithm_id=metric_algo.id, step_index=simulator_index + 1, static_param={}, dynamic_param=dynamics) metric_step.step_visible = False workflow_step_list.insert(0, metric_step) workflows = self.workflow_service.create_and_store_workflow(project_id, burst_id, simulator_index, simulator_id, operations) self.operation_service.prepare_operations_for_workflowsteps(workflow_step_list, workflows, user_id, burst_id, project_id, group, operations) operation_ids = [operation.id for operation in operations] return operation_ids def _async_launch_and_prepare(self, burst_config, simulator_index, simulator_id, user_id): """ Prepare operations asynchronously. """ try: operation_ids = self._prepare_operations(burst_config, simulator_index, simulator_id, user_id) self.logger.debug("Starting a total of %s workflows" % (len(operation_ids, ))) wf_errs = 0 for operation_id in operation_ids: try: OperationService().launch_operation(operation_id, True) except Exception as excep: self.logger.error(excep) wf_errs += 1 self.workflow_service.mark_burst_finished(burst_config, error_message=str(excep)) self.logger.debug("Finished launching workflows. " + str(len(operation_ids) - wf_errs) + " were launched successfully, " + str(wf_errs) + " had error on pre-launch steps") except Exception as excep: self.logger.error(excep) self.workflow_service.mark_burst_finished(burst_config, error_message=str(excep)) @staticmethod def launch_visualization(visualization, frame_width=None, frame_height=None, is_preview=True): """ :param visualization: a visualization workflow step """ dynamic_params = visualization.dynamic_param static_params = visualization.static_param parameters_dict = static_params current_project_id = 0 # Current operation id needed for export mechanism. So far just use ## # the operation of the workflow_step from which the inputs are taken #### for param in dynamic_params: step_index = dynamic_params[param][WorkflowStepConfiguration.STEP_INDEX_KEY] datatype_index = dynamic_params[param][WorkflowStepConfiguration.DATATYPE_INDEX_KEY] referred_workflow_step = dao.get_workflow_step_by_step_index(visualization.fk_workflow, step_index) referred_operation_id = referred_workflow_step.fk_operation referred_operation = dao.get_operation_by_id(referred_operation_id) current_project_id = referred_operation.fk_launched_in if type(datatype_index) is IntType: # Entry is the output of a previous step ## datatypes = dao.get_results_for_operation(referred_operation_id) parameters_dict[param] = datatypes[datatype_index].gid else: # Entry is the input of a previous step ### parameters_dict[param] = json.loads(referred_operation.parameters)[datatype_index] algorithm = dao.get_algorithm_by_id(visualization.fk_algorithm) adapter_instance = ABCAdapter.build_adapter(algorithm) adapter_instance.current_project_id = current_project_id prepared_inputs = adapter_instance.prepare_ui_inputs(parameters_dict) if frame_width is not None: prepared_inputs[ABCDisplayer.PARAM_FIGURE_SIZE] = (frame_width, frame_height) if is_preview: result = adapter_instance.generate_preview(**prepared_inputs) else: result = adapter_instance.launch(**prepared_inputs) return result, parameters_dict def update_history_status(self, id_list): """ For each burst_id received in the id_list read new status from DB and return a list [id, new_status] pair. 
""" result = [] for b_id in id_list: burst = dao.get_burst_by_id(b_id) burst.prepare_after_load() if burst is not None: if burst.status == burst.BURST_RUNNING: running_time = datetime.now() - burst.start_time else: running_time = burst.finish_time - burst.start_time running_time = format_timedelta(running_time, most_significant2=False) if burst.status == burst.BURST_ERROR: msg = 'Check Operations page for error Message' else: msg = '' result.append([burst.id, burst.status, burst.is_group, msg, running_time]) else: self.logger.debug("Could not find burst with id=" + str(b_id) + ". Might have been deleted by user!!") return result def stop_burst(self, burst_entity): """ Stop all the entities for the current burst and set the burst status to canceled. """ burst_wfs = dao.get_workflows_for_burst(burst_entity.id) any_stopped = False for workflow in burst_wfs: wf_steps = dao.get_workflow_steps(workflow.id) for step in wf_steps: if step.fk_operation is not None: self.logger.debug("We will stop operation: %d" % step.fk_operation) any_stopped = self.operation_service.stop_operation(step.fk_operation) or any_stopped if any_stopped and burst_entity.status != burst_entity.BURST_CANCELED: self.workflow_service.mark_burst_finished(burst_entity, model.BurstConfiguration.BURST_CANCELED) return True return False @transactional def cancel_or_remove_burst(self, burst_id): """ Cancel (if burst is still running) or Remove the burst given by burst_id. :returns True when Remove operation was done and False when Cancel """ burst_entity = dao.get_burst_by_id(burst_id) if burst_entity.status == burst_entity.BURST_RUNNING: self.stop_burst(burst_entity) return False service = ProjectService() ## Remove each DataType in current burst. ## We can not leave all on cascade, because it won't work on SQLite for mapped dataTypes. datatypes = dao.get_all_datatypes_in_burst(burst_id) ## Get operations linked to current burst before removing the burst or else ## the burst won't be there to identify operations any more. remaining_ops = dao.get_operations_in_burst(burst_id) # Remove burst first to delete work-flow steps which still hold foreign keys to operations. correct = dao.remove_entity(burst_entity.__class__, burst_id) if not correct: raise RemoveDataTypeException("Could not remove Burst entity!") for datatype in datatypes: service.remove_datatype(burst_entity.fk_project, datatype.gid, False) ## Remove all Operations remained. correct = True remaining_op_groups = set() project = dao.get_project_by_id(burst_entity.fk_project) for oper in remaining_ops: is_remaining = dao.get_generic_entity(oper.__class__, oper.id) if len(is_remaining) == 0: ### Operation removed cascaded. continue if oper.fk_operation_group is not None and oper.fk_operation_group not in remaining_op_groups: is_remaining = dao.get_generic_entity(model.OperationGroup, oper.fk_operation_group) if len(is_remaining) > 0: remaining_op_groups.add(oper.fk_operation_group) correct = correct and dao.remove_entity(model.OperationGroup, oper.fk_operation_group) correct = correct and dao.remove_entity(oper.__class__, oper.id) service.structure_helper.remove_operation_data(project.name, oper.id) if not correct: raise RemoveDataTypeException("Could not remove Burst because a linked operation could not be dropped!!") return True @staticmethod def get_portlet_status(portlet_cfg): """ Get the status of a portlet configuration. 
""" if portlet_cfg.analyzers: for analyze_step in portlet_cfg.analyzers: operation = dao.try_get_operation_by_id(analyze_step.fk_operation) if operation is None: return model.STATUS_ERROR, "Operation has been removed" if operation.status != model.STATUS_FINISHED: return operation.status, operation.additional_info or '' else: ## Simulator is first step so now decide if we are waiting for input or output ## visualizer = portlet_cfg.visualizer wait_on_outputs = False for entry in visualizer.dynamic_param: if type(visualizer.dynamic_param[entry][WorkflowStepConfiguration.DATATYPE_INDEX_KEY]) == IntType: wait_on_outputs = True break if wait_on_outputs: simulator_step = dao.get_workflow_step_by_step_index(visualizer.fk_workflow, 0) operation = dao.try_get_operation_by_id(simulator_step.fk_operation) if operation is None: error_msg = ("At least one simulation result was not found, it might have been removed. <br\>" "You can copy and relaunch current simulation, if you are interested in having " "your results re-computed.") return model.STATUS_ERROR, error_msg else: return operation.status, operation.additional_info or '' return model.STATUS_FINISHED, ''
class OperationServiceTest(BaseTestCase):
    """
    Test class for the introspection module. Some tests from here do async launches.
    For those cases Transactional tests won't work.
    TODO: this is still to be refactored, for being huge, with duplicates and many irrelevant checks
    """

    def setUp(self):
        """
        Reset the database before each test.
        """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE

    def tearDown(self):
        """
        Reset the database when test is done.
        """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()

    def _assert_no_dt2(self):
        count = dao.count_datatypes(self.test_project.id, Datatype2)
        self.assertEqual(0, count)

    def _assert_stored_dt2(self, expected_cnt=1):
        count = dao.count_datatypes(self.test_project.id, Datatype2)
        self.assertEqual(expected_cnt, count)
        datatype = dao.try_load_last_entity_of_type(self.test_project.id, Datatype2)
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        return datatype

    def test_datatypes_groups(self):
        """
        Tests if the dataType group is set correctly on the dataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 0, "There should be no operation")

        algogroup = dao.find_group("tvb.tests.framework.adapters.testadapter3", "TestAdapter3")
        group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
        adapter_instance = flow_service.build_adapter_instance(group)
        data = {model.RANGE_PARAMETER_1: "param_5", "param_5": [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 1, "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")

        operation_group_id = all_operations[0][3]
        self.assertIsNotNone(operation_group_id, "The operation should be part of a group.")

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")

    def test_initiate_operation(self):
        """
        Test the actual operation flow by executing a test adapter.
""" module = "tvb.tests.framework.adapters.testadapter1" class_name = "TestAdapter1" group = dao.find_group(module, class_name) adapter = FlowService().build_adapter_instance(group) output = adapter.get_output() output_type = output[0].__name__ data = {"test1_val1": 5, "test1_val2": 5} tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") res = self.operation_service.initiate_operation( self.test_user, self.test_project.id, adapter, tmp_folder, **data ) self.assertTrue(res.index("has finished.") > 10, "Operation didn't finish") group = dao.find_group(module, class_name) self.assertEqual(group.module, "tvb.tests.framework.adapters.testadapter1", "Wrong data stored.") self.assertEqual(group.classname, "TestAdapter1", "Wrong data stored.") dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1) self.assertEqual(count, 1) self.assertEqual(len(dts), 1) datatype = dao.get_datatype_by_id(dts[0][0]) self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.") self.assertEqual(datatype.type, output_type, "Wrong data stored.") def test_delete_dt_free_HDD_space(self): """ Launch two operations and give enough available space for user so that both should finish. """ module = "tvb.tests.framework.adapters.testadapter3" class_name = "TestAdapterHDDRequired" group = dao.find_group(module, class_name) adapter = FlowService().build_adapter_instance(group) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data)) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self._assert_no_dt2() self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) datatype = self._assert_stored_dt2() # Now free some space and relaunch ProjectService().remove_datatype(self.test_project.id, datatype.gid) self._assert_no_dt2() self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) self._assert_stored_dt2() def test_launch_two_ops_HDD_with_space(self): """ Launch two operations and give enough available space for user so that both should finish. """ module = "tvb.tests.framework.adapters.testadapter3" class_name = "TestAdapterHDDRequired" group = dao.find_group(module, class_name) adapter = FlowService().build_adapter_instance(group) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = 2 * float(adapter.get_required_disk_size(**data)) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) datatype = self._assert_stored_dt2() # Now update the maximum disk size to be the size of the previously resulted datatypes (transform from kB to MB) # plus what is estimated to be required from the next one (transform from B to MB) TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(adapter.get_required_disk_size(**data)) self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) self._assert_stored_dt2(2) def test_launch_two_ops_HDD_full_space(self): """ Launch two operations and give available space for user so that the first should finish, but after the update to the user hdd size the second should not. 
""" module = "tvb.tests.framework.adapters.testadapter3" class_name = "TestAdapterHDDRequired" group = dao.find_group(module, class_name) adapter = FlowService().build_adapter_instance(group) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = 1 + float(adapter.get_required_disk_size(**data)) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) datatype = self._assert_stored_dt2() # Now update the maximum disk size to be less than size of the previously resulted datatypes (transform kB to MB) # plus what is estimated to be required from the next one (transform from B to MB) TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + float( adapter.get_required_disk_size(**data) - 1 ) self.assertRaises( NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user, self.test_project.id, adapter, tmp_folder, **data ) self._assert_stored_dt2() def test_launch_operation_HDD_with_space(self): """ Test the actual operation flow by executing a test adapter. """ module = "tvb.tests.framework.adapters.testadapter3" class_name = "TestAdapterHDDRequired" group = dao.find_group(module, class_name) adapter = FlowService().build_adapter_instance(group) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data)) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) self._assert_stored_dt2() def test_launch_operation_HDD_with_space_started_ops(self): """ Test the actual operation flow by executing a test adapter. """ space_taken_by_started = 100 module = "tvb.tests.framework.adapters.testadapter3" class_name = "TestAdapterHDDRequired" group = dao.find_group(module, class_name) started_operation = model.Operation( self.test_user.id, self.test_project.id, group.id, "", status=model.STATUS_STARTED, estimated_disk_size=space_taken_by_started, ) dao.store_entity(started_operation) adapter = FlowService().build_adapter_instance(group) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data) self._assert_stored_dt2() def test_launch_operation_HDD_full_space(self): """ Test the actual operation flow by executing a test adapter. """ module = "tvb.tests.framework.adapters.testadapter3" class_name = "TestAdapterHDDRequired" group = dao.find_group(module, class_name) adapter = FlowService().build_adapter_instance(group) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) - 1) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.assertRaises( NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user, self.test_project.id, adapter, tmp_folder, **data ) self._assert_no_dt2() def test_launch_operation_HDD_full_space_started_ops(self): """ Test the actual operation flow by executing a test adapter. 
""" space_taken_by_started = 100 module = "tvb.tests.framework.adapters.testadapter3" class_name = "TestAdapterHDDRequired" group = dao.find_group(module, class_name) started_operation = model.Operation( self.test_user.id, self.test_project.id, group.id, "", status=model.STATUS_STARTED, estimated_disk_size=space_taken_by_started, ) dao.store_entity(started_operation) adapter = FlowService().build_adapter_instance(group) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started - 1) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.assertRaises( NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user, self.test_project.id, adapter, tmp_folder, **data ) self._assert_no_dt2() def test_stop_operation(self): """ Test that an operation is successfully stopped. """ module = "tvb.tests.framework.adapters.testadapter2" class_name = "TestAdapter2" group = dao.find_group(module, class_name) adapter = FlowService().build_adapter_instance(group) data = {"test": 5} algo_group = adapter.algorithm_group algo_category = dao.get_category_by_id(algo_group.fk_category) algo = dao.get_algorithm_by_group(algo_group.id) operations, _ = self.operation_service.prepare_operations( self.test_user.id, self.test_project.id, algo, algo_category, {}, **data ) self.operation_service._send_to_cluster(operations, adapter) self.operation_service.stop_operation(operations[0].id) operation = dao.get_operation_by_id(operations[0].id) self.assertEqual(operation.status, model.STATUS_CANCELED, "Operation should have been canceled!") def test_stop_operation_finished(self): """ Test that an operation that is already finished is not changed by the stop operation. """ module = "tvb.tests.framework.adapters.testadapter1" class_name = "TestAdapter1" group = dao.find_group(module, class_name) adapter = FlowService().build_adapter_instance(group) data = {"test1_val1": 5, "test1_val2": 5} algo_group = adapter.algorithm_group algo_category = dao.get_category_by_id(algo_group.fk_category) algo = dao.get_algorithm_by_group(algo_group.id) operations, _ = self.operation_service.prepare_operations( self.test_user.id, self.test_project.id, algo, algo_category, {}, **data ) self.operation_service._send_to_cluster(operations, adapter) operation = dao.get_operation_by_id(operations[0].id) operation.status = model.STATUS_FINISHED dao.store_entity(operation) self.operation_service.stop_operation(operations[0].id) operation = dao.get_operation_by_id(operations[0].id) self.assertEqual(operation.status, model.STATUS_FINISHED, "Operation shouldn't have been canceled!") def test_array_from_string(self): """ Simple test for parse array on 1d, 2d and 3d array. 
""" row = { "description": "test.", "default": "None", "required": True, "label": "test: ", "attributes": None, "quantifier": "manual", "elementType": "float", "type": "array", "options": None, "name": "test", } input_data_string = "[ [1 2 3] [4 5 6]]" output = string2array(input_data_string, " ", row["elementType"]) self.assertEqual(output.shape, (2, 3), "Dimensions not properly parsed") for i in output[0]: self.assertTrue(i in [1, 2, 3]) for i in output[1]: self.assertTrue(i in [4, 5, 6]) input_data_string = "[1, 2, 3, 4, 5, 6]" output = string2array(input_data_string, ",", row["elementType"]) self.assertEqual(output.shape, (6,), "Dimensions not properly parsed") for i in output: self.assertTrue(i in [1, 2, 3, 4, 5, 6]) input_data_string = "[ [ [1,1], [2, 2] ], [ [3 ,3], [4,4] ] ]" output = string2array(input_data_string, ",", row["elementType"]) self.assertEqual(output.shape, (2, 2, 2), "Wrong dimensions.") for i in output[0][0]: self.assertTrue(i == 1) for i in output[0][1]: self.assertTrue(i == 2) for i in output[1][0]: self.assertTrue(i == 3) for i in output[1][1]: self.assertTrue(i == 4) row = { "description": "test.", "default": "None", "required": True, "label": "test: ", "attributes": None, "quantifier": "manual", "elementType": "str", "type": "array", "options": None, "name": "test", } input_data_string = "[1, 2, 3, 4, 5, 6]" output = string2array(input_data_string, ",", row["elementType"]) for i in output: self.assertTrue(i in [1, 2, 3, 4, 5, 6]) def test_wrong_array_from_string(self): """Test that parsing an array from string is throwing the expected exception when wrong input string""" row = { "description": "test.", "default": "None", "required": True, "label": "test: ", "attributes": None, "quantifier": "manual", "elementType": "float", "type": "array", "options": None, "name": "test", } input_data_string = "[ [1,2 3] [4,5,6]]" self.assertRaises(ValueError, string2array, input_data_string, ",", row["elementType"]) input_data_string = "[ [1,2,wrong], [4, 5, 6]]" self.assertRaises(ValueError, string2array, input_data_string, ",", row["elementType"]) row = { "description": "test.", "default": "None", "required": True, "label": "test: ", "attributes": None, "quantifier": "manual", "elementType": "str", "type": "array", "options": None, "name": "test", } output = string2array(input_data_string, ",", row["elementType"]) self.assertEqual(output.shape, (2, 3)) self.assertEqual(output[0][2], "wrong", "String data not converted properly") input_data_string = "[ [1,2 3] [4,5,6]]" output = string2array(input_data_string, ",", row["elementType"]) self.assertEqual(output[0][1], "2 3") def test_reduce_dimension_component(self): """ This method tests if the data passed to the launch method of the NDimensionArrayAdapter adapter is correct. The passed data should be a list of arrays with one dimension. 
""" inserted_count = FlowService().get_available_datatypes( self.test_project.id, "tvb.datatypes.arrays.MappedArray" )[1] self.assertEqual(inserted_count, 0, "Expected to find no data.") # create an operation algorithm_id = ( FlowService() .get_algorithm_by_module_and_class( "tvb.tests.framework.adapters.ndimensionarrayadapter", "NDimensionArrayAdapter" )[0] .id ) operation = model.Operation( self.test_user.id, self.test_project.id, algorithm_id, "test params", meta=json.dumps({DataTypeMetaData.KEY_STATE: "RAW_DATA"}), status=model.STATUS_FINISHED, ) operation = dao.store_entity(operation) # save the array wrapper in DB adapter_instance = NDimensionArrayAdapter() PARAMS = {} self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) inserted_data = FlowService().get_available_datatypes(self.test_project.id, "tvb.datatypes.arrays.MappedArray")[ 0 ] self.assertEqual(len(inserted_data), 1, "Problems when inserting data") gid = inserted_data[0][2] entity = dao.get_datatype_by_gid(gid) # from the 3D array do not select any array PARAMS = { "python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": "requiredDim_1", "input_data_dimensions_1": "", "input_data_dimensions_2": "", } try: self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) self.fail("Test should not pass. The resulted array should be a 1D array.") except Exception: # OK, do nothing; we were expecting to produce a 1D array pass # from the 3D array select only a 1D array first_dim = [gid + "_1_0", "requiredDim_1"] PARAMS = { "python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": first_dim, "input_data_dimensions_1": gid + "_2_1", } self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) expected_result = entity.array_data[:, 0, 1] actual_result = adapter_instance.launch_param self.assertEqual(len(actual_result), len(expected_result), "Not the same size for results!") self.assertTrue(numpy.equal(actual_result, expected_result).all()) # from the 3D array select a 2D array first_dim = [gid + "_1_0", gid + "_1_1", "requiredDim_2"] PARAMS = { "python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": first_dim, "input_data_dimensions_1": gid + "_2_1", } self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) expected_result = entity.array_data[slice(0, None), [0, 1], 1] actual_result = adapter_instance.launch_param self.assertEqual(len(actual_result), len(expected_result), "Not the same size for results!") self.assertTrue(numpy.equal(actual_result, expected_result).all()) # from 3D array select 1D array by applying SUM function on the first # dimension and average function on the second dimension PARAMS = { "python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": ["requiredDim_1", "func_sum"], "input_data_dimensions_1": "func_average", "input_data_dimensions_2": "", } self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) aux = numpy.sum(entity.array_data, axis=0) expected_result = numpy.average(aux, axis=0) actual_result = adapter_instance.launch_param self.assertEqual(len(actual_result), len(expected_result), "Not the same size of results!") self.assertTrue(numpy.equal(actual_result, expected_result).all()) # from 3D array select a 2D array and apply op. 
on the second dimension PARAMS = { "python_method": "reduce_dimension", "input_data": gid, "input_data_dimensions_0": ["requiredDim_2", "func_sum", "expected_shape_x,512", "operations_x,>"], "input_data_dimensions_1": "", "input_data_dimensions_2": "", } try: self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS) self.fail("Test should not pass! The second dimension of the array should be >512.") except Exception: # OK, do nothing; pass
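# A short standalone sketch of the string2array behaviour both test classes
# exercise; the import path below is an assumption, and the shapes mirror the
# assertions above:
from tvb.core.utils import string2array  # assumed location of the helper


def _string2array_examples():
    one_d = string2array('[1, 2, 3, 4, 5, 6]', ',', 'float')
    assert one_d.shape == (6,)
    two_d = string2array('[ [1 2 3] [4 5 6]]', ' ', 'float')
    assert two_d.shape == (2, 3)
    # with elementType 'str' the tokens are kept verbatim instead of being cast,
    # so an otherwise malformed numeric token such as 'wrong' still parses
    as_str = string2array('[ [1,2,wrong], [4, 5, 6]]', ',', 'str')
    assert as_str[0][2] == 'wrong'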
class TestOperationService(BaseTestCase):
    """
    Test class for the introspection module. Some tests from here do async launches.
    For those cases Transactional tests won't work.
    TODO: this is still to be refactored, for being huge, with duplicates and many irrelevant checks
    """

    def setup_method(self):
        """
        Reset the database before each test.
        """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE

    def teardown_method(self):
        """
        Reset the database when test is done.
        """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()

    def _assert_no_dt2(self):
        count = dao.count_datatypes(self.test_project.id, DummyDataTypeIndex)
        assert 0 == count

    def _assert_stored_dt2(self, expected_cnt=1):
        count = dao.count_datatypes(self.test_project.id, DummyDataTypeIndex)
        assert expected_cnt == count
        datatype = dao.try_load_last_entity_of_type(self.test_project.id, DummyDataTypeIndex)
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        return datatype

    def test_datatypes_groups(self, test_adapter_factory):
        """
        Tests if the dataType group is set correctly on the dataTypes resulting from the same operation group.
        """
        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 0, "There should be no operation"

        test_adapter_factory(TestAdapter3)
        algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        adapter_instance = ABCAdapter.build_adapter(algo)
        data = {model_burst.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        FlowService().fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 1, "Expected one operation group"
        assert all_operations[0][2] == 2, "Expected 2 operations in group"

        operation_group_id = all_operations[0][3]
        assert operation_group_id is not None, "The operation should be part of a group."

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"

    def test_initiate_operation(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
""" module = "tvb.tests.framework.adapters.testadapter1" class_name = "TestAdapter1" test_adapter_factory() adapter = TestFactory.create_adapter(module, class_name) output = adapter.get_output() output_type = output[0].__name__ data = {"test1_val1": 5, "test1_val2": 5} tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) group = dao.get_algorithm_by_module(module, class_name) assert group.module == 'tvb.tests.framework.adapters.testadapter1', "Wrong data stored." assert group.classname == 'TestAdapter1', "Wrong data stored." dts, count = dao.get_values_of_datatype(self.test_project.id, DummyDataTypeIndex) assert count == 1 assert len(dts) == 1 datatype = dao.get_datatype_by_id(dts[0][0]) assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored." assert datatype.type == output_type, "Wrong data stored." def test_delete_dt_free_hdd_space(self, test_adapter_factory, operation_factory): """ Launch two operations and give enough available space for user so that both should finish. """ test_adapter_factory(adapter_class=TestAdapterHDDRequired) adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float( adapter.get_required_disk_size(**data)) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self._assert_no_dt2() self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) datatype = self._assert_stored_dt2() # Now free some space and relaunch ProjectService().remove_datatype(self.test_project.id, datatype.gid) self._assert_no_dt2() self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) self._assert_stored_dt2() def test_launch_two_ops_hdd_with_space(self): """ Launch two operations and give enough available space for user so that both should finish. """ adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = 2 * float( adapter.get_required_disk_size(**data)) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) datatype = self._assert_stored_dt2() #Now update the maximum disk size to be the size of the previously resulted datatypes (transform from kB to MB) #plus what is estimated to be required from the next one (transform from B to MB) TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float( adapter.get_required_disk_size(**data)) self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) self._assert_stored_dt2(2) def test_launch_two_ops_hdd_full_space(self): """ Launch two operations and give available space for user so that the first should finish, but after the update to the user hdd size the second should not. 
""" adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = ( 1 + float(adapter.get_required_disk_size(**data))) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) datatype = self._assert_stored_dt2() #Now update the maximum disk size to be less than size of the previously resulted datatypes (transform kB to MB) #plus what is estimated to be required from the next one (transform from B to MB) TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \ float(adapter.get_required_disk_size(**data) - 1) with pytest.raises(NoMemoryAvailableException): self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) self._assert_stored_dt2() def test_launch_operation_hdd_with_space(self): """ Test the actual operation flow by executing a test adapter. """ adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float( adapter.get_required_disk_size(**data)) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) self._assert_stored_dt2() def test_launch_operation_hdd_with_space_started_ops( self, test_adapter_factory): """ Test the actual operation flow by executing a test adapter. """ test_adapter_factory(adapter_class=TestAdapterHDDRequired) space_taken_by_started = 100 adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") form = TestAdapterHDDRequiredForm() form.fill_from_post({'_test': "100"}) adapter.submit_form(form) started_operation = model_operation.Operation( self.test_user.id, self.test_project.id, adapter.stored_adapter.id, "", status=model_operation.STATUS_STARTED, estimated_disk_size=space_taken_by_started) dao.store_entity(started_operation) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float( adapter.get_required_disk_size(**data) + space_taken_by_started) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) self._assert_stored_dt2() def test_launch_operation_hdd_full_space(self, test_adapter_factory): """ Test the actual operation flow by executing a test adapter. """ test_adapter_factory(adapter_class=TestAdapterHDDRequired) adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") form = TestAdapterHDDRequiredForm() adapter.submit_form(form) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float( adapter.get_required_disk_size(**data) - 1) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") with pytest.raises(NoMemoryAvailableException): self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) self._assert_no_dt2() def test_launch_operation_hdd_full_space_started_ops( self, test_adapter_factory): """ Test the actual operation flow by executing a test adapter. 
""" test_adapter_factory(adapter_class=TestAdapterHDDRequired) space_taken_by_started = 100 adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired") form = TestAdapterHDDRequiredForm() adapter.submit_form(form) started_operation = model_operation.Operation( self.test_user.id, self.test_project.id, adapter.stored_adapter.id, "", status=model_operation.STATUS_STARTED, estimated_disk_size=space_taken_by_started) dao.store_entity(started_operation) data = {"test": 100} TvbProfile.current.MAX_DISK_SPACE = float( adapter.get_required_disk_size(**data) + space_taken_by_started - 1) tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP") with pytest.raises(NoMemoryAvailableException): self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder, **data) self._assert_no_dt2() def test_stop_operation(self, test_adapter_factory): """ Test that an operation is successfully stopped. """ test_adapter_factory(adapter_class=TestAdapter2) adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter2", "TestAdapter2") data = {"test": 5} algo = adapter.stored_adapter algo_category = dao.get_category_by_id(algo.fk_category) operations, _ = self.operation_service.prepare_operations( self.test_user.id, self.test_project, algo, algo_category, {}, **data) self.operation_service._send_to_cluster(operations, adapter) self.operation_service.stop_operation(operations[0].id) operation = dao.get_operation_by_id(operations[0].id) assert operation.status, model_operation.STATUS_CANCELED == "Operation should have been canceled!" def test_stop_operation_finished(self, test_adapter_factory): """ Test that an operation that is already finished is not changed by the stop operation. """ test_adapter_factory() adapter = TestFactory.create_adapter( "tvb.tests.framework.adapters.testadapter1", "TestAdapter1") data = {"test1_val1": 5, 'test1_val2': 5} algo = adapter.stored_adapter algo_category = dao.get_category_by_id(algo.fk_category) operations, _ = self.operation_service.prepare_operations( self.test_user.id, self.test_project, algo, algo_category, {}, **data) self.operation_service._send_to_cluster(operations, adapter) operation = dao.get_operation_by_id(operations[0].id) operation.status = model_operation.STATUS_FINISHED dao.store_entity(operation) self.operation_service.stop_operation(operations[0].id) operation = dao.get_operation_by_id(operations[0].id) assert operation.status, model_operation.STATUS_FINISHED == "Operation shouldn't have been canceled!" def test_array_from_string(self): """ Simple test for parse array on 1d, 2d and 3d array. """ row = { 'description': 'test.', 'default': 'None', 'required': True, 'label': 'test: ', 'attributes': None, 'elementType': 'float', 'type': 'array', 'options': None, 'name': 'test' } input_data_string = '[ [1 2 3] [4 5 6]]' output = string2array(input_data_string, ' ', row['elementType']) assert output.shape, (2, 3) == "Dimensions not properly parsed" for i in output[0]: assert i in [1, 2, 3] for i in output[1]: assert i in [4, 5, 6] input_data_string = '[1, 2, 3, 4, 5, 6]' output = string2array(input_data_string, ',', row['elementType']) assert output.shape == (6, ), "Dimensions not properly parsed" for i in output: assert i in [1, 2, 3, 4, 5, 6] input_data_string = '[ [ [1,1], [2, 2] ], [ [3 ,3], [4,4] ] ]' output = string2array(input_data_string, ',', row['elementType']) assert output.shape == (2, 2, 2), "Wrong dimensions." 

    def test_array_from_string(self):
        """
        Simple test for parsing 1D, 2D and 3D arrays from a string.
        """
        row = {'description': 'test.', 'default': 'None', 'required': True,
               'label': 'test: ', 'attributes': None, 'elementType': 'float',
               'type': 'array', 'options': None, 'name': 'test'}
        input_data_string = '[ [1 2 3] [4 5 6]]'
        output = string2array(input_data_string, ' ', row['elementType'])
        assert output.shape == (2, 3), "Dimensions not properly parsed"
        for i in output[0]:
            assert i in [1, 2, 3]
        for i in output[1]:
            assert i in [4, 5, 6]

        input_data_string = '[1, 2, 3, 4, 5, 6]'
        output = string2array(input_data_string, ',', row['elementType'])
        assert output.shape == (6,), "Dimensions not properly parsed"
        for i in output:
            assert i in [1, 2, 3, 4, 5, 6]

        input_data_string = '[ [ [1,1], [2, 2] ], [ [3 ,3], [4,4] ] ]'
        output = string2array(input_data_string, ',', row['elementType'])
        assert output.shape == (2, 2, 2), "Wrong dimensions."
        for i in output[0][0]:
            assert i == 1
        for i in output[0][1]:
            assert i == 2
        for i in output[1][0]:
            assert i == 3
        for i in output[1][1]:
            assert i == 4

        row = {'description': 'test.', 'default': 'None', 'required': True,
               'label': 'test: ', 'attributes': None, 'elementType': 'str',
               'type': 'array', 'options': None, 'name': 'test'}
        input_data_string = '[1, 2, 3, 4, 5, 6]'
        output = string2array(input_data_string, ',', row['elementType'])
        for i in output:
            assert i in [1, 2, 3, 4, 5, 6]

    def test_wrong_array_from_string(self):
        """
        Test that parsing an array from a malformed input string raises the expected exception.
        """
        row = {'description': 'test.', 'default': 'None', 'required': True,
               'label': 'test: ', 'attributes': None, 'elementType': 'float',
               'type': 'array', 'options': None, 'name': 'test'}
        input_data_string = '[ [1,2 3] [4,5,6]]'
        with pytest.raises(ValueError):
            string2array(input_data_string, ',', row['elementType'])

        input_data_string = '[ [1,2,wrong], [4, 5, 6]]'
        with pytest.raises(ValueError):
            string2array(input_data_string, ',', row['elementType'])

        # with a 'str' element type the same inputs parse fine, the tokens staying strings
        row = {'description': 'test.', 'default': 'None', 'required': True,
               'label': 'test: ', 'attributes': None, 'elementType': 'str',
               'type': 'array', 'options': None, 'name': 'test'}
        output = string2array(input_data_string, ',', row['elementType'])
        assert output.shape == (2, 3)
        assert output[0][2] == 'wrong', 'String data not converted properly'
        input_data_string = '[ [1,2 3] [4,5,6]]'
        output = string2array(input_data_string, ',', row['elementType'])
        assert output[0][1] == '2 3'

    def test_reduce_dimension_component(self):
        """
        This method tests if the data passed to the launch method of the NDimensionArrayAdapter
        adapter is correct. The passed data should be a list of arrays with one dimension.
        """
        inserted_count = FlowService().get_available_datatypes(self.test_project.id,
                                                               "tvb.datatypes.arrays.MappedArray")[1]
        assert inserted_count == 0, "Expected to find no data."

        # create an operation
        algorithm_id = FlowService().get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter').id
        operation = model_operation.Operation(self.test_user.id, self.test_project.id, algorithm_id,
                                              'test params',
                                              meta=json.dumps({DataTypeMetaData.KEY_STATE: "RAW_DATA"}),
                                              status=model_operation.STATUS_FINISHED)
        operation = dao.store_entity(operation)

        # save the array wrapper in DB
        adapter_instance = NDimensionArrayAdapter()
        PARAMS = {}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        inserted_data = FlowService().get_available_datatypes(self.test_project.id,
                                                              "tvb.datatypes.arrays.MappedArray")[0]
        assert len(inserted_data) == 1, "Problems when inserting data"
        gid = inserted_data[0][2]
        entity = dao.get_datatype_by_gid(gid)

        # from the 3D array do not select any sub-array; the prelaunch must fail,
        # because the result cannot satisfy the required 1D shape
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": "requiredDim_1",
                  "input_data_dimensions_1": "",
                  "input_data_dimensions_2": ""}
        with pytest.raises(Exception):
            self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
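
        # Parameter convention assumed here, inferred from the expected results asserted
        # below (not taken from the NDimensionArrayAdapter documentation):
        #   gid + '_<dim>_<idx>'  selects index <idx> along dimension <dim>
        #   'requiredDim_<n>'     requires the reduced array to have <n> dimensions
        #   'func_<name>'         applies the numpy function <name> along that dimension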

        # from the 3D array select only a 1D array
        first_dim = [gid + '_1_0', 'requiredDim_1']
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": first_dim,
                  "input_data_dimensions_1": gid + "_2_1"}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        expected_result = entity.array_data[:, 0, 1]
        actual_result = adapter_instance.launch_param
        assert len(actual_result) == len(expected_result), "Not the same size for results!"
        assert numpy.equal(actual_result, expected_result).all()

        # from the 3D array select a 2D array
        first_dim = [gid + '_1_0', gid + '_1_1', 'requiredDim_2']
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": first_dim,
                  "input_data_dimensions_1": gid + "_2_1"}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        expected_result = entity.array_data[slice(0, None), [0, 1], 1]
        actual_result = adapter_instance.launch_param
        assert len(actual_result) == len(expected_result), "Not the same size for results!"
        assert numpy.equal(actual_result, expected_result).all()

        # from the 3D array select a 1D array by applying the sum function on the first
        # dimension and the average function on the second dimension
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": ["requiredDim_1", "func_sum"],
                  "input_data_dimensions_1": "func_average",
                  "input_data_dimensions_2": ""}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        aux = numpy.sum(entity.array_data, axis=0)
        expected_result = numpy.average(aux, axis=0)
        actual_result = adapter_instance.launch_param
        assert len(actual_result) == len(expected_result), "Not the same size of results!"
        assert numpy.equal(actual_result, expected_result).all()

        # from the 3D array select a 2D array and apply an operation on the second dimension;
        # the prelaunch must fail, because the second dimension of the array is not > 512
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": ["requiredDim_2", "func_sum",
                                              "expected_shape_x,512", "operations_x,>"],
                  "input_data_dimensions_1": "",
                  "input_data_dimensions_2": ""}
        with pytest.raises(Exception):
            self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)