Example #1
def create_group(test_user=None, test_project=None, subject="John Doe"):
    """
    Create a group of 2 operations, each with at least one resultant DataType.
    """
    if test_user is None:
        test_user = TestFactory.create_user()
    if test_project is None:
        test_project = TestFactory.create_project(test_user)

    ### Retrieve Adapter instance
    algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
    algo_category = dao.get_category_by_id(algo_group.fk_category)
    algo = dao.get_algorithm_by_group(algo_group.id)

    adapter_inst = TestFactory.create_adapter(algo_group=algo_group, test_project=test_project)
    adapter_inst.meta_data = {DataTypeMetaData.KEY_SUBJECT: subject}
    args = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}

    ### Prepare Operations group. Execute them synchronously
    service = OperationService()
    operations = service.prepare_operations(test_user.id, test_project.id, algo, algo_category, {}, **args)[0]
    service.launch_operation(operations[0].id, False, adapter_inst)
    service.launch_operation(operations[1].id, False, adapter_inst)

    resulted_dts = dao.get_datatype_in_group(operation_group_id=operations[0].fk_operation_group)
    return resulted_dts, operations[0].fk_operation_group
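
A minimal usage sketch for the helper above (the test body is hypothetical;
the return shape, a list of resulting DataTypes plus the shared
operation-group id, follows directly from the code):

def test_create_group_usage_sketch():
    # create_group() is the helper defined above; both operations run
    # synchronously, each storing at least one DataType.
    datatypes, op_group_id = create_group(subject="John Doe")
    assert len(datatypes) >= 2, "One DataType per operation expected."
    assert op_group_id is not None, "Both operations share an OperationGroup."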
Example #2
    def create_group(test_user=None, test_project=None, subject="John Doe"):
        """
        Create a group of 2 operations, each with at least one resultant DataType.
        """
        if test_user is None:
            test_user = TestFactory.create_user()
        if test_project is None:
            test_project = TestFactory.create_project(test_user)

        adapter_inst = TestFactory.create_adapter(
            'tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        adapter_inst.meta_data = {DataTypeMetaData.KEY_SUBJECT: subject}
        args = {RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        algo = adapter_inst.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)

        # Prepare Operations group. Execute them synchronously
        service = OperationService()
        operations = service.prepare_operations(test_user.id, test_project,
                                                algo, algo_category, {},
                                                **args)[0]
        service.launch_operation(operations[0].id, False, adapter_inst)
        service.launch_operation(operations[1].id, False, adapter_inst)

        resulted_dts = dao.get_datatype_in_group(
            operation_group_id=operations[0].fk_operation_group)
        return resulted_dts, operations[0].fk_operation_group
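
Between Examples #1 and #2 the adapter lookup changed (dao.find_group plus
dao.get_algorithm_by_group versus a single TestFactory.create_adapter call),
but the range mechanics are the same: RANGE_PARAMETER_1 names the ranged
parameter, and the list stored under that name supplies its values, one
operation per value. A hypothetical sketch of the fan-out, not the framework's
internal code:

args = {RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
ranged_name = args[RANGE_PARAMETER_1]     # 'param_5'
range_values = args[ranged_name]          # [1, 2]
# prepare_operations builds one operation per value, which is why the
# examples launch operations[0] and operations[1].
assert len(range_values) == 2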
Example #3
    def create_group(test_user=None, test_project=None, subject="John Doe"):
        """
        Create a group of 2 operations, each with at least one resultant DataType.
        """
        if test_user is None:
            test_user = TestFactory.create_user()
        if test_project is None:
            test_project = TestFactory.create_project(test_user)

        adapter_inst = TestFactory.create_adapter('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        adapter_inst.generic_attributes.subject = subject

        view_model = adapter_inst.get_view_model()()
        args = {RANGE_PARAMETER_1: 'param_5', 'param_5': json.dumps({constants.ATT_MINVALUE: 1,
                                                                     constants.ATT_MAXVALUE: 2.1,
                                                                     constants.ATT_STEP: 1})}
        algo = adapter_inst.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)

        # Prepare Operations group. Execute them synchronously
        service = OperationService()
        operations = service.prepare_operations(test_user.id, test_project, algo, algo_category,
                                                view_model=view_model, **args)[0]
        service.launch_operation(operations[0].id, False, adapter_inst)
        service.launch_operation(operations[1].id, False, adapter_inst)

        resulted_dts = dao.get_datatype_in_group(operation_group_id=operations[0].fk_operation_group)
        return resulted_dts, operations[0].fk_operation_group
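
Example #3 encodes the same two-value range as a min/max/step mapping instead
of an explicit list. Assuming the framework expands it the way numpy.arange
would (an assumption; the actual expansion lives in TVB's range handling), the
max of 2.1 with step 1 again yields exactly the values 1 and 2:

import json
import numpy

range_spec = json.loads(args['param_5'])  # args as built in Example #3
values = numpy.arange(range_spec[constants.ATT_MINVALUE],
                      range_spec[constants.ATT_MAXVALUE],
                      range_spec[constants.ATT_STEP])
# -> array([1., 2.]): two operations, matching Examples #1 and #2.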
Example #4
class WorkflowTest(TransactionalTestCase):
    """
    Test that workflow conversion methods are valid.
    """
    def setUp(self):
        """
        Sets up the testing environment;
        saves config file;
        creates a test user, a test project;
        creates burst, operation, flow and workflow services
        """
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.workflow_service = WorkflowService()
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.flow_service = FlowService()

    def tearDown(self):
        """
        Remove project folders and clean up database.
        """
        FilesHelper().remove_project_structure(self.test_project.name)
        self.delete_project_folders()

    def __create_complex_workflow(self, workflow_step_list):
        """
        Creates a burst with a complex workflow with a given list of workflow steps.
        :param workflow_step_list: a list of workflow steps that will be used in the
            creation of a new workflow for a new burst
        """
        burst_config = TestFactory.store_burst(self.test_project.id)

        stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(
            Datatype1())

        first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            "tvb.tests.framework.adapters.testadapter1",
            "TestAdapterDatatypeInput")
        metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
        kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
        operations, group = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project.id, first_step_algorithm,
            first_step_algorithm.algorithm_category, metadata, **kwargs)

        workflows = self.workflow_service.create_and_store_workflow(
            project_id=self.test_project.id,
            burst_id=burst_config.id,
            simulator_index=0,
            simulator_id=first_step_algorithm.id,
            operations=operations)
        self.operation_service.prepare_operations_for_workflowsteps(
            workflow_step_list, workflows, self.test_user.id, burst_config.id,
            self.test_project.id, group, operations)
        # fire the first op
        if len(operations) > 0:
            self.operation_service.launch_operation(operations[0].id, False)
        return burst_config.id

    def test_workflow_generation(self):
        """
        A simple test of the fact that a workflow is created and run with
        no dynamic parameters passed. In this case we create a two-step
        workflow: step1 - tvb.tests.framework.adapters.testadapter2.TestAdapter2
                  step2 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
        The first adapter doesn't return anything and the second returns one
        tvb.datatypes.datatype1.Datatype1 instance. We check that the steps
        actually ran by verifying that two operations are created and that
        one DataType is stored.
        """
        workflow_step_list = [
            TestFactory.create_workflow_step(
                "tvb.tests.framework.adapters.testadapter2",
                "TestAdapter2",
                step_index=1,
                static_kwargs={"test2": 2}),
            TestFactory.create_workflow_step(
                "tvb.tests.framework.adapters.testadapter1",
                "TestAdapter1",
                step_index=2,
                static_kwargs={
                    "test1_val1": 1,
                    "test1_val2": 1
                })
        ]
        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        self.assertTrue(
            len(stored_datatypes) == 2,
            "DataType from second step was not stored.")
        self.assertTrue(stored_datatypes[0].type == 'Datatype1',
                        "Wrong type was stored.")
        self.assertTrue(stored_datatypes[1].type == 'Datatype1',
                        "Wrong type was stored.")

        finished, started, error, _, _ = dao.get_operation_numbers(
            self.test_project.id)
        self.assertEqual(
            finished, 3,
            "Didnt start operations for both adapters in workflow.")
        self.assertEqual(started, 0,
                         "Some operations from workflow didnt finish.")
        self.assertEqual(error, 0,
                         "Some operations finished with error status.")

    def test_workflow_dynamic_params(self):
        """
        A simple test of the fact that dynamic parameters are passed properly
        between two workflow steps:
                  step1 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
                  step2 - tvb.tests.framework.adapters.testadapter3.TestAdapter3
        The first adapter returns a tvb.datatypes.datatype1.Datatype1 instance.
        The second adapter receives it as a dynamic workflow parameter.
        We check that the steps actually ran by verifying that two operations
        are created and that two DataTypes are stored.
        """
        workflow_step_list = [
            TestFactory.create_workflow_step(
                "tvb.tests.framework.adapters.testadapter1",
                "TestAdapter1",
                step_index=1,
                static_kwargs={
                    "test1_val1": 1,
                    "test1_val2": 1
                }),
            TestFactory.create_workflow_step(
                "tvb.tests.framework.adapters.testadapter3",
                "TestAdapter3",
                step_index=2,
                dynamic_kwargs={
                    "test": {
                        wf_cfg.DATATYPE_INDEX_KEY: 0,
                        wf_cfg.STEP_INDEX_KEY: 1
                    }
                })
        ]

        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        self.assertTrue(
            len(stored_datatypes) == 3,
            "DataType from all step were not stored.")
        for result_row in stored_datatypes:
            self.assertTrue(result_row.type in ['Datatype1', 'Datatype2'],
                            "Wrong type was stored.")

        finished, started, error, _, _ = dao.get_operation_numbers(
            self.test_project.id)
        self.assertEqual(
            finished, 3,
            "Didn't start operations for both adapters in workflow.")
        self.assertEqual(started, 0,
                         "Some operations from workflow didn't finish.")
        self.assertEqual(error, 0,
                         "Some operations finished with error status.")

    def test_configuration2workflow(self):
        """
        Test building a WorkflowStep from a WorkflowStepConfiguration. Make sure all the data is
        correctly passed. Also check that the base workflow step is added to each dynamic parameter's step index.
        """
        workflow_step = TestFactory.create_workflow_step(
            "tvb.tests.framework.adapters.testadapter1",
            "TestAdapter1",
            static_kwargs={"static_param": "test"},
            dynamic_kwargs={
                "dynamic_param": {
                    wf_cfg.STEP_INDEX_KEY: 0,
                    wf_cfg.DATATYPE_INDEX_KEY: 0
                }
            },
            step_index=1,
            base_step=5)
        self.assertEqual(workflow_step.step_index, 1,
                         "Wrong step index in created workflow step.")
        self.assertEqual(workflow_step.static_param, {'static_param': 'test'},
                         'Different static parameters on step.')
        self.assertEqual(
            workflow_step.dynamic_param, {
                'dynamic_param': {
                    wf_cfg.STEP_INDEX_KEY: 5,
                    wf_cfg.DATATYPE_INDEX_KEY: 0
                }
            },
            "Dynamic parameters not saved properly, or base workflow index not added to step index."
        )

    def test_create_workflow(self):
        """
        Test that a workflow with all the associated workflow steps is actually created.
        """
        workflow_step_list = [
            TestFactory.create_workflow_step(
                "tvb.tests.framework.adapters.testadapter2",
                "TestAdapter2",
                step_index=1,
                static_kwargs={"test2": 2}),
            TestFactory.create_workflow_step(
                "tvb.tests.framework.adapters.testadapter1",
                "TestAdapter1",
                step_index=2,
                static_kwargs={
                    "test1_val1": 1,
                    "test1_val2": 1
                })
        ]
        burst_id = self.__create_complex_workflow(workflow_step_list)
        workflow_entities = dao.get_workflows_for_burst(burst_id)
        self.assertTrue(
            len(workflow_entities) == 1,
            "For some reason workflow was not stored in database.")
        workflow_steps = dao.get_workflow_steps(workflow_entities[0].id)
        self.assertEqual(len(workflow_steps),
                         len(workflow_step_list) + 1,
                         "Wrong number of workflow steps created.")
Example #5
class TestOperationService(BaseTestCase):
    """
    Test class for the introspection module. Some tests here perform async launches;
    transactional tests won't work for those cases.
    """

    def setup_method(self):
        """
        Reset the database before each test.
        """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE

    def teardown_method(self):
        """
        Reset the database when test is done.
        """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()

    def _assert_no_ddti(self):
        count = dao.count_datatypes(self.test_project.id, DummyDataTypeIndex)
        assert 0 == count

    def _assert_stored_ddti(self, expected_cnt=1):
        count = dao.count_datatypes(self.test_project.id, DummyDataTypeIndex)
        assert expected_cnt == count
        datatype = dao.try_load_last_entity_of_type(self.test_project.id, DummyDataTypeIndex)
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        return datatype

    def test_datatypes_groups(self, test_adapter_factory):
        """
        Tests that the DataType group is set correctly on the DataTypes resulting from the same operation group.
        """
        # TODO: re-write this to use groups correctly
        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 0, "There should be no operation"

        algo = test_adapter_factory(TestAdapter3)
        adapter_instance = ABCAdapter.build_adapter(algo)
        data = {model_burst.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        FlowService().fire_operation(adapter_instance, self.test_user, self.test_project.id)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 1, "Expected one operation group"
        assert all_operations[0][2] == 2, "Expected 2 operations in group"

        operation_group_id = all_operations[0][3]
        assert operation_group_id is not None, "The operation should be part of a group."

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"

    def test_initiate_operation(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        test_adapter_factory()
        adapter = TestFactory.create_adapter(module, class_name)
        output = adapter.get_output()
        output_type = output[0].__name__
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        view_model = adapter.get_view_model()()
        view_model.test1_val1 = 5
        view_model.test1_val2 = 5
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  tmp_folder, model_view=view_model)

        group = dao.get_algorithm_by_module(module, class_name)
        assert group.module == 'tvb.tests.framework.adapters.testadapter1', "Wrong data stored."
        assert group.classname == 'TestAdapter1', "Wrong data stored."
        dts, count = dao.get_values_of_datatype(self.test_project.id, DummyDataTypeIndex)
        assert count == 1
        assert len(dts) == 1
        datatype = dao.get_datatype_by_id(dts[0][0])
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        assert datatype.type == output_type, "Wrong data stored."

    def test_delete_dt_free_hdd_space(self, test_adapter_factory, operation_factory):
        """
        Launch an operation, then free disk space by deleting its resulting DataType so that a second launch also fits the quota.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self._assert_no_ddti()
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        datatype = self._assert_stored_ddti()

        # Now free some space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        self._assert_no_ddti()
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_two_ops_hdd_with_space(self, test_adapter_factory):
        """
        Launch two operations, giving the user enough disk space so that both finish.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = 2 * float(adapter.get_required_disk_size(view_model))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        datatype = self._assert_stored_ddti()

        # Now update the maximum disk size to be the size of the previously resulted datatypes (transform from kB to MB)
        # plus what is estimated to be required from the next one (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(
            adapter.get_required_disk_size(view_model))

        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        self._assert_stored_ddti(2)

    def test_launch_two_ops_hdd_full_space(self):
        """
        Launch two operations with just enough disk space for the first to finish;
        after shrinking the allowed disk size, the second should not.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()

        TvbProfile.current.MAX_DISK_SPACE = (1 + float(adapter.get_required_disk_size(view_model)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)

        datatype = self._assert_stored_ddti()
        # Now update the maximum disk size to be less than size of the previously resulted datatypes (transform kB to MB)
        # plus what is estimated to be required from the next one (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                            float(adapter.get_required_disk_size(view_model) - 1)

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                      model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_operation_hdd_with_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()

        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_operation_hdd_with_space_started_ops(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)

        space_taken_by_started = 100
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        started_operation = model_operation.Operation(self.test_user.id, self.test_project.id,
                                                      adapter.stored_adapter.id, "",
                                                      status=model_operation.STATUS_STARTED,
                                                      estimated_disk_size=space_taken_by_started)
        view_model = adapter.get_view_model()()

        dao.store_entity(started_operation)
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model) + space_taken_by_started)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                  model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_operation_hdd_full_space(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)

        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        view_model = adapter.get_view_model()()

        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model) - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                      model_view=view_model)
        self._assert_no_ddti()

    def test_launch_operation_hdd_full_space_started_ops(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)

        space_taken_by_started = 100
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        started_operation = model_operation.Operation(self.test_user.id, self.test_project.id,
                                                      adapter.stored_adapter.id, "",
                                                      status=model_operation.STATUS_STARTED,
                                                      estimated_disk_size=space_taken_by_started)
        view_model = adapter.get_view_model()()

        dao.store_entity(started_operation)
        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(view_model) + space_taken_by_started - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project, adapter, tmp_folder,
                                                      model_view=view_model)
        self._assert_no_ddti()

    def test_stop_operation(self, test_adapter_factory):
        """
        Test that an operation is successfully stopped.
        """
        test_adapter_factory(adapter_class=TestAdapter2)
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter2", "TestAdapter2")
        view_model = adapter.get_view_model()()
        view_model.test = 5
        algo = adapter.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project, algo,
                                                                  algo_category, {},
                                                                  view_model=view_model)
        self.operation_service._send_to_cluster(operations, adapter)
        self.operation_service.stop_operation(operations[0].id)
        operation = dao.get_operation_by_id(operations[0].id)
        assert operation.status == model_operation.STATUS_CANCELED, "Operation should have been canceled!"

    def test_stop_operation_finished(self, test_adapter_factory):
        """
        Test that an operation that is already finished is not changed by the stop operation.
        """
        test_adapter_factory()
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        view_model = adapter.get_view_model()()
        view_model.test1_val1 = 5
        view_model.test1_val2 = 5
        algo = adapter.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project, algo,
                                                                  algo_category, {}, view_model=view_model)
        self.operation_service._send_to_cluster(operations, adapter)
        operation = dao.get_operation_by_id(operations[0].id)
        operation.status = model_operation.STATUS_FINISHED
        dao.store_entity(operation)
        self.operation_service.stop_operation(operations[0].id)
        operation = dao.get_operation_by_id(operations[0].id)
        assert operation.status == model_operation.STATUS_FINISHED, "Operation shouldn't have been canceled!"
Example #6
class TestWorkflow(TransactionalTestCase):
    """
    Test that workflow conversion methods are valid.
    """


    def transactional_setup_method(self):
        """
        Sets up the testing environment;
        saves config file;
        creates a test user, a test project;
        creates burst, operation, flow and workflow services
        """
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.workflow_service = WorkflowService()
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.flow_service = FlowService()


    def transactional_teardown_method(self):
        """
        Remove project folders and clean up database.
        """
        FilesHelper().remove_project_structure(self.test_project.name)
        self.delete_project_folders()


    def __create_complex_workflow(self, workflow_step_list):
        """
        Creates a burst with a complex workflow with a given list of workflow steps.
        :param workflow_step_list: a list of workflow steps that will be used in the
            creation of a new workflow for a new burst
        """
        burst_config = TestFactory.store_burst(self.test_project.id)

        stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(Datatype1())

        first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class("tvb.tests.framework.adapters.testadapter1",
                                                                                   "TestAdapterDatatypeInput")
        metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
        kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
        operations, group = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id,
                                                                      first_step_algorithm,
                                                                      first_step_algorithm.algorithm_category,
                                                                      metadata, **kwargs)

        workflows = self.workflow_service.create_and_store_workflow(project_id=self.test_project.id,
                                                                    burst_id=burst_config.id,
                                                                    simulator_index=0,
                                                                    simulator_id=first_step_algorithm.id,
                                                                    operations=operations)
        self.operation_service.prepare_operations_for_workflowsteps(workflow_step_list, workflows, self.test_user.id,
                                                                    burst_config.id, self.test_project.id, group,
                                                                    operations)
        # fire the first op
        if len(operations) > 0:
            self.operation_service.launch_operation(operations[0].id, False)
        return burst_config.id


    def test_workflow_generation(self):
        """
        A simple test of the fact that a workflow is created and run with
        no dynamic parameters passed. In this case we create a two-step
        workflow: step1 - tvb.tests.framework.adapters.testadapter2.TestAdapter2
                  step2 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
        The first adapter doesn't return anything and the second returns one
        tvb.datatypes.datatype1.Datatype1 instance. We check that the steps
        actually ran by verifying that two operations are created and that
        one DataType is stored.
        """
        workflow_step_list = [TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter2",
                                                               "TestAdapter2", step_index=1,
                                                               static_kwargs={"test2": 2}),
                              TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1",
                                                               "TestAdapter1", step_index=2,
                                                               static_kwargs={"test1_val1": 1, "test1_val2": 1})]
        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert len(stored_datatypes) == 2, "DataType from second step was not stored."
        assert stored_datatypes[0].type == 'Datatype1', "Wrong type was stored."
        assert stored_datatypes[1].type == 'Datatype1', "Wrong type was stored."

        finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
        assert finished == 3, "Didn't start operations for both adapters in workflow."
        assert started == 0, "Some operations from workflow didn't finish."
        assert error == 0, "Some operations finished with error status."


    def test_workflow_dynamic_params(self):
        """
        A simple test of the fact that dynamic parameters are passed properly
        between two workflow steps:
                  step1 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
                  step2 - tvb.tests.framework.adapters.testadapter3.TestAdapter3
        The first adapter returns a tvb.datatypes.datatype1.Datatype1 instance.
        The second adapter receives it as a dynamic workflow parameter.
        We check that the steps actually ran by verifying that two operations
        are created and that two DataTypes are stored.
        """
        workflow_step_list = [TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1",
                                                               "TestAdapter1", step_index=1,
                                                               static_kwargs={"test1_val1": 1, "test1_val2": 1}),
                              TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter3",
                                                               "TestAdapter3", step_index=2,
                                                               dynamic_kwargs={
                                                                   "test": {wf_cfg.DATATYPE_INDEX_KEY: 0,
                                                                            wf_cfg.STEP_INDEX_KEY: 1}})]

        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert len(stored_datatypes) == 3, "DataTypes from all steps were not stored."
        for result_row in stored_datatypes:
            assert result_row.type in ['Datatype1', 'Datatype2'], "Wrong type was stored."

        finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
        assert finished == 3, "Didn't start operations for both adapters in workflow."
        assert started == 0, "Some operations from workflow didn't finish."
        assert error == 0, "Some operations finished with error status."


    def test_configuration2workflow(self):
        """
        Test building a WorkflowStep from a WorkflowStepConfiguration. Make sure all the data is
        correctly passed. Also check that the base workflow step is added to each dynamic parameter's step index.
        """
        workflow_step = TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1", "TestAdapter1",
                                                         static_kwargs={"static_param": "test"},
                                                         dynamic_kwargs={"dynamic_param": {wf_cfg.STEP_INDEX_KEY: 0,
                                                                                           wf_cfg.DATATYPE_INDEX_KEY: 0}},
                                                         step_index=1, base_step=5)
        assert workflow_step.step_index == 1, "Wrong step index in created workflow step."
        assert workflow_step.static_param == {'static_param': 'test'}, 'Different static parameters on step.'
        assert workflow_step.dynamic_param == {'dynamic_param': {wf_cfg.STEP_INDEX_KEY: 5,
                                                                 wf_cfg.DATATYPE_INDEX_KEY: 0}}, \
            "Dynamic parameters not saved properly, or base workflow index not added to step index."


    def test_create_workflow(self):
        """
        Test that a workflow with all the associated workflow steps is actually created.
        """
        workflow_step_list = [TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter2",
                                                               "TestAdapter2", step_index=1,
                                                               static_kwargs={"test2": 2}),
                              TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1",
                                                               "TestAdapter1", step_index=2,
                                                               static_kwargs={"test1_val1": 1, "test1_val2": 1})]
        burst_id = self.__create_complex_workflow(workflow_step_list)
        workflow_entities = dao.get_workflows_for_burst(burst_id)
        assert len(workflow_entities) == 1, "For some reason workflow was not stored in database."
        workflow_steps = dao.get_workflow_steps(workflow_entities[0].id)
        assert len(workflow_steps) == len(workflow_step_list) + 1, "Wrong number of workflow steps created."
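
A worked restatement of the base-step arithmetic that test_configuration2workflow
verifies: when the WorkflowStep is built, base_step is added to each dynamic
parameter's configured step index (the variable names below are illustrative):

configured_step_index = 0   # wf_cfg.STEP_INDEX_KEY in dynamic_kwargs
base_step = 5               # base_step passed to create_workflow_step
# The stored dynamic parameter ends up pointing at step 0 + 5 = 5,
# exactly the value asserted in the test above.
assert configured_step_index + base_step == 5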
Example #7
class TestOperationService(BaseTestCase):
    """
    Test class for the introspection module. Some tests here perform async launches;
    transactional tests won't work for those cases.
    """
    def setup_method(self):
        """
        Reset the database before each test.
        """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE

    def teardown_method(self):
        """
        Reset the database when test is done.
        """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()

    def _assert_no_ddti(self):
        count = dao.count_datatypes(self.test_project.id, DummyDataTypeIndex)
        assert 0 == count

    def _assert_stored_ddti(self, expected_cnt=1):
        count = dao.count_datatypes(self.test_project.id, DummyDataTypeIndex)
        assert expected_cnt == count
        datatype = dao.try_load_last_entity_of_type(self.test_project.id,
                                                    DummyDataTypeIndex)
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        return datatype

    def test_datatypes_groups(self, test_adapter_factory,
                              datatype_group_factory):
        """
        Tests that the DataType group is set correctly on the DataTypes resulting from the same operation group.
        """
        all_operations = dao.get_filtered_operations(self.test_project.id,
                                                     None)
        assert len(all_operations) == 0, "There should be no operation"

        dt_group = datatype_group_factory(project=self.test_project)
        model = TestModel()
        test_adapter_factory()
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter1", "TestAdapter1")

        operations = dao.get_operations_in_group(dt_group.id)

        for op in operations:
            model.gid = uuid.uuid4()
            op_path = StorageInterface().get_project_folder(
                self.test_project.name, str(op.id))
            op.view_model_gid = model.gid.hex
            op.algorithm = adapter.stored_adapter
            h5.store_view_model(model, op_path)
            dao.store_entity(op)

        all_operations = dao.get_filtered_operations(self.test_project.id,
                                                     None)
        assert len(all_operations) == 2, "Expected two operation groups"
        assert all_operations[0][2] == 6, "Expected 6 operations in one group"

        operation_group_id = all_operations[0][3]
        assert operation_group_id is not None, "The operation should be part of a group."

        self.operation_service.stop_operation(all_operations[1][0])
        self.operation_service.stop_operation(all_operations[1][1])
        # Make sure operations are executed
        self.operation_service.launch_operation(all_operations[1][0], False)
        self.operation_service.launch_operation(all_operations[1][1], False)

        resulted_datatypes = dao.get_datatype_in_group(
            operation_group_id=operation_group_id)
        assert len(
            resulted_datatypes) >= 2, "Expected at least 2, but: " + str(
                len(resulted_datatypes))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(
            operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"

    def test_initiate_operation(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory()
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        view_model = TestModel()
        view_model.test1_val1 = 5
        view_model.test1_val2 = 5
        adapter.generic_attributes.subject = "Test4242"

        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project,
                                                  adapter,
                                                  model_view=view_model)

        dts, count = dao.get_values_of_datatype(self.test_project.id,
                                                DummyDataTypeIndex)
        assert count == 1
        assert len(dts) == 1
        datatype = dao.get_datatype_by_id(dts[0][0])
        assert datatype.subject == "Test4242", "Wrong data stored."
        assert datatype.type == adapter.get_output(
        )[0].__name__, "Wrong data stored."

    def test_delete_dt_free_hdd_space(self, test_adapter_factory,
                                      operation_factory):
        """
        Launch an operation, then free disk space by deleting its resulting DataType so that a second launch also fits the quota.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(view_model))

        self._assert_no_ddti()
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project,
                                                  adapter,
                                                  model_view=view_model)
        datatype = self._assert_stored_ddti()

        # Now free some space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        self._assert_no_ddti()
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project,
                                                  adapter,
                                                  model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_two_ops_hdd_with_space(self, test_adapter_factory):
        """
        Launch two operations, giving the user enough disk space so that both finish.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = 2 * float(
            adapter.get_required_disk_size(view_model))

        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project,
                                                  adapter,
                                                  model_view=view_model)
        datatype = self._assert_stored_ddti()

        # Now update the maximum disk size to be the size of the previously resulted datatypes (transform from kB to MB)
        # plus what is estimated to be required from the next one (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(
            adapter.get_required_disk_size(view_model))

        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project,
                                                  adapter,
                                                  model_view=view_model)
        self._assert_stored_ddti(2)

    def test_launch_two_ops_hdd_full_space(self):
        """
        Launch two operations with just enough disk space for the first to finish;
        after shrinking the allowed disk size, the second should not.
        """
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()

        TvbProfile.current.MAX_DISK_SPACE = (
            1 + float(adapter.get_required_disk_size(view_model)))
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project,
                                                  adapter,
                                                  model_view=view_model)

        datatype = self._assert_stored_ddti()
        # Now update the maximum disk size to be less than size of the previously resulted dts (transform kB to MB)
        # plus what is estimated to be required from the next one (transform from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                            float(adapter.get_required_disk_size(view_model) - 1)

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user,
                                                      self.test_project,
                                                      adapter,
                                                      model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_operation_hdd_with_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()

        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(view_model))
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project,
                                                  adapter,
                                                  model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_operation_hdd_with_space_started_ops(
            self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        space_taken_by_started = 100
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        started_operation = model_operation.Operation(
            None,
            self.test_user.id,
            self.test_project.id,
            adapter.stored_adapter.id,
            status=model_operation.STATUS_STARTED,
            estimated_disk_size=space_taken_by_started)
        view_model = adapter.get_view_model()()
        dao.store_entity(started_operation)
        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(view_model) +
            space_taken_by_started)

        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project,
                                                  adapter,
                                                  model_view=view_model)
        self._assert_stored_ddti()

    def test_launch_operation_hdd_full_space(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(view_model) - 1)

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user,
                                                      self.test_project,
                                                      adapter,
                                                      model_view=view_model)
        self._assert_no_ddti()

    def test_launch_operation_hdd_full_space_started_ops(
            self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        space_taken_by_started = 100
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        started_operation = model_operation.Operation(
            None,
            self.test_user.id,
            self.test_project.id,
            adapter.stored_adapter.id,
            status=model_operation.STATUS_STARTED,
            estimated_disk_size=space_taken_by_started)
        view_model = adapter.get_view_model()()
        dao.store_entity(started_operation)
        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(view_model) +
            space_taken_by_started - 1)

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user,
                                                      self.test_project,
                                                      adapter,
                                                      model_view=view_model)
        self._assert_no_ddti()

    def test_stop_operation(self, test_adapter_factory):
        """
        Test that an operation is successfully stopped.
        """
        test_adapter_factory(adapter_class=TestAdapter2)
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter2", "TestAdapter2")
        view_model = adapter.get_view_model()()
        view_model.test = 5
        algo = adapter.stored_adapter
        operation = self.operation_service.prepare_operation(
            self.test_user.id, self.test_project, algo, view_model=view_model)

        self.operation_service._send_to_cluster(operation, adapter)
        self.operation_service.stop_operation(operation)

        operation = dao.get_operation_by_id(operation.id)
        assert operation.status == model_operation.STATUS_CANCELED, "Operation should have been canceled!"

    def test_stop_operation_finished(self, test_adapter_factory):
        """
        Test that an operation that is already finished is not changed by the stop operation.
        """
        test_adapter_factory()
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        view_model = adapter.get_view_model()()
        view_model.test1_val1 = 5
        view_model.test1_val2 = 5
        algo = adapter.stored_adapter
        operation = self.operation_service.prepare_operation(
            self.test_user.id, self.test_project, algo, view_model=view_model)
        self.operation_service._send_to_cluster(operation, adapter)
        operation = dao.get_operation_by_id(operation.id)
        operation.status = model_operation.STATUS_FINISHED
        dao.store_entity(operation)
        self.operation_service.stop_operation(operation.id)
        operation = dao.get_operation_by_id(operation.id)
        assert operation.status == model_operation.STATUS_FINISHED, "Operation shouldn't have been canceled!"

    def test_fire_operation(self):
        """
        Test preparation of an adapter and launch mechanism.
        """
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        test_user = TestFactory.create_user(username="******")
        test_project = TestFactory.create_project(admin=test_user,
                                                  name="test_project_fire_sim")

        result = OperationService().fire_operation(
            adapter,
            test_user,
            test_project.id,
            view_model=adapter.get_view_model()())
        assert result.endswith("has finished."), "Operation failed."
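
The *_started_ops variants above pin down the disk-quota rule: the
estimated_disk_size of operations already in STATUS_STARTED counts against
MAX_DISK_SPACE together with the new launch's requirement. A worked sketch of
the two boundary cases exercised by those tests (the numbers are hypothetical;
get_required_disk_size supplies the real value):

required = 50             # stands in for adapter.get_required_disk_size(view_model)
already_started = 100     # space_taken_by_started in the tests above
quota_ok = float(required + already_started)         # launch succeeds
quota_short = float(required + already_started - 1)  # raises NoMemoryAvailableException
assert quota_ok >= required + already_started
assert quota_short < required + already_started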
Example #8
class OperationServiceTest(BaseTestCase):
    """
    Test class for the introspection module. Some tests here perform async launches;
    transactional tests won't work for those cases.
    """


    def setUp(self):
        """
        Reset the database before each test.
        """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE


    def tearDown(self):
        """
        Reset the database when test is done.
        """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()


    def test_datatypes_groups(self):
        """
        Tests that the DataType group is set correctly on the DataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 0, "There should be no operation")

        algogroup = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
        adapter_instance = flow_service.build_adapter_instance(group)
        data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 1, "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")

        operation_group_id = all_operations[0][3]
        self.assertNotEqual(operation_group_id, None, "The operation should be part of a group.")

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

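        # All datatypes produced by the ranged launch should belong to a single DataTypeGroup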
        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")


    def test_initiate_operation(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test1_val1": 5, "test1_val2": 5}
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        res = self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                        tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        self.assertTrue(res.index("has finished.") > 10, "Operation didn't finish")
        group = dao.find_group(module, class_name)
        self.assertEqual(group.module, 'tvb.tests.framework.adapters.testadapter1', "Wrong data stored.")
        self.assertEqual(group.classname, 'TestAdapter1', "Wrong data stored.")
        dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1)
        self.assertEqual(count, 1)
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")


    def test_delete_dt_free_HDD_space(self):
        """
        Launch an operation, free disk space by deleting its result, then relaunch within the same quota.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 0)
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)

        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

        # Free the previously produced datatype so a second launch fits within the same quota
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 0)

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")


    def test_launch_two_ops_HDD_with_space(self):
        """
        Launch two operations and give the user enough disk space so that both finish.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = 2 * float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")
        # Now set the maximum disk space to the size of the previously produced datatype
        # plus what the next operation is estimated to require
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(adapter.get_required_disk_size(**data))

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 2)
        datatype = dao.get_datatype_by_id(dts[1][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")


    def test_launch_two_ops_HDD_full_space(self):
        """
        Launch two operations with enough space for the first to finish; after the disk
        quota is lowered, the second should fail.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = (1 + float(adapter.get_required_disk_size(**data)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")
        # Now set the maximum disk space below the size of the previously produced datatype
        # plus what the next operation is estimated to require
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                                float(adapter.get_required_disk_size(**data) - 1)

        self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                          self.test_project.id, adapter,
                          tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)


    def test_launch_operation_HDD_with_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")


    def test_launch_operation_HDD_with_space_started_ops(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        space_taken_by_started = 100
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
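        # A STARTED operation with an estimated disk size counts against the available quota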
        started_operation = model.Operation(self.test_user.id, self.test_project.id, group.id, "",
                                            status=model.STATUS_STARTED, estimated_disk_size=space_taken_by_started)
        dao.store_entity(started_operation)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")


    def test_launch_operation_HDD_full_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                          self.test_project.id, adapter,
                          tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 0)


    def test_launch_operation_HDD_full_space_started_ops(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        space_taken_by_started = 100
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        started_operation = model.Operation(self.test_user.id, self.test_project.id, group.id, "",
                                            status=model.STATUS_STARTED, estimated_disk_size=space_taken_by_started)
        dao.store_entity(started_operation)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                          self.test_project.id, adapter,
                          tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 0)


    def test_stop_operation(self):
        """
        Test that an operation is successfully stopped.
        """
        module = "tvb.tests.framework.adapters.testadapter2"
        class_name = "TestAdapter2"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 5}
        algo_group = adapter.algorithm_group
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        algo = dao.get_algorithm_by_group(algo_group.id)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                                  algo_category, {}, ABCAdapter.LAUNCH_METHOD, **data)
        self.operation_service._send_to_cluster(operations, adapter)
        self.operation_service.stop_operation(operations[0].id)
        operation = dao.get_operation_by_id(operations[0].id)
        self.assertEqual(operation.status, model.STATUS_CANCELED, "Operation should have been canceled!")


    def test_stop_operation_finished(self):
        """
        Test that an operation that is already finished is not changed by the stop operation.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test1_val1": 5, 'test1_val2': 5}
        algo_group = adapter.algorithm_group
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        algo = dao.get_algorithm_by_group(algo_group.id)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                                  algo_category, {}, ABCAdapter.LAUNCH_METHOD, **data)
        self.operation_service._send_to_cluster(operations, adapter)
        operation = dao.get_operation_by_id(operations[0].id)
        operation.status = model.STATUS_FINISHED
        dao.store_entity(operation)
        self.operation_service.stop_operation(operations[0].id)
        operation = dao.get_operation_by_id(operations[0].id)
        self.assertEqual(operation.status, model.STATUS_FINISHED, "Operation shouldn't have been canceled!")


    def test_array_from_string(self):
        """
        Simple test for parsing 1D, 2D and 3D arrays from strings.
        """
        row = {'description': 'test.',
               'default': 'None',
               'required': True,
               'label': 'test: ',
               'attributes': None,
               'quantifier': 'manual',
               'elementType': 'float',
               'type': 'array',
               'options': None,
               'name': 'test'}
        input_data_string = '[ [1 2 3] [4 5 6]]'
        output = string2array(input_data_string, ' ', row['elementType'])
        self.assertEqual(output.shape, (2, 3), "Dimensions not properly parsed")
        for i in output[0]:
            self.assertTrue(i in [1, 2, 3])
        for i in output[1]:
            self.assertTrue(i in [4, 5, 6])
        input_data_string = '[1, 2, 3, 4, 5, 6]'
        output = string2array(input_data_string, ',', row['elementType'])
        self.assertEqual(output.shape, (6,), "Dimensions not properly parsed")
        for i in output:
            self.assertTrue(i in [1, 2, 3, 4, 5, 6])
        input_data_string = '[ [ [1,1], [2, 2] ], [ [3 ,3], [4,4] ] ]'
        output = string2array(input_data_string, ',', row['elementType'])
        self.assertEqual(output.shape, (2, 2, 2), "Wrong dimensions.")
        for i in output[0][0]:
            self.assertTrue(i == 1)
        for i in output[0][1]:
            self.assertTrue(i == 2)
        for i in output[1][0]:
            self.assertTrue(i == 3)
        for i in output[1][1]:
            self.assertTrue(i == 4)
        row = {'description': 'test.',
               'default': 'None',
               'required': True,
               'label': 'test: ',
               'attributes': None,
               'quantifier': 'manual',
               'elementType': 'str',
               'type': 'array',
               'options': None,
               'name': 'test'}
        input_data_string = '[1, 2, 3, 4, 5, 6]'
        output = string2array(input_data_string, ',', row['elementType'])
        for i in output:
            self.assertTrue(i in [1, 2, 3, 4, 5, 6])


    def test_wrong_array_from_string(self):
        """Test that parsing an array from string is throwing the expected 
        exception when wrong input string"""
        row = {'description': 'test.',
               'default': 'None',
               'required': True,
               'label': 'test: ',
               'attributes': None,
               'quantifier': 'manual',
               'elementType': 'float',
               'type': 'array',
               'options': None,
               'name': 'test'}
        input_data_string = '[ [1,2 3] [4,5,6]]'
        self.assertRaises(ValueError, string2array, input_data_string, ',', row['elementType'])
        input_data_string = '[ [1,2,wrong], [4, 5, 6]]'
        self.assertRaises(ValueError, string2array, input_data_string, ',', row['elementType'])
        row = {'description': 'test.',
               'default': 'None',
               'required': True,
               'label': 'test: ',
               'attributes': None,
               'quantifier': 'manual',
               'elementType': 'str',
               'type': 'array',
               'options': None,
               'name': 'test'}
        output = string2array(input_data_string, ',', row['elementType'])
        self.assertEqual(output.shape, (2, 3))
        self.assertEqual(output[0][2], 'wrong', 'String data not converted properly')
        input_data_string = '[ [1,2 3] [4,5,6]]'
        output = string2array(input_data_string, ',', row['elementType'])
        self.assertEqual(output[0][1], '2 3')


    def test_reduce_dimension_component(self):
        """
        Tests that the data passed to the launch method of the NDimensionArrayAdapter
        is correct. The passed data should be a list of one-dimensional arrays.
        """
        inserted_count = FlowService().get_available_datatypes(self.test_project.id,
                                                               "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(inserted_count, 0, "Expected to find no data.")
        #create an operation
        algorithm_id = FlowService().get_algorithm_by_module_and_class('tvb.tests.framework.adapters.ndimensionarrayadapter',
                                                                       'NDimensionArrayAdapter')[0].id
        operation = model.Operation(self.test_user.id, self.test_project.id, algorithm_id, 'test params',
                                    meta=json.dumps({DataTypeMetaData.KEY_STATE: "RAW_DATA"}),
                                    status=model.STATUS_FINISHED, method_name=ABCAdapter.LAUNCH_METHOD)
        operation = dao.store_entity(operation)
        #save the array wrapper in DB
        adapter_instance = NDimensionArrayAdapter()
        PARAMS = {}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        inserted_data = FlowService().get_available_datatypes(self.test_project.id,
                                                              "tvb.datatypes.arrays.MappedArray")[0]
        self.assertEqual(len(inserted_data), 1, "Problems when inserting data")
        gid = inserted_data[0][2]
        entity = dao.get_datatype_by_gid(gid)
        #from the 3D array do not select any array
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": "requiredDim_1",
                  "input_data_dimensions_1": "",
                  "input_data_dimensions_2": ""}
        try:
            self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
            self.fail("Test should not pass. The resulted array should be a 1D array.")
        except Exception:
            # OK, do nothing; we were expecting to produce a 1D array
            pass
        #from the 3D array select only a 1D array
        first_dim = [gid + '_1_0', 'requiredDim_1']
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": first_dim,
                  "input_data_dimensions_1": gid + "_2_1"}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        expected_result = entity.array_data[:, 0, 1]
        actual_result = adapter_instance.launch_param
        self.assertEqual(len(actual_result), len(expected_result), "Not the same size for results!")
        self.assertTrue(numpy.equal(actual_result, expected_result).all())

        #from the 3D array select a 2D array
        first_dim = [gid + '_1_0', gid + '_1_1', 'requiredDim_2']
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": first_dim,
                  "input_data_dimensions_1": gid + "_2_1"}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        expected_result = entity.array_data[slice(0, None), [0, 1], 1]
        actual_result = adapter_instance.launch_param
        self.assertEqual(len(actual_result), len(expected_result), "Not the same size for results!")
        self.assertTrue(numpy.equal(actual_result, expected_result).all())

        #from 3D array select 1D array by applying SUM function on the first
        #dimension and average function on the second dimension
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": ["requiredDim_1", "func_sum"],
                  "input_data_dimensions_1": "func_average",
                  "input_data_dimensions_2": ""}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        aux = numpy.sum(entity.array_data, axis=0)
        expected_result = numpy.average(aux, axis=0)
        actual_result = adapter_instance.launch_param
        self.assertEqual(len(actual_result), len(expected_result), "Not the same size of results!")
        self.assertTrue(numpy.equal(actual_result, expected_result).all())

        #from 3D array select a 2D array and apply op. on the second dimension
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": ["requiredDim_2", "func_sum",
                                              "expected_shape_x,512", "operations_x,>"],
                  "input_data_dimensions_1": "",
                  "input_data_dimensions_2": ""}
        try:
            self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
            self.fail("Test should not pass! The second dimension of the array should be >512.")
        except Exception:
            # OK, do nothing;
            pass
Example #9
class TestOperationService(BaseTestCase):
    """
    Test class for the introspection module. Some tests here do async launches;
    transactional tests won't work for those cases.
    TODO: this still needs refactoring; it is huge, with duplicates and many irrelevant checks.
    """


    def setup_method(self):
        """
        Reset the database before each test.
        """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE


    def teardown_method(self):
        """
        Reset the database when test is done.
        """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()


    def _assert_no_dt2(self):
        count = dao.count_datatypes(self.test_project.id, Datatype2)
        assert 0 == count


    def _assert_stored_dt2(self, expected_cnt=1):
        count = dao.count_datatypes(self.test_project.id, Datatype2)
        assert expected_cnt == count
        datatype = dao.try_load_last_entity_of_type(self.test_project.id, Datatype2)
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        return datatype


    def test_datatypes_groups(self):
        """
        Tests that the DataType group is set correctly on the DataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 0, "There should be no operation"

        adapter_instance = TestFactory.create_adapter('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 1, "Expected one operation group"
        assert all_operations[0][2] == 2, "Expected 2 operations in group"

        operation_group_id = all_operations[0][3]
        assert operation_group_id is not None, "The operation should be part of a group."

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"


    def test_initiate_operation(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        adapter = TestFactory.create_adapter(module, class_name)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test1_val1": 5, "test1_val2": 5}
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
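        # initiate_operation runs the adapter synchronously and returns a completion message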
        res = self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                        tmp_folder, **data)
        assert res.index("has finished.") > 10, "Operation didn't finish"
        group = dao.get_algorithm_by_module(module, class_name)
        assert group.module == 'tvb.tests.framework.adapters.testadapter1', "Wrong data stored."
        assert group.classname == 'TestAdapter1', "Wrong data stored."
        dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1)
        assert count == 1
        assert len(dts) == 1
        datatype = dao.get_datatype_by_id(dts[0][0])
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        assert datatype.type == output_type, "Wrong data stored."


    def test_delete_dt_free_HDD_space(self):
        """
        Launch an operation, free disk space by deleting its result, then relaunch within the same quota.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now free some space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_stored_dt2()


    def test_launch_two_ops_HDD_with_space(self):
        """
        Launch two operations and give the user enough disk space so that both finish.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = 2 * float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now set the maximum disk space to the size of the previously produced datatype
        # plus what the next operation is estimated to require
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(adapter.get_required_disk_size(**data))

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_stored_dt2(2)


    def test_launch_two_ops_HDD_full_space(self):
        """
        Launch two operations with enough space for the first to finish; after the disk
        quota is lowered, the second should fail.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        data = {"test": 100}

        TvbProfile.current.MAX_DISK_SPACE = (1 + float(adapter.get_required_disk_size(**data)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)

        datatype = self._assert_stored_dt2()
        # Now set the maximum disk space below the size of the previously produced datatype
        # plus what the next operation is estimated to require
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                            float(adapter.get_required_disk_size(**data) - 1)

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_stored_dt2()


    def test_launch_operation_HDD_with_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        data = {"test": 100}

        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_stored_dt2()


    def test_launch_operation_HDD_with_space_started_ops(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        space_taken_by_started = 100
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        started_operation = model.Operation(self.test_user.id, self.test_project.id, adapter.stored_adapter.id, "",
                                            status=model.STATUS_STARTED, estimated_disk_size=space_taken_by_started)
        dao.store_entity(started_operation)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_stored_dt2()


    def test_launch_operation_HDD_full_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_no_dt2()


    def test_launch_operation_HDD_full_space_started_ops(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        space_taken_by_started = 100
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        started_operation = model.Operation(self.test_user.id, self.test_project.id, adapter.stored_adapter.id, "",
                                            status=model.STATUS_STARTED, estimated_disk_size=space_taken_by_started)
        dao.store_entity(started_operation)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_no_dt2()


    def test_stop_operation(self):
        """
        Test that an operation is successfully stopped.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter2", "TestAdapter2")
        data = {"test": 5}
        algo = adapter.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                                  algo_category, {}, **data)
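        # Submit the operations, then request a stop; the status should become CANCELED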
        self.operation_service._send_to_cluster(operations, adapter)
        self.operation_service.stop_operation(operations[0].id)
        operation = dao.get_operation_by_id(operations[0].id)
        assert operation.status == model.STATUS_CANCELED, "Operation should have been canceled!"


    def test_stop_operation_finished(self):
        """
        Test that an operation that is already finished is not changed by the stop operation.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        data = {"test1_val1": 5, 'test1_val2': 5}
        algo = adapter.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)
        operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                                  algo_category, {}, **data)
        self.operation_service._send_to_cluster(operations, adapter)
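        # Mark the operation as already FINISHED; a subsequent stop must leave it unchanged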
        operation = dao.get_operation_by_id(operations[0].id)
        operation.status = model.STATUS_FINISHED
        dao.store_entity(operation)
        self.operation_service.stop_operation(operations[0].id)
        operation = dao.get_operation_by_id(operations[0].id)
        assert operation.status == model.STATUS_FINISHED, "Operation shouldn't have been canceled!"


    def test_array_from_string(self):
        """
        Simple test for parsing 1D, 2D and 3D arrays from strings.
        """
        row = {'description': 'test.',
               'default': 'None',
               'required': True,
               'label': 'test: ',
               'attributes': None,
               'elementType': 'float',
               'type': 'array',
               'options': None,
               'name': 'test'}
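        # 2-D case: rows in brackets, elements separated by spaces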
        input_data_string = '[ [1 2 3] [4 5 6]]'
        output = string2array(input_data_string, ' ', row['elementType'])
        assert output.shape == (2, 3), "Dimensions not properly parsed"
        for i in output[0]:
            assert i in [1, 2, 3]
        for i in output[1]:
            assert i in [4, 5, 6]
        input_data_string = '[1, 2, 3, 4, 5, 6]'
        output = string2array(input_data_string, ',', row['elementType'])
        assert output.shape == (6,), "Dimensions not properly parsed"
        for i in output:
            assert i in [1, 2, 3, 4, 5, 6]
        input_data_string = '[ [ [1,1], [2, 2] ], [ [3 ,3], [4,4] ] ]'
        output = string2array(input_data_string, ',', row['elementType'])
        assert output.shape == (2, 2, 2), "Wrong dimensions."
        for i in output[0][0]:
            assert i == 1
        for i in output[0][1]:
            assert i == 2
        for i in output[1][0]:
            assert i == 3
        for i in output[1][1]:
            assert i == 4
        row = {'description': 'test.',
               'default': 'None',
               'required': True,
               'label': 'test: ',
               'attributes': None,
               'elementType': 'str',
               'type': 'array',
               'options': None,
               'name': 'test'}
        input_data_string = '[1, 2, 3, 4, 5, 6]'
        output = string2array(input_data_string, ',', row['elementType'])
        for i in output:
            assert i in [1, 2, 3, 4, 5, 6]


    def test_wrong_array_from_string(self):
        """Test that parsing an array from string is throwing the expected 
        exception when wrong input string"""
        row = {'description': 'test.',
               'default': 'None',
               'required': True,
               'label': 'test: ',
               'attributes': None,
               'elementType': 'float',
               'type': 'array',
               'options': None,
               'name': 'test'}
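        # Mixed comma/space separators in a float array should raise ValueError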
        input_data_string = '[ [1,2 3] [4,5,6]]'
        with pytest.raises(ValueError):
            string2array(input_data_string, ',', row['elementType'])
        input_data_string = '[ [1,2,wrong], [4, 5, 6]]'
        with pytest.raises(ValueError):
            string2array(input_data_string, ',', row['elementType'])
        row = {'description': 'test.',
               'default': 'None',
               'required': True,
               'label': 'test: ',
               'attributes': None,
               'elementType': 'str',
               'type': 'array',
               'options': None,
               'name': 'test'}
        output = string2array(input_data_string, ',', row['elementType'])
        assert output.shape == (2, 3)
        assert output[0][2] == 'wrong', 'String data not converted properly'
        input_data_string = '[ [1,2 3] [4,5,6]]'
        output = string2array(input_data_string, ',', row['elementType'])
        assert output[0][1] == '2 3'


    def test_reduce_dimension_component(self):
        """
        Tests that the data passed to the launch method of the NDimensionArrayAdapter
        is correct. The passed data should be a list of one-dimensional arrays.
        """
        inserted_count = FlowService().get_available_datatypes(self.test_project.id,
                                                               "tvb.datatypes.arrays.MappedArray")[1]
        assert inserted_count == 0, "Expected to find no data."
        #create an operation
        algorithm_id = FlowService().get_algorithm_by_module_and_class('tvb.tests.framework.adapters.ndimensionarrayadapter',
                                                                       'NDimensionArrayAdapter').id
        operation = model.Operation(self.test_user.id, self.test_project.id, algorithm_id, 'test params',
                                    meta=json.dumps({DataTypeMetaData.KEY_STATE: "RAW_DATA"}),
                                    status=model.STATUS_FINISHED)
        operation = dao.store_entity(operation)
        #save the array wrapper in DB
        adapter_instance = NDimensionArrayAdapter()
        PARAMS = {}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        inserted_data = FlowService().get_available_datatypes(self.test_project.id,
                                                              "tvb.datatypes.arrays.MappedArray")[0]
        assert len(inserted_data) == 1, "Problems when inserting data"
        gid = inserted_data[0][2]
        entity = dao.get_datatype_by_gid(gid)
        #from the 3D array do not select any array
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": "requiredDim_1",
                  "input_data_dimensions_1": "",
                  "input_data_dimensions_2": ""}
        try:
            self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
            raise AssertionError("Test should not pass. The resulted array should be a 1D array.")
        except Exception:
            # OK, do nothing; we were expecting to produce a 1D array
            pass
        #from the 3D array select only a 1D array
        first_dim = [gid + '_1_0', 'requiredDim_1']
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": first_dim,
                  "input_data_dimensions_1": gid + "_2_1"}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        expected_result = entity.array_data[:, 0, 1]
        actual_result = adapter_instance.launch_param
        assert len(actual_result) == len(expected_result), "Not the same size for results!"
        assert numpy.equal(actual_result, expected_result).all()

        #from the 3D array select a 2D array
        first_dim = [gid + '_1_0', gid + '_1_1', 'requiredDim_2']
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": first_dim,
                  "input_data_dimensions_1": gid + "_2_1"}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        expected_result = entity.array_data[slice(0, None), [0, 1], 1]
        actual_result = adapter_instance.launch_param
        assert len(actual_result) == len(expected_result), "Not the same size for results!"
        assert numpy.equal(actual_result, expected_result).all()

        #from 3D array select 1D array by applying SUM function on the first
        #dimension and average function on the second dimension
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": ["requiredDim_1", "func_sum"],
                  "input_data_dimensions_1": "func_average",
                  "input_data_dimensions_2": ""}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        aux = numpy.sum(entity.array_data, axis=0)
        expected_result = numpy.average(aux, axis=0)
        actual_result = adapter_instance.launch_param
        assert len(actual_result) == len(expected_result), "Not the same size of results!"
        assert numpy.equal(actual_result, expected_result).all()

        #from 3D array select a 2D array and apply op. on the second dimension
        PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
                  "input_data_dimensions_0": ["requiredDim_2", "func_sum",
                                              "expected_shape_x,512", "operations_x,>"],
                  "input_data_dimensions_1": "",
                  "input_data_dimensions_2": ""}
        try:
            self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
            raise AssertionError("Test should not pass! The second dimension of the array should be >512.")
        except Exception:
            # OK, do nothing;
            pass
Example #10
class OperationServiceTest(BaseTestCase):
    """
    Test class for the introspection module. Some tests here do async launches;
    transactional tests won't work for those cases.
    TODO: this still needs refactoring; it is huge, with duplicates and many irrelevant checks.
    """

    def setUp(self):
        """
        Reset the database before each test.
        """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE

    def tearDown(self):
        """
        Reset the database when test is done.
        """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()

    def _assert_no_dt2(self):
        count = dao.count_datatypes(self.test_project.id, Datatype2)
        self.assertEqual(0, count)

    def _assert_stored_dt2(self, expected_cnt=1):
        count = dao.count_datatypes(self.test_project.id, Datatype2)
        self.assertEqual(expected_cnt, count)
        datatype = dao.try_load_last_entity_of_type(self.test_project.id, Datatype2)
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        return datatype

    def test_datatypes_groups(self):
        """
        Tests that the DataType group is set correctly on the DataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 0, "There should be no operation")

        algogroup = dao.find_group("tvb.tests.framework.adapters.testadapter3", "TestAdapter3")
        group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
        adapter_instance = flow_service.build_adapter_instance(group)
        data = {model.RANGE_PARAMETER_1: "param_5", "param_5": [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 1, "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")

        operation_group_id = all_operations[0][3]
        self.assertNotEqual(operation_group_id, None, "The operation should be part of a group.")

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")

    def test_initiate_operation(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test1_val1": 5, "test1_val2": 5}
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        res = self.operation_service.initiate_operation(
            self.test_user, self.test_project.id, adapter, tmp_folder, **data
        )
        self.assertTrue(res.index("has finished.") > 10, "Operation didn't finish")
        group = dao.find_group(module, class_name)
        self.assertEqual(group.module, "tvb.tests.framework.adapters.testadapter1", "Wrong data stored.")
        self.assertEqual(group.classname, "TestAdapter1", "Wrong data stored.")
        dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1)
        self.assertEqual(count, 1)
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    def test_delete_dt_free_HDD_space(self):
        """
        Launch an operation, free disk space by deleting its result, then relaunch within the same quota.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now free some space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_stored_dt2()

    def test_launch_two_ops_HDD_with_space(self):
        """
        Launch two operations and give the user enough disk space so that both finish.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = 2 * float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now set the maximum disk space to the size of the previously produced datatype
        # plus what the next operation is estimated to require
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(adapter.get_required_disk_size(**data))

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_stored_dt2(2)

    def test_launch_two_ops_HDD_full_space(self):
        """
        Launch two operations with enough space for the first to finish; after the disk
        quota is lowered, the second should fail.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)

        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = 1 + float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)

        datatype = self._assert_stored_dt2()
        # Now set the maximum disk space below the size of the previously produced datatype
        # plus what the next operation is estimated to require
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + float(
            adapter.get_required_disk_size(**data) - 1
        )

        self.assertRaises(
            NoMemoryAvailableException,
            self.operation_service.initiate_operation,
            self.test_user,
            self.test_project.id,
            adapter,
            tmp_folder,
            **data
        )
        self._assert_stored_dt2()

    def test_launch_operation_HDD_with_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}

        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_stored_dt2()

    def test_launch_operation_HDD_with_space_started_ops(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        space_taken_by_started = 100
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        started_operation = model.Operation(
            self.test_user.id,
            self.test_project.id,
            group.id,
            "",
            status=model.STATUS_STARTED,
            estimated_disk_size=space_taken_by_started,
        )
        dao.store_entity(started_operation)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_stored_dt2()
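        # Note: the STATUS_STARTED operation stored above is assumed to reserve its
        # estimated_disk_size against the quota, which is why MAX_DISK_SPACE must
        # cover both the running operation and the new launch here (compare the
        # *_full_space_started_ops variant below, where the quota is short by 1).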

    def test_launch_operation_HDD_full_space(self):
        """
        Launch an operation with insufficient disk space and expect it to be rejected.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.assertRaises(
            NoMemoryAvailableException,
            self.operation_service.initiate_operation,
            self.test_user,
            self.test_project.id,
            adapter,
            tmp_folder,
            **data
        )
        self._assert_no_dt2()

    def test_launch_operation_HDD_full_space_started_ops(self):
        """
        Launch an operation while an already started operation reserves part of the disk
        quota, leaving insufficient space for the new one.
        """
        space_taken_by_started = 100
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        started_operation = model.Operation(
            self.test_user.id,
            self.test_project.id,
            group.id,
            "",
            status=model.STATUS_STARTED,
            estimated_disk_size=space_taken_by_started,
        )
        dao.store_entity(started_operation)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.assertRaises(
            NoMemoryAvailableException,
            self.operation_service.initiate_operation,
            self.test_user,
            self.test_project.id,
            adapter,
            tmp_folder,
            **data
        )
        self._assert_no_dt2()

    def test_stop_operation(self):
        """
        Test that an operation is successfully stopped.
        """
        module = "tvb.tests.framework.adapters.testadapter2"
        class_name = "TestAdapter2"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 5}
        algo_group = adapter.algorithm_group
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        algo = dao.get_algorithm_by_group(algo_group.id)
        operations, _ = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project.id, algo, algo_category, {}, **data
        )
        self.operation_service._send_to_cluster(operations, adapter)
        self.operation_service.stop_operation(operations[0].id)
        operation = dao.get_operation_by_id(operations[0].id)
        self.assertEqual(operation.status, model.STATUS_CANCELED, "Operation should have been canceled!")

    def test_stop_operation_finished(self):
        """
        Test that an operation that is already finished is not changed by the stop operation.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test1_val1": 5, "test1_val2": 5}
        algo_group = adapter.algorithm_group
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        algo = dao.get_algorithm_by_group(algo_group.id)
        operations, _ = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project.id, algo, algo_category, {}, **data
        )
        self.operation_service._send_to_cluster(operations, adapter)
        operation = dao.get_operation_by_id(operations[0].id)
        operation.status = model.STATUS_FINISHED
        dao.store_entity(operation)
        self.operation_service.stop_operation(operations[0].id)
        operation = dao.get_operation_by_id(operations[0].id)
        self.assertEqual(operation.status, model.STATUS_FINISHED, "Operation shouldn't have been canceled!")

    def test_array_from_string(self):
        """
        Simple test for parsing 1D, 2D and 3D arrays from strings.
        """
        row = {
            "description": "test.",
            "default": "None",
            "required": True,
            "label": "test: ",
            "attributes": None,
            "quantifier": "manual",
            "elementType": "float",
            "type": "array",
            "options": None,
            "name": "test",
        }
        input_data_string = "[ [1 2 3] [4 5 6]]"
        output = string2array(input_data_string, " ", row["elementType"])
        self.assertEqual(output.shape, (2, 3), "Dimensions not properly parsed")
        for i in output[0]:
            self.assertTrue(i in [1, 2, 3])
        for i in output[1]:
            self.assertTrue(i in [4, 5, 6])
        input_data_string = "[1, 2, 3, 4, 5, 6]"
        output = string2array(input_data_string, ",", row["elementType"])
        self.assertEqual(output.shape, (6,), "Dimensions not properly parsed")
        for i in output:
            self.assertTrue(i in [1, 2, 3, 4, 5, 6])
        input_data_string = "[ [ [1,1], [2, 2] ], [ [3 ,3], [4,4] ] ]"
        output = string2array(input_data_string, ",", row["elementType"])
        self.assertEqual(output.shape, (2, 2, 2), "Wrong dimensions.")
        for i in output[0][0]:
            self.assertTrue(i == 1)
        for i in output[0][1]:
            self.assertTrue(i == 2)
        for i in output[1][0]:
            self.assertTrue(i == 3)
        for i in output[1][1]:
            self.assertTrue(i == 4)
        row = {
            "description": "test.",
            "default": "None",
            "required": True,
            "label": "test: ",
            "attributes": None,
            "quantifier": "manual",
            "elementType": "str",
            "type": "array",
            "options": None,
            "name": "test",
        }
        input_data_string = "[1, 2, 3, 4, 5, 6]"
        output = string2array(input_data_string, ",", row["elementType"])
        for i in output:
            self.assertTrue(i in [1, 2, 3, 4, 5, 6])
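        # Summarizing the parses exercised above (a usage sketch of string2array,
        # derived only from these assertions):
        #   string2array("[1, 2, 3, 4, 5, 6]", ",", "float")  -> shape (6,)
        #   string2array("[ [1 2 3] [4 5 6]]", " ", "float")  -> shape (2, 3)
        #   string2array("[ [ [1,1], [2, 2] ], [ [3 ,3], [4,4] ] ]", ",", "float") -> shape (2, 2, 2)
        # The "str" element type keeps tokens unconverted (see the next test).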

    def test_wrong_array_from_string(self):
        """Test that parsing an array from string is throwing the expected 
        exception when wrong input string"""
        row = {
            "description": "test.",
            "default": "None",
            "required": True,
            "label": "test: ",
            "attributes": None,
            "quantifier": "manual",
            "elementType": "float",
            "type": "array",
            "options": None,
            "name": "test",
        }
        input_data_string = "[ [1,2 3] [4,5,6]]"
        self.assertRaises(ValueError, string2array, input_data_string, ",", row["elementType"])
        input_data_string = "[ [1,2,wrong], [4, 5, 6]]"
        self.assertRaises(ValueError, string2array, input_data_string, ",", row["elementType"])
        row = {
            "description": "test.",
            "default": "None",
            "required": True,
            "label": "test: ",
            "attributes": None,
            "quantifier": "manual",
            "elementType": "str",
            "type": "array",
            "options": None,
            "name": "test",
        }
        output = string2array(input_data_string, ",", row["elementType"])
        self.assertEqual(output.shape, (2, 3))
        self.assertEqual(output[0][2], "wrong", "String data not converted properly")
        input_data_string = "[ [1,2 3] [4,5,6]]"
        output = string2array(input_data_string, ",", row["elementType"])
        self.assertEqual(output[0][1], "2 3")

    def test_reduce_dimension_component(self):
        """
         Test that the data passed to the launch method of the
         NDimensionArrayAdapter is correct. The passed data should be a list
         of one-dimensional arrays.
        """
        inserted_count = FlowService().get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray"
        )[1]
        self.assertEqual(inserted_count, 0, "Expected to find no data.")
        # create an operation
        algorithm_id = (
            FlowService()
            .get_algorithm_by_module_and_class(
                "tvb.tests.framework.adapters.ndimensionarrayadapter", "NDimensionArrayAdapter"
            )[0]
            .id
        )
        operation = model.Operation(
            self.test_user.id,
            self.test_project.id,
            algorithm_id,
            "test params",
            meta=json.dumps({DataTypeMetaData.KEY_STATE: "RAW_DATA"}),
            status=model.STATUS_FINISHED,
        )
        operation = dao.store_entity(operation)
        # save the array wrapper in DB
        adapter_instance = NDimensionArrayAdapter()
        PARAMS = {}
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        inserted_data = FlowService().get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")[0]
        self.assertEqual(len(inserted_data), 1, "Problems when inserting data")
        gid = inserted_data[0][2]
        entity = dao.get_datatype_by_gid(gid)
        # from the 3D array do not select any array
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": "requiredDim_1",
            "input_data_dimensions_1": "",
            "input_data_dimensions_2": "",
        }
        # the selections cannot produce the required 1D array, so the
        # pre-launch is expected to fail
        with self.assertRaises(Exception):
            self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        # from the 3D array select only a 1D array
        first_dim = [gid + "_1_0", "requiredDim_1"]
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": first_dim,
            "input_data_dimensions_1": gid + "_2_1",
        }
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        expected_result = entity.array_data[:, 0, 1]
        actual_result = adapter_instance.launch_param
        self.assertEqual(len(actual_result), len(expected_result), "Not the same size for results!")
        self.assertTrue(numpy.equal(actual_result, expected_result).all())
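        # Reading the expected slice, the dimension parameters appear to encode
        # "<gid>_<axis>_<index>": "<gid>_1_0" fixes index 0 on axis 1, "<gid>_2_1"
        # fixes index 1 on axis 2, and "requiredDim_1" demands a 1D result,
        # matching entity.array_data[:, 0, 1].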

        # from the 3D array select a 2D array
        first_dim = [gid + "_1_0", gid + "_1_1", "requiredDim_2"]
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": first_dim,
            "input_data_dimensions_1": gid + "_2_1",
        }
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        expected_result = entity.array_data[slice(0, None), [0, 1], 1]
        actual_result = adapter_instance.launch_param
        self.assertEqual(len(actual_result), len(expected_result), "Not the same size for results!")
        self.assertTrue(numpy.equal(actual_result, expected_result).all())

        # from 3D array select 1D array by applying SUM function on the first
        # dimension and average function on the second dimension
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": ["requiredDim_1", "func_sum"],
            "input_data_dimensions_1": "func_average",
            "input_data_dimensions_2": "",
        }
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        aux = numpy.sum(entity.array_data, axis=0)
        expected_result = numpy.average(aux, axis=0)
        actual_result = adapter_instance.launch_param
        self.assertEqual(len(actual_result), len(expected_result), "Not the same size of results!")
        self.assertTrue(numpy.equal(actual_result, expected_result).all())

        # from 3D array select a 2D array and apply op. on the second dimension
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": ["requiredDim_2", "func_sum", "expected_shape_x,512", "operations_x,>"],
            "input_data_dimensions_1": "",
            "input_data_dimensions_2": "",
        }
        # the constraint requires the second dimension to exceed 512 entries,
        # which does not hold here, so the pre-launch is expected to fail
        with self.assertRaises(Exception):
            self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
Example #11
class TestOperationService(BaseTestCase):
    """
    Test class for the introspection module. Some tests here do async launches;
    transactional tests will not work for those cases.
    TODO: this still needs refactoring; it is huge, with duplicates and many irrelevant checks
    """
    def setup_method(self):
        """
        Reset the database before each test.
        """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE

    def teardown_method(self):
        """
        Reset the database when test is done.
        """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()

    def _assert_no_dt2(self):
        count = dao.count_datatypes(self.test_project.id, DummyDataTypeIndex)
        assert 0 == count

    def _assert_stored_dt2(self, expected_cnt=1):
        count = dao.count_datatypes(self.test_project.id, DummyDataTypeIndex)
        assert expected_cnt == count
        datatype = dao.try_load_last_entity_of_type(self.test_project.id,
                                                    DummyDataTypeIndex)
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        return datatype

    def test_datatypes_groups(self, test_adapter_factory):
        """
        Tests that the DataType group is set correctly on the DataTypes resulting from the same operation group.
        """

        all_operations = dao.get_filtered_operations(self.test_project.id,
                                                     None)
        assert len(all_operations) == 0, "There should be no operation"

        test_adapter_factory(TestAdapter3)
        algo = dao.get_algorithm_by_module(
            'tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        adapter_instance = ABCAdapter.build_adapter(algo)
        data = {model_burst.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        FlowService().fire_operation(adapter_instance, self.test_user,
                                     self.test_project.id, **data)
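        # RANGE_PARAMETER_1 names the parameter to sweep and 'param_5': [1, 2]
        # supplies its values, so this single fire_operation call is expected
        # to expand into an operation group with one operation per value.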

        all_operations = dao.get_filtered_operations(self.test_project.id,
                                                     None)
        assert len(all_operations) == 1, "Expected one operation group"
        assert all_operations[0][2] == 2, "Expected 2 operations in group"

        operation_group_id = all_operations[0][3]
        assert operation_group_id is not None, "The operation should be part of a group."

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(
            operation_group_id=operation_group_id)
        assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(
            operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"

    def test_initiate_operation(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        test_adapter_factory()
        adapter = TestFactory.create_adapter(module, class_name)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test1_val1": 5, "test1_val2": 5}
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project, adapter,
                                                  tmp_folder, **data)

        group = dao.get_algorithm_by_module(module, class_name)
        assert group.module == 'tvb.tests.framework.adapters.testadapter1', "Wrong data stored."
        assert group.classname == 'TestAdapter1', "Wrong data stored."
        dts, count = dao.get_values_of_datatype(self.test_project.id,
                                                DummyDataTypeIndex)
        assert count == 1
        assert len(dts) == 1
        datatype = dao.get_datatype_by_id(dts[0][0])
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        assert datatype.type == output_type, "Wrong data stored."

    def test_delete_dt_free_hdd_space(self, test_adapter_factory,
                                      operation_factory):
        """
        Launch an operation, remove its resulting DataType to free disk space, then launch again within the same quota.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")

        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project, adapter,
                                                  tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now free some space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project, adapter,
                                                  tmp_folder, **data)
        self._assert_stored_dt2()

    def test_launch_two_ops_hdd_with_space(self):
        """
        Launch two operations with enough available disk space for both to finish.
        """
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = 2 * float(
            adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")

        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project, adapter,
                                                  tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now update the maximum disk size to the size of the previously resulted DataTypes
        # (transformed from kB to MB) plus what the next one is estimated to require (transformed from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(
            adapter.get_required_disk_size(**data))

        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project, adapter,
                                                  tmp_folder, **data)
        self._assert_stored_dt2(2)

    def test_launch_two_ops_hdd_full_space(self):
        """
        Launch two operations, giving enough space for the first to finish,
        then lower the HDD quota so that the second one does not.
        """
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        data = {"test": 100}

        TvbProfile.current.MAX_DISK_SPACE = (
            1 + float(adapter.get_required_disk_size(**data)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project, adapter,
                                                  tmp_folder, **data)

        datatype = self._assert_stored_dt2()
        # Now update the maximum disk size to be less than the size of the previously resulted
        # DataTypes (transformed kB to MB) plus what the next one is estimated to require (transformed from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                            float(adapter.get_required_disk_size(**data) - 1)

        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user,
                                                      self.test_project,
                                                      adapter, tmp_folder,
                                                      **data)
        self._assert_stored_dt2()

    def test_launch_operation_hdd_with_space(self):
        """
        Launch an operation with exactly enough disk space available and expect it to finish.
        """
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        data = {"test": 100}

        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project, adapter,
                                                  tmp_folder, **data)
        self._assert_stored_dt2()

    def test_launch_operation_hdd_with_space_started_ops(
            self, test_adapter_factory):
        """
        Launch an operation while an already started operation reserves part of the disk
        quota, leaving just enough space for the new one to finish.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)

        space_taken_by_started = 100
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        form.fill_from_post({'_test': "100"})
        adapter.submit_form(form)
        started_operation = model_operation.Operation(
            self.test_user.id,
            self.test_project.id,
            adapter.stored_adapter.id,
            "",
            status=model_operation.STATUS_STARTED,
            estimated_disk_size=space_taken_by_started)
        dao.store_entity(started_operation)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(**data) + space_taken_by_started)
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project, adapter,
                                                  tmp_folder, **data)
        self._assert_stored_dt2()

    def test_launch_operation_hdd_full_space(self, test_adapter_factory):
        """
        Launch an operation with insufficient disk space and expect it to be rejected.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)

        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(**data) - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")
        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user,
                                                      self.test_project,
                                                      adapter, tmp_folder,
                                                      **data)
        self._assert_no_dt2()

    def test_launch_operation_hdd_full_space_started_ops(
            self, test_adapter_factory):
        """
        Launch an operation while an already started operation reserves part of the disk
        quota, leaving insufficient space for the new one.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)

        space_taken_by_started = 100
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        form = TestAdapterHDDRequiredForm()
        adapter.submit_form(form)
        started_operation = model_operation.Operation(
            self.test_user.id,
            self.test_project.id,
            adapter.stored_adapter.id,
            "",
            status=model_operation.STATUS_STARTED,
            estimated_disk_size=space_taken_by_started)
        dao.store_entity(started_operation)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(**data) + space_taken_by_started -
            1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")
        with pytest.raises(NoMemoryAvailableException):
            self.operation_service.initiate_operation(self.test_user,
                                                      self.test_project,
                                                      adapter, tmp_folder,
                                                      **data)
        self._assert_no_dt2()

    def test_stop_operation(self, test_adapter_factory):
        """
        Test that an operation is successfully stopped.
        """
        test_adapter_factory(adapter_class=TestAdapter2)
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter2", "TestAdapter2")
        data = {"test": 5}
        algo = adapter.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)
        operations, _ = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project, algo, algo_category, {},
            **data)
        self.operation_service._send_to_cluster(operations, adapter)
        self.operation_service.stop_operation(operations[0].id)
        operation = dao.get_operation_by_id(operations[0].id)
        assert operation.status == model_operation.STATUS_CANCELED, "Operation should have been canceled!"

    def test_stop_operation_finished(self, test_adapter_factory):
        """
        Test that an operation that is already finished is not changed by the stop operation.
        """
        test_adapter_factory()
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        data = {"test1_val1": 5, 'test1_val2': 5}
        algo = adapter.stored_adapter
        algo_category = dao.get_category_by_id(algo.fk_category)
        operations, _ = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project, algo, algo_category, {},
            **data)
        self.operation_service._send_to_cluster(operations, adapter)
        operation = dao.get_operation_by_id(operations[0].id)
        operation.status = model_operation.STATUS_FINISHED
        dao.store_entity(operation)
        self.operation_service.stop_operation(operations[0].id)
        operation = dao.get_operation_by_id(operations[0].id)
        assert operation.status == model_operation.STATUS_FINISHED, "Operation shouldn't have been canceled!"

    def test_array_from_string(self):
        """
        Simple test for parsing 1D, 2D and 3D arrays from strings.
        """
        row = {
            'description': 'test.',
            'default': 'None',
            'required': True,
            'label': 'test: ',
            'attributes': None,
            'elementType': 'float',
            'type': 'array',
            'options': None,
            'name': 'test'
        }
        input_data_string = '[ [1 2 3] [4 5 6]]'
        output = string2array(input_data_string, ' ', row['elementType'])
        assert output.shape == (2, 3), "Dimensions not properly parsed"
        for i in output[0]:
            assert i in [1, 2, 3]
        for i in output[1]:
            assert i in [4, 5, 6]
        input_data_string = '[1, 2, 3, 4, 5, 6]'
        output = string2array(input_data_string, ',', row['elementType'])
        assert output.shape == (6, ), "Dimensions not properly parsed"
        for i in output:
            assert i in [1, 2, 3, 4, 5, 6]
        input_data_string = '[ [ [1,1], [2, 2] ], [ [3 ,3], [4,4] ] ]'
        output = string2array(input_data_string, ',', row['elementType'])
        assert output.shape == (2, 2, 2), "Wrong dimensions."
        for i in output[0][0]:
            assert i == 1
        for i in output[0][1]:
            assert i == 2
        for i in output[1][0]:
            assert i == 3
        for i in output[1][1]:
            assert i == 4
        row = {
            'description': 'test.',
            'default': 'None',
            'required': True,
            'label': 'test: ',
            'attributes': None,
            'elementType': 'str',
            'type': 'array',
            'options': None,
            'name': 'test'
        }
        input_data_string = '[1, 2, 3, 4, 5, 6]'
        output = string2array(input_data_string, ',', row['elementType'])
        for i in output:
            assert i in [1, 2, 3, 4, 5, 6]

    def test_wrong_array_from_string(self):
        """Test that parsing an array from string is throwing the expected 
        exception when wrong input string"""
        row = {
            'description': 'test.',
            'default': 'None',
            'required': True,
            'label': 'test: ',
            'attributes': None,
            'elementType': 'float',
            'type': 'array',
            'options': None,
            'name': 'test'
        }
        input_data_string = '[ [1,2 3] [4,5,6]]'
        with pytest.raises(ValueError):
            string2array(input_data_string, ',', row['elementType'])
        input_data_string = '[ [1,2,wrong], [4, 5, 6]]'
        with pytest.raises(ValueError):
            string2array(input_data_string, ',', row['elementType'])
        row = {
            'description': 'test.',
            'default': 'None',
            'required': True,
            'label': 'test: ',
            'attributes': None,
            'elementType': 'str',
            'type': 'array',
            'options': None,
            'name': 'test'
        }
        output = string2array(input_data_string, ',', row['elementType'])
        assert output.shape == (2, 3)
        assert output[0][2] == 'wrong', 'String data not converted properly'
        input_data_string = '[ [1,2 3] [4,5,6]]'
        output = string2array(input_data_string, ',', row['elementType'])
        assert output[0][1] == '2 3'
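        # Contrast with the float case above: with element type 'str' the parser
        # appears to keep raw tokens ('wrong', '2 3') instead of raising
        # ValueError, so malformed numeric input only fails for numeric types.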

    def test_reduce_dimension_component(self):
        """
         Test that the data passed to the launch method of the
         NDimensionArrayAdapter is correct. The passed data should be a list
         of one-dimensional arrays.
        """
        inserted_count = FlowService().get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")[1]
        assert inserted_count == 0, "Expected to find no data."
        # create an operation
        algorithm_id = FlowService().get_algorithm_by_module_and_class(
            'tvb.tests.framework.adapters.ndimensionarrayadapter',
            'NDimensionArrayAdapter').id
        operation = model_operation.Operation(
            self.test_user.id,
            self.test_project.id,
            algorithm_id,
            'test params',
            meta=json.dumps({DataTypeMetaData.KEY_STATE: "RAW_DATA"}),
            status=model_operation.STATUS_FINISHED)
        operation = dao.store_entity(operation)
        # save the array wrapper in DB
        adapter_instance = NDimensionArrayAdapter()
        PARAMS = {}
        self.operation_service.initiate_prelaunch(operation, adapter_instance,
                                                  {}, **PARAMS)
        inserted_data = FlowService().get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")[0]
        assert len(inserted_data) == 1, "Problems when inserting data"
        gid = inserted_data[0][2]
        entity = dao.get_datatype_by_gid(gid)
        # from the 3D array do not select any array
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": "requiredDim_1",
            "input_data_dimensions_1": "",
            "input_data_dimensions_2": ""
        }
        # the selections cannot produce the required 1D array, so the
        # pre-launch is expected to fail
        with pytest.raises(Exception):
            self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        # from the 3D array select only a 1D array
        first_dim = [gid + '_1_0', 'requiredDim_1']
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": first_dim,
            "input_data_dimensions_1": gid + "_2_1"
        }
        self.operation_service.initiate_prelaunch(operation, adapter_instance,
                                                  {}, **PARAMS)
        expected_result = entity.array_data[:, 0, 1]
        actual_result = adapter_instance.launch_param
        assert len(actual_result) == len(
            expected_result), "Not the same size for results!"
        assert numpy.equal(actual_result, expected_result).all()

        # from the 3D array select a 2D array
        first_dim = [gid + '_1_0', gid + '_1_1', 'requiredDim_2']
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": first_dim,
            "input_data_dimensions_1": gid + "_2_1"
        }
        self.operation_service.initiate_prelaunch(operation, adapter_instance,
                                                  {}, **PARAMS)
        expected_result = entity.array_data[slice(0, None), [0, 1], 1]
        actual_result = adapter_instance.launch_param
        assert len(actual_result) == len(
            expected_result), "Not the same size for results!"
        assert numpy.equal(actual_result, expected_result).all()

        # from 3D array select 1D array by applying SUM function on the first
        # dimension and average function on the second dimension
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": ["requiredDim_1", "func_sum"],
            "input_data_dimensions_1": "func_average",
            "input_data_dimensions_2": ""
        }
        self.operation_service.initiate_prelaunch(operation, adapter_instance,
                                                  {}, **PARAMS)
        aux = numpy.sum(entity.array_data, axis=0)
        expected_result = numpy.average(aux, axis=0)
        actual_result = adapter_instance.launch_param
        assert len(actual_result) == len(
            expected_result), "Not the same size of results!"
        assert numpy.equal(actual_result, expected_result).all()

        # from 3D array select a 2D array and apply op. on the second dimension
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": ["requiredDim_2", "func_sum",
                                        "expected_shape_x,512", "operations_x,>"],
            "input_data_dimensions_1": "",
            "input_data_dimensions_2": ""
        }
        # the constraint requires the second dimension to exceed 512 entries,
        # which does not hold here, so the pre-launch is expected to fail
        with pytest.raises(Exception):
            self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)