Code example #1
    def create_group(test_user=None, test_project=None, subject="John Doe"):
        """
        Create a group of 2 operations, each with at least one resultant DataType.
        """
        if test_user is None:
            test_user = TestFactory.create_user()
        if test_project is None:
            test_project = TestFactory.create_project(test_user)

        ### Retrieve Adapter instance
        algo_group = dao.find_group('tvb_test.adapters.testadapter3',
                                    'TestAdapter3')
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        algo = dao.get_algorithm_by_group(algo_group.id)

        adapter_inst = TestFactory.create_adapter(algo_group=algo_group,
                                                  test_project=test_project)
        adapter_inst.meta_data = {
            DataTypeMetaData.KEY_SUBJECT: subject,
            DataTypeMetaData.KEY_STATE: "INTERMEDIATE"
        }
        args = {'first_range': 'param_5', 'param_5': [1, 2]}

        ### Prepare Operations group. Execute them synchronously
        service = OperationService()
        operations = service.prepare_operations(test_user.id, test_project.id,
                                                algo, algo_category, {},
                                                **args)[0]
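        # prepare_operations returns (operations, operation_group); launching with
        # False below runs each operation synchronously rather than on the cluster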
        service.launch_operation(operations[0].id, False, adapter_inst)
        service.launch_operation(operations[1].id, False, adapter_inst)

        resulted_dts = dao.get_datatype_in_group(
            operations[0].fk_operation_group)
        return resulted_dts, operations[0].fk_operation_group
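
A minimal usage sketch for the helper above, assuming it is exposed as a plain
function and that the surrounding test infrastructure (TestFactory, dao) is set
up as in the example; the assertions are illustrative, not part of the original:

    # the range over 'param_5' ([1, 2]) yields two operations, so we expect
    # at least one resultant DataType per operation in the same group
    resulted_dts, op_group_id = create_group(subject="Jane Doe")
    assert len(resulted_dts) >= 2, "expected a DataType per operation"
    datatype_group = dao.get_datatypegroup_by_op_group_id(op_group_id)
    assert datatype_group is not None, "DataTypes should belong to one group"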
Code example #2
class WorkflowTest(TransactionalTestCase):
    """
    Test that workflow conversion methods are valid.
    """
    def setUp(self):
        #        self.clean_database()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.old_config_file = cfg.CURRENT_DIR
        cfg.CURRENT_DIR = os.path.dirname(tvb_test.__file__)
        self.workflow_service = WorkflowService()
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.flow_service = FlowService()

    def tearDown(self):
        """
        Remove project folders and clean up database.
        """
        FilesHelper().remove_project_structure(self.test_project.name)
        self.delete_project_folders()
        cfg.CURRENT_DIR = self.old_config_file

    def __create_complex_workflow(self, workflow_step_list):
        """
        Creates a burst with a complex workflow from a given list of workflow steps.
        @param workflow_step_list: a list of workflow steps that will be used in the
            creation of a new workflow for a new burst
        """
        burst_config = TestFactory.store_burst(self.test_project.id)

        stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(
            Datatype1())

        first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            "tvb_test.adapters.testadapter1", "TestAdapterDatatypeInput")[0]
        metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
        kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
        operations, group = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project.id, first_step_algorithm,
            first_step_algorithm.algo_group.group_category, metadata, **kwargs)

        workflows = self.workflow_service.create_and_store_workflow(
            project_id=self.test_project.id,
            burst_id=burst_config.id,
            simulator_index=0,
            simulator_id=first_step_algorithm.id,
            operations=operations)
        self.operation_service.prepare_operations_for_workflowsteps(
            workflow_step_list, workflows, self.test_user.id, burst_config.id,
            self.test_project.id, group, operations)
        #fire the first op
        if len(operations) > 0:
            self.operation_service.launch_operation(operations[0].id, False)
        return burst_config.id

    def test_workflow_generation(self):
        """
        A simple test just for the fact that a workflow is created an ran, 
        no dynamic parameters are passed. In this case we create a two steps
        workflow: step1 - tvb_test.adapters.testadapter2.TestAdapter2
                  step2 - tvb_test.adapters.testadapter1.TestAdapter1
        The first adapter doesn't return anything and the second returns one
        tvb.datatypes.datatype1.Datatype1 instance. We check that the steps
        are actually ran by checking that two operations are created and that
        one dataType is stored.
        """
        workflow_step_list = [
            TestFactory.create_workflow_step("tvb_test.adapters.testadapter2",
                                             "TestAdapter2",
                                             static_kwargs={"test2": 2},
                                             step_index=1),
            TestFactory.create_workflow_step("tvb_test.adapters.testadapter1",
                                             "TestAdapter1",
                                             static_kwargs={
                                                 "test1_val1": 1,
                                                 "test1_val2": 1
                                             },
                                             step_index=2)
        ]
        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_info_for_project(
            self.test_project.id)
        self.assertTrue(
            len(stored_datatypes) == 2,
            "DataType from second step was not stored.")
        self.assertTrue(stored_datatypes[0][0] == 'Datatype1',
                        "Wrong type was stored.")
        self.assertTrue(stored_datatypes[1][0] == 'Datatype1',
                        "Wrong type was stored.")

        finished, started, error, _ = dao.get_operation_numbers(
            self.test_project.id)
        self.assertEqual(
            finished, 3,
            "Didn't start operations for both adapters in workflow.")
        self.assertEqual(started, 0,
                         "Some operations from workflow didn't finish.")
        self.assertEqual(error, 0,
                         "Some operations finished with error status.")

    def test_workflow_dynamic_params(self):
        """
        A simple test just for the fact that dynamic parameters are passed properly
        between two workflow steps: 
                  step1 - tvb_test.adapters.testadapter1.TestAdapter1
                  step2 - tvb_test.adapters.testadapter3.TestAdapter3
        The first adapter returns a tvb.datatypes.datatype1.Datatype1 instance. 
        The second adapter has this passed as a dynamic workflow parameter.
        We check that the steps are actually ran by checking that two operations 
        are created and that two dataTypes are stored.
        """
        workflow_step_list = [
            TestFactory.create_workflow_step("tvb_test.adapters.testadapter1",
                                             "TestAdapter1",
                                             static_kwargs={
                                                 "test1_val1": 1,
                                                 "test1_val2": 1
                                             },
                                             step_index=1),
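            # step 2 receives the DataType produced by step 1 (datatype index 0)
            # as its dynamic 'test' parameter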
            TestFactory.create_workflow_step("tvb_test.adapters.testadapter3",
                                             "TestAdapter3",
                                             dynamic_kwargs={
                                                 "test": {
                                                     wf_cfg.DATATYPE_INDEX_KEY:
                                                     0,
                                                     wf_cfg.STEP_INDEX_KEY: 1
                                                 }
                                             },
                                             step_index=2)
        ]

        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_info_for_project(
            self.test_project.id)
        self.assertTrue(
            len(stored_datatypes) == 3,
            "DataTypes from all steps were not stored.")
        for result_row in stored_datatypes:
            self.assertTrue(result_row[0] in ['Datatype1', 'Datatype2'],
                            "Wrong type was stored.")

        finished, started, error, _ = dao.get_operation_numbers(
            self.test_project.id)
        self.assertEqual(
            finished, 3,
            "Didn't start operations for both adapters in workflow.")
        self.assertEqual(started, 0,
                         "Some operations from workflow didn't finish.")
        self.assertEqual(error, 0,
                         "Some operations finished with error status.")

    def test_configuration2workflow(self):
        """
        Test that building a WorflowStep from a WorkflowStepConfiguration. Make sure all the data is 
        correctly passed. Also check that any base_wf_step is incremented to dynamic parameters step index.
        """
        workflow_step = TestFactory.create_workflow_step(
            "tvb_test.adapters.testadapter1",
            "TestAdapter1",
            static_kwargs={"static_param": "test"},
            dynamic_kwargs={
                "dynamic_param": {
                    wf_cfg.STEP_INDEX_KEY: 0,
                    wf_cfg.DATATYPE_INDEX_KEY: 0
                }
            },
            step_index=1,
            base_step=5)
        self.assertEqual(workflow_step.step_index, 1,
                         "Wrong step index in created workflow step.")
        self.assertEqual(workflow_step.static_param, {'static_param': 'test'},
                         'Different static parameters on step.')
        self.assertEqual(
            workflow_step.dynamic_param, {
                'dynamic_param': {
                    wf_cfg.STEP_INDEX_KEY: 5,
                    wf_cfg.DATATYPE_INDEX_KEY: 0
                }
            },
            "Dynamic parameters not saved properly, or base workflow index not added to step index."
        )

    def test_create_workflow(self):
        """
        Test that a workflow with all the associated workflow steps is actually created.
        """
        workflow_step_list = [
            TestFactory.create_workflow_step("tvb_test.adapters.testadapter2",
                                             "TestAdapter2",
                                             static_kwargs={"test2": 2},
                                             step_index=1),
            TestFactory.create_workflow_step("tvb_test.adapters.testadapter1",
                                             "TestAdapter1",
                                             static_kwargs={
                                                 "test1_val1": 1,
                                                 "test1_val2": 1
                                             },
                                             step_index=2)
        ]
        burst_id = self.__create_complex_workflow(workflow_step_list)
        workflow_entities = dao.get_workflows_for_burst(burst_id)
        self.assertTrue(
            len(workflow_entities) == 1,
            "Workflow was not stored in the database.")
        workflow_steps = dao.get_workflow_steps(workflow_entities[0].id)
        self.assertEqual(len(workflow_steps),
                         len(workflow_step_list) + 1,
                         "Wrong number of workflow steps created.")
Code example #3
class OperationServiceTest(BaseTestCase):
    """
    Test class for the introspection module. Some tests from here do async launches. For those
    cases Transactional tests won't work.
    """
    def setUp(self):
        """
        Reset the database before each test.
        """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TVBSettings.MAX_DISK_SPACE

    def tearDown(self):
        """
        Reset the database when test is done.
        """
        TVBSettings.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()

    def test_datatypes_groups(self):
        """
        Tests if the dataType group is set correct on the dataTypes resulted from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id,
                                                     None)
        self.assertEqual(len(all_operations), 0,
                         "There should be no operation")

        algogroup = dao.find_group('tvb_test.adapters.testadapter3',
                                   'TestAdapter3')
        group, _ = flow_service.prepare_adapter(self.test_project.id,
                                                algogroup)
        adapter_instance = flow_service.build_adapter_instance(group)
        data = {'first_range': 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user,
                                    self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id,
                                                     None)
        self.assertEqual(len(all_operations), 1,
                         "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2,
                         "Expected 2 operations in group")

        operation_group_id = all_operations[0][3]
        self.assertNotEqual(operation_group_id, None,
                             "The operation should be part of a group.")

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id)
        self.assertTrue(
            len(resulted_datatypes) >= 2,
            "Expected at least 2, but: " + str(len(resulted_datatypes)))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(
            operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id,
                         "DataTypeGroup is incorrect")

    def test_initiate_operation(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb_test.adapters.testadapter1"
        class_name = "TestAdapter1"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test1_val1": 5, "test1_val2": 5}
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")
        res = self.operation_service.initiate_operation(
            self.test_user,
            self.test_project.id,
            adapter,
            tmp_folder,
            method_name=ABCAdapter.LAUNCH_METHOD,
            **data)
        self.assertTrue(
            res.index("has finished.") > 10, "Operation didn't finish")
        group = dao.find_group(module, class_name)
        self.assertEqual(group.module, 'tvb_test.adapters.testadapter1',
                         "Wrong data stored.")
        self.assertEqual(group.classname, 'TestAdapter1', "Wrong data stored.")
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype1)
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                         "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    def test_delete_dt_free_HDD_space(self):
        """
        Launch two operations and give enough available space for user so that both should finish.
        """
        module = "tvb_test.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TVBSettings.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")

        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 0)
        self.operation_service.initiate_operation(
            self.test_user,
            self.test_project.id,
            adapter,
            tmp_folder,
            method_name=ABCAdapter.LAUNCH_METHOD,
            **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 1)

        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                         "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

        # Remove the resulted DataType to free disk space, so that the second launch
        # fits within the same MAX_DISK_SPACE quota.
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 0)

        self.operation_service.initiate_operation(
            self.test_user,
            self.test_project.id,
            adapter,
            tmp_folder,
            method_name=ABCAdapter.LAUNCH_METHOD,
            **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                         "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    def test_launch_two_ops_HDD_with_space(self):
        """
        Launch two operations and give enough available space for user so that both should finish.
        """
        module = "tvb_test.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TVBSettings.MAX_DISK_SPACE = 2 * float(
            adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")
        self.operation_service.initiate_operation(
            self.test_user,
            self.test_project.id,
            adapter,
            tmp_folder,
            method_name=ABCAdapter.LAUNCH_METHOD,
            **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                         "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")
        # Now update the maximum disk size to be the size of the previously resulted
        # datatypes (transform from kB to MB) plus what is estimated to be required
        # from the next one (transform from B to MB).
        TVBSettings.MAX_DISK_SPACE = float(datatype.disk_size) + float(
            adapter.get_required_disk_size(**data))

        self.operation_service.initiate_operation(
            self.test_user,
            self.test_project.id,
            adapter,
            tmp_folder,
            method_name=ABCAdapter.LAUNCH_METHOD,
            **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 2)
        datatype = dao.get_datatype_by_id(dts[1][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                         "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    def test_launch_two_ops_HDD_full_space(self):
        """
        Launch two operations and give available space for user so that the first should finish,
        but after the update to the user hdd size the second should not.
        """
        module = "tvb_test.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TVBSettings.MAX_DISK_SPACE = (
            1 + float(adapter.get_required_disk_size(**data)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")
        self.operation_service.initiate_operation(
            self.test_user,
            self.test_project.id,
            adapter,
            tmp_folder,
            method_name=ABCAdapter.LAUNCH_METHOD,
            **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                         "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")
        # Now update the maximum disk size to be less than the size of the previously
        # resulted datatypes (transform kB to MB) plus what is estimated to be
        # required from the next one (transform from B to MB).
        TVBSettings.MAX_DISK_SPACE = float(datatype.disk_size - 1) + float(
            adapter.get_required_disk_size(**data) - 1)

        self.assertRaises(NoMemoryAvailableException,
                          self.operation_service.initiate_operation,
                          self.test_user,
                          self.test_project.id,
                          adapter,
                          tmp_folder,
                          method_name=ABCAdapter.LAUNCH_METHOD,
                          **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 1)

    def test_launch_operation_HDD_with_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb_test.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TVBSettings.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")
        self.operation_service.initiate_operation(
            self.test_user,
            self.test_project.id,
            adapter,
            tmp_folder,
            method_name=ABCAdapter.LAUNCH_METHOD,
            **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                         "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    def test_launch_operation_HDD_with_space_started_ops(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        space_taken_by_started = 100
        module = "tvb_test.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        started_operation = model.Operation(
            self.test_user.id,
            self.test_project.id,
            group.id,
            "",
            status=model.STATUS_STARTED,
            result_disk_size=space_taken_by_started)
        dao.store_entity(started_operation)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TVBSettings.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(**data) + space_taken_by_started)
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")
        self.operation_service.initiate_operation(
            self.test_user,
            self.test_project.id,
            adapter,
            tmp_folder,
            method_name=ABCAdapter.LAUNCH_METHOD,
            **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                         "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    def test_launch_operation_HDD_full_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb_test.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TVBSettings.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(**data) - 1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")
        self.assertRaises(NoMemoryAvailableException,
                          self.operation_service.initiate_operation,
                          self.test_user,
                          self.test_project.id,
                          adapter,
                          tmp_folder,
                          method_name=ABCAdapter.LAUNCH_METHOD,
                          **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 0)

    def test_launch_operation_HDD_full_space_started_ops(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        space_taken_by_started = 100
        module = "tvb_test.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        started_operation = model.Operation(
            self.test_user.id,
            self.test_project.id,
            group.id,
            "",
            status=model.STATUS_STARTED,
            result_disk_size=space_taken_by_started)
        dao.store_entity(started_operation)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TVBSettings.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(**data) + space_taken_by_started -
            1)
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")
        self.assertRaises(NoMemoryAvailableException,
                          self.operation_service.initiate_operation,
                          self.test_user,
                          self.test_project.id,
                          adapter,
                          tmp_folder,
                          method_name=ABCAdapter.LAUNCH_METHOD,
                          **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 0)

    def test_stop_operation(self):
        """
        Test that an operation is successfully stopped.
        """
        module = "tvb_test.adapters.testadapter2"
        class_name = "TestAdapter2"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 5}
        algo_group = adapter.algorithm_group
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        algo = dao.get_algorithm_by_group(algo_group.id)
        operations, _ = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project.id, algo, algo_category, {},
            ABCAdapter.LAUNCH_METHOD, **data)
        self.operation_service._send_to_cluster(operations, adapter)
        self.operation_service.stop_operation(operations[0].id)
        operation = dao.get_operation_by_id(operations[0].id)
        self.assertEqual(operation.status, model.STATUS_CANCELED,
                         "Operation should have been canceled!")

    def test_stop_operation_finished(self):
        """
        Test that an operation that is already finished is not changed by the stop operation.
        """
        module = "tvb_test.adapters.testadapter1"
        class_name = "TestAdapter1"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test1_val1": 5, 'test1_val2': 5}
        algo_group = adapter.algorithm_group
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        algo = dao.get_algorithm_by_group(algo_group.id)
        operations, _ = self.operation_service.prepare_operations(
            self.test_user.id, self.test_project.id, algo, algo_category, {},
            ABCAdapter.LAUNCH_METHOD, **data)
        self.operation_service._send_to_cluster(operations, adapter)
        operation = dao.get_operation_by_id(operations[0].id)
        operation.status = model.STATUS_FINISHED
        dao.store_entity(operation)
        self.operation_service.stop_operation(operations[0].id)
        operation = dao.get_operation_by_id(operations[0].id)
        self.assertEqual(operation.status, model.STATUS_FINISHED,
                         "Operation shouldn't have been canceled!")

    def test_array_from_string(self):
        """
        Simple test for parse array on 1d, 2d and 3d array.
        """
        row = {
            'description': 'test.',
            'default': 'None',
            'required': True,
            'label': 'test: ',
            'attributes': None,
            'quantifier': 'manual',
            'elementType': 'float',
            'type': 'array',
            'options': None,
            'name': 'test'
        }
        input_data_string = '[ [1 2 3] [4 5 6]]'
        output = string2array(input_data_string, ' ', row['elementType'])
        self.assertEqual(output.shape, (2, 3),
                         "Dimensions not properly parsed")
        for i in output[0]:
            self.assertTrue(i in [1, 2, 3])
        for i in output[1]:
            self.assertTrue(i in [4, 5, 6])
        input_data_string = '[1, 2, 3, 4, 5, 6]'
        output = string2array(input_data_string, ',', row['elementType'])
        self.assertEqual(output.shape, (6, ), "Dimensions not properly parsed")
        for i in output:
            self.assertTrue(i in [1, 2, 3, 4, 5, 6])
        input_data_string = '[ [ [1,1], [2, 2] ], [ [3 ,3], [4,4] ] ]'
        output = string2array(input_data_string, ',', row['elementType'])
        self.assertEqual(output.shape, (2, 2, 2), "Wrong dimensions.")
        for i in output[0][0]:
            self.assertTrue(i == 1)
        for i in output[0][1]:
            self.assertTrue(i == 2)
        for i in output[1][0]:
            self.assertTrue(i == 3)
        for i in output[1][1]:
            self.assertTrue(i == 4)
        row = {
            'description': 'test.',
            'default': 'None',
            'required': True,
            'label': 'test: ',
            'attributes': None,
            'quantifier': 'manual',
            'elementType': 'str',
            'type': 'array',
            'options': None,
            'name': 'test'
        }
        input_data_string = '[1, 2, 3, 4, 5, 6]'
        output = string2array(input_data_string, ',', row['elementType'])
        for i in output:
            self.assertTrue(i in [1, 2, 3, 4, 5, 6])

    def test_wrong_array_from_string(self):
        """Test that parsing an array from string is throwing the expected 
        exception when wrong input string"""
        row = {
            'description': 'test.',
            'default': 'None',
            'required': True,
            'label': 'test: ',
            'attributes': None,
            'quantifier': 'manual',
            'elementType': 'float',
            'type': 'array',
            'options': None,
            'name': 'test'
        }
        input_data_string = '[ [1,2 3] [4,5,6]]'
        self.assertRaises(ValueError, string2array, input_data_string, ',',
                          row['elementType'])
        input_data_string = '[ [1,2,wrong], [4, 5, 6]]'
        self.assertRaises(ValueError, string2array, input_data_string, ',',
                          row['elementType'])
        row = {
            'description': 'test.',
            'default': 'None',
            'required': True,
            'label': 'test: ',
            'attributes': None,
            'quantifier': 'manual',
            'elementType': 'str',
            'type': 'array',
            'options': None,
            'name': 'test'
        }
        output = string2array(input_data_string, ',', row['elementType'])
        self.assertEqual(output.shape, (2, 3))
        self.assertEqual(output[0][2], 'wrong',
                         'String data not converted properly')
        input_data_string = '[ [1,2 3] [4,5,6]]'
        output = string2array(input_data_string, ',', row['elementType'])
        self.assertEqual(output[0][1], '2 3')

    def test_reduce_dimension_component(self):
        """
         This method tests if the data passed to the launch method of
         the NDimensionArrayAdapter adapter is correct. The passed data should be a list
         of arrays with one dimension.
        """
        inserted_data = FlowService().get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(len(inserted_data), 0, "Expected to find no data.")
        #create an operation
        algorithm_id = FlowService().get_algorithm_by_module_and_class(
            'tvb_test.adapters.ndimensionarrayadapter',
            'NDimensionArrayAdapter')[0].id
        operation = model.Operation(self.test_user.id,
                                    self.test_project.id,
                                    algorithm_id,
                                    'test params',
                                    meta=json.dumps(
                                        {DataTypeMetaData.KEY_STATE: "RAW"}),
                                    status=model.STATUS_FINISHED,
                                    method_name=ABCAdapter.LAUNCH_METHOD)
        operation = dao.store_entity(operation)
        #save the array wrapper in DB
        adapter_instance = NDimensionArrayAdapter()
        PARAMS = {}
        self.operation_service.initiate_prelaunch(operation, adapter_instance,
                                                  {}, **PARAMS)
        inserted_data = FlowService().get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(len(inserted_data), 1, "Problems when inserting data")
        gid = inserted_data[0][2]
        entity = dao.get_datatype_by_gid(gid)
        #from the 3D array do not select any array
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": "requiredDim_1",
            "input_data_dimensions_1": "",
            "input_data_dimensions_2": ""
        }
        try:
            self.operation_service.initiate_prelaunch(operation,
                                                      adapter_instance, {},
                                                      **PARAMS)
            self.fail("Expected an exception: with no dimensions selected, "
                      "the result cannot be reduced to a 1D array.")
        except Exception:
            # expected: the selection does not reduce the 3D array to 1D
            pass
        #from the 3D array select only a 1D array
        first_dim = [gid + '_1_0', 'requiredDim_1']
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": first_dim,
            "input_data_dimensions_1": gid + "_2_1"
        }
        self.operation_service.initiate_prelaunch(operation, adapter_instance,
                                                  {}, **PARAMS)
        expected_result = entity.array_data[:, 0, 1]
        actual_result = adapter_instance.launch_param
        self.assertEqual(len(actual_result), len(expected_result),
                         "Not the same size for results!")
        self.assertTrue(numpy.equal(actual_result, expected_result).all())

        #from the 3D array select a 2D array
        first_dim = [gid + '_1_0', gid + '_1_1', 'requiredDim_2']
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": first_dim,
            "input_data_dimensions_1": gid + "_2_1"
        }
        self.operation_service.initiate_prelaunch(operation, adapter_instance,
                                                  {}, **PARAMS)
        expected_result = entity.array_data[slice(0, None), [0, 1], 1]
        actual_result = adapter_instance.launch_param
        self.assertEqual(len(actual_result), len(expected_result),
                         "Not the same size for results!")
        self.assertTrue(numpy.equal(actual_result, expected_result).all())

        #from 3D array select 1D array by applying SUM function on the first
        #dimension and average function on the second dimension
        PARAMS = {
            "python_method": "reduce_dimension",
            "input_data": gid,
            "input_data_dimensions_0": ["requiredDim_1", "func_sum"],
            "input_data_dimensions_1": "func_average",
            "input_data_dimensions_2": ""
        }
        self.operation_service.initiate_prelaunch(operation, adapter_instance,
                                                  {}, **PARAMS)
        aux = numpy.sum(entity.array_data, axis=0)
        expected_result = numpy.average(aux, axis=0)
        actual_result = adapter_instance.launch_param
        self.assertEqual(len(actual_result), len(expected_result),
                         "Not the same size of results!")
        self.assertTrue(numpy.equal(actual_result, expected_result).all())

        #from 3D array select a 2D array and apply op. on the second dimension
        PARAMS = {
            "python_method":
            "reduce_dimension",
            "input_data":
            gid,
            "input_data_dimensions_0": [
                "requiredDim_2", "func_sum", "expected_shape_x,512",
                "operations_x,>"
            ],
            "input_data_dimensions_1":
            "",
            "input_data_dimensions_2":
            ""
        }
        try:
            self.operation_service.initiate_prelaunch(operation,
                                                      adapter_instance, {},
                                                      **PARAMS)
            self.fail("Expected an exception: the second dimension of the "
                      "array should be > 512.")
        except Exception:
            # expected: the shape constraint (> 512) is not satisfied
            pass
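
The disk-quota behaviour exercised by the HDD tests above can be sketched as a
standalone check. check_disk_space and its parameters are hypothetical stand-ins
for OperationService's internal accounting, which raises
NoMemoryAvailableException when an operation's estimate exceeds MAX_DISK_SPACE:

class NoMemoryAvailableException(Exception):
    """Stand-in for TVB's exception of the same name."""

def check_disk_space(max_disk_space, used_by_results, pending_started, required):
    """Raise when the estimated disk usage would exceed the quota.

    Mirrors the pattern in the tests above: space already taken by stored
    results and by STARTED operations counts against MAX_DISK_SPACE.
    """
    if used_by_results + pending_started + required > max_disk_space:
        raise NoMemoryAvailableException("Not enough disk space for operation.")

# test_launch_operation_HDD_with_space_started_ops: the quota covers the
# requirement plus the space of a started operation, so the launch succeeds.
check_disk_space(max_disk_space=200, used_by_results=0,
                 pending_started=100, required=100)

# test_launch_operation_HDD_full_space_started_ops: one unit short, so it raises.
try:
    check_disk_space(max_disk_space=199, used_by_results=0,
                     pending_started=100, required=100)
except NoMemoryAvailableException:
    pass  # expected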