Example #1
    def cancel_or_remove_burst(self, burst_id):
        """
        Cancel (if the burst is still running) or remove the burst given by burst_id.
        :returns: True when the Remove operation was performed, False when Cancel was performed.
        """
        burst_entity = dao.get_burst_by_id(burst_id)
        if burst_entity.status == burst_entity.BURST_RUNNING:
            self.stop_burst(burst_entity)
            return False

        service = ProjectService()
        ## Remove each DataType in the current burst.
        ## We cannot rely on cascade delete alone, because it does not work on SQLite for mapped DataTypes.
        datatypes = dao.get_all_datatypes_in_burst(burst_id)
        ## Get the operations linked to the current burst before removing the burst,
        ##    or else the burst will no longer be there to identify the operations.
        remaining_ops = dao.get_operations_in_burst(burst_id)

        # Remove the burst first, to delete workflow steps that still hold foreign keys to operations.
        correct = dao.remove_entity(burst_entity.__class__, burst_id)
        if not correct:
            raise RemoveDataTypeException("Could not remove Burst entity!")

        for datatype in datatypes:
            service.remove_datatype(burst_entity.fk_project, datatype.gid,
                                    False)

        ## Remove all remaining Operations.
        correct = True
        remaining_op_groups = set()
        project = dao.get_project_by_id(burst_entity.fk_project)

        for oper in remaining_ops:
            is_remaining = dao.get_generic_entity(oper.__class__, oper.id)
            if len(is_remaining) == 0:
                ### Operation was already removed by cascade.
                continue
            if oper.fk_operation_group is not None and oper.fk_operation_group not in remaining_op_groups:
                is_remaining = dao.get_generic_entity(model.OperationGroup,
                                                      oper.fk_operation_group)
                if len(is_remaining) > 0:
                    remaining_op_groups.add(oper.fk_operation_group)
                    correct = correct and dao.remove_entity(
                        model.OperationGroup, oper.fk_operation_group)
            correct = correct and dao.remove_entity(oper.__class__, oper.id)
            service.structure_helper.remove_operation_data(
                project.name, oper.id)

        if not correct:
            raise RemoveDataTypeException(
                "Could not remove Burst because a linked operation could not be dropped!!"
            )
        return True
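
A minimal usage sketch of the method above; burst_id is a hypothetical identifier, and BurstService is assumed to be the class hosting cancel_or_remove_burst:

# Hedged usage sketch; BurstService and burst_id are illustrative assumptions.
burst_service = BurstService()
if burst_service.cancel_or_remove_burst(burst_id):
    print("Burst was removed.")
else:
    print("Burst was still running; a cancel request was issued instead.")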
Example #2
class TestRemove(TransactionalTestCase):
    """
    This class contains tests for the service layer related to the removal of DataTypes.
    """
    def transactional_setup_method(self):
        """
        Prepare the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()
        self.test_user = TestFactory.create_user()

        self.delete_project_folders()
        result = self.count_all_entities(DataType)
        assert 0 == result, "There should be no data type in DB"
        result = self.count_all_entities(Project)
        assert 0 == result

        self.test_project = TestFactory.import_default_project(self.test_user)
        self.operation = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter()

    def transactional_teardown_method(self):
        """
        Reset the database when test is done.
        """
        self.delete_project_folders()

    def test_remove_used_connectivity(self):
        """
        Tests the removal of a connectivity that is used by other data types.
        """
        conn, conn_count = self.flow_service.get_available_datatypes(
            self.test_project.id, Connectivity)
        count_rm = self.count_all_entities(RegionMapping)
        assert 1 == conn_count
        assert 1 == count_rm

        conn_gid = conn[0][2]
        try:
            self.project_service.remove_datatype(self.test_project.id,
                                                 conn_gid)
            raise AssertionError(
                "The connectivity is still used. It should not be possible to remove it. "
                + str(conn_gid))
        except RemoveDataTypeException:
            # OK, do nothing
            pass

        res = dao.get_datatype_by_gid(conn_gid)
        assert conn[0].id == res.id, "The used connectivity was removed."

    def test_remove_used_surface(self):
        """
        Tries to remove a used surface.
        """
        mapping, mapping_count = self.flow_service.get_available_datatypes(
            self.test_project.id, RegionMapping)
        assert 1 == mapping_count, "There should be one Mapping."
        mapping_gid = mapping[0][2]
        mapping = ABCAdapter.load_entity_by_gid(mapping_gid)
        surface = dao.get_datatype_by_gid(mapping.surface.gid)
        assert surface.gid == mapping.surface.gid, "The surfaces should have the same GID"
        try:
            self.project_service.remove_datatype(self.test_project.id,
                                                 surface.gid)
            raise AssertionError(
                "The surface should still be used by a RegionMapping " +
                str(surface.gid))
        except RemoveDataTypeException:
            # OK, do nothing
            pass

        res = dao.get_datatype_by_gid(surface.gid)
        assert surface.id == res.id, "A used surface was deleted"

    def _remove_entity(self, data_class, before_number):
        """
        Try to remove entity. Fail otherwise.
        """
        dts, count = self.flow_service.get_available_datatypes(
            self.test_project.id, data_class)
        assert count == before_number
        for dt in dts:
            data_gid = dt[2]
            self.project_service.remove_datatype(self.test_project.id,
                                                 data_gid)
            res = dao.get_datatype_by_gid(data_gid)
            assert res is None, "The entity was not deleted."

    def test_happyflow_removedatatypes(self):
        """
        Tests the happy flow for the deletion of multiple entities.
        They are tested together because they depend on each other and they
        have to be removed in a certain order.
        """
        self._remove_entity(LocalConnectivity, 1)
        self._remove_entity(RegionMapping, 1)
        ### Remove Surfaces
        # SQLAlchemy has no uniform way to retrieve Surface as a base class (no wildcard for polymorphic_identity)
        self._remove_entity(Surface, 6)
        ### Remove a Connectivity
        self._remove_entity(Connectivity, 1)

    def test_remove_time_series(self):
        """
        Tests the happy flow for the deletion of a time series.
        """
        count_ts = self.count_all_entities(TimeSeries)
        assert 0 == count_ts, "There should be no time series"
        self._create_timeseries()
        series = self.get_all_entities(TimeSeries)
        assert 1 == len(series), "There should be only one time series"
        self.project_service.remove_datatype(self.test_project.id,
                                             series[0].gid)
        res = dao.get_datatype_by_gid(series[0].gid)
        assert res is None, "The time series was not deleted."

    def test_remove_array_wrapper(self):
        """
        Tests the happy flow for the deletion of an array wrapper.
        """
        count_array = self.count_all_entities(MappedArray)
        assert 1 == count_array
        data = {'param_1': 'some value'}
        OperationService().initiate_prelaunch(self.operation,
                                              self.adapter_instance, {},
                                              **data)
        array_wrappers = self.get_all_entities(MappedArray)
        assert 2 == len(array_wrappers)
        array_gid = array_wrappers[0].gid
        self.project_service.remove_datatype(self.test_project.id, array_gid)
        res = dao.get_datatype_by_gid(array_gid)
        assert res is None, "The array wrapper was not deleted."

    def test_remove_value_wrapper(self):
        """
        Test the deletion of a value wrapper dataType
        """
        count_vals = self.count_all_entities(ValueWrapper)
        assert 0 == count_vals, "There should be no value wrapper"
        value_wrapper = self._create_value_wrapper()
        self.project_service.remove_datatype(self.test_project.id,
                                             value_wrapper.gid)
        res = dao.get_datatype_by_gid(value_wrapper.gid)
        assert res is None, "The value wrapper was not deleted."

    def _create_timeseries(self):
        """Launch adapter to persist a TimeSeries entity"""
        storage_path = FilesHelper().get_project_folder(
            self.test_project, str(self.operation.id))

        time_series = TimeSeries()
        time_series.sample_period = 10.0
        time_series.start_time = 0.0
        time_series.storage_path = storage_path
        time_series.write_data_slice(numpy.array([1.0, 2.0, 3.0]))
        time_series.close_file()
        time_series.sample_period_unit = 'ms'

        self._store_entity(time_series, "TimeSeries",
                           "tvb.datatypes.time_series")
        count_ts = self.count_all_entities(TimeSeries)
        assert 1 == count_ts, "Should be only one TimeSeries"

    def _create_value_wrapper(self):
        """Persist ValueWrapper"""
        value_ = ValueWrapper(data_value=5.0, data_name="my_value")
        self._store_entity(value_, "ValueWrapper",
                           "tvb.datatypes.mapped_values")
        valuew = self.get_all_entities(ValueWrapper)
        assert 1 == len(valuew), "Should be one value wrapper"
        return ABCAdapter.load_entity_by_gid(valuew[0].gid)

    def _store_entity(self, entity, type_, module):
        """Launch adapter to store a create a persistent DataType."""
        entity.type = type_
        entity.module = module
        entity.subject = "John Doe"
        entity.state = "RAW_STATE"
        entity.set_operation_id(self.operation.id)
        adapter_instance = StoreAdapter([entity])
        OperationService().initiate_prelaunch(self.operation, adapter_instance,
                                              {})
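
The try/except + AssertionError pattern used in TestRemove can be written more compactly with pytest.raises. A minimal sketch of an equivalent test method inside the same class, assuming the same setup state and an `import pytest` at module level:

    def test_remove_used_connectivity_idiomatic(self):
        # Sketch: pytest.raises replaces the manual try/except bookkeeping.
        conn, _ = self.flow_service.get_available_datatypes(self.test_project.id, Connectivity)
        conn_gid = conn[0][2]
        with pytest.raises(RemoveDataTypeException):
            self.project_service.remove_datatype(self.test_project.id, conn_gid)
        assert dao.get_datatype_by_gid(conn_gid) is not None, "The used connectivity was removed."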
class TestProjectStructure(TransactionalTestCase):
    """
    Test ProjectService methods (part related to Project Data Structure).
    """

    def transactional_setup_method(self):
        """
        Prepare before each test.
        """
        self.project_service = ProjectService()
        self.flow_service = FlowService()
        self.structure_helper = FilesHelper()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, "ProjectStructure")

        self.relevant_filter = StaticFiltersFactory.build_datatype_filters(
            single_filter=StaticFiltersFactory.RELEVANT_VIEW)
        self.full_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.FULL_VIEW)

    def transactional_teardown_method(self):
        """
        Clear project folders after testing
        """
        self.delete_project_folders()

    def test_set_operation_visibility(self):
        """
        Check if the visibility for an operation is set correctly.
        """
        self.__init_algorithm()
        op1 = Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op1 = dao.store_entity(op1)
        assert op1.visible, "The operation should be visible."
        self.project_service.set_operation_and_group_visibility(op1.gid, False)
        updated_op = dao.get_operation_by_id(op1.id)
        assert not updated_op.visible, "The operation should not be visible."

    def test_set_op_and_group_visibility(self):
        """
        When changing the visibility for an operation that belongs to an operation group, we
        should also change the visibility for the entire group of operations.
        """
        _, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")
        list_of_operations = dao.get_operations_in_group(group_id)
        for operation in list_of_operations:
            assert operation.visible, "The operation should be visible."
        self.project_service.set_operation_and_group_visibility(list_of_operations[0].gid, False)
        operations = dao.get_operations_in_group(group_id)
        for operation in operations:
            assert not operation.visible, "The operation should not be visible."

    def test_set_op_group_visibility(self):
        """
        Tests if the visibility for an operation group is set correctly.
        """
        _, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")
        list_of_operations = dao.get_operations_in_group(group_id)
        for operation in list_of_operations:
            assert operation.visible, "The operation should be visible."
        op_group = dao.get_operationgroup_by_id(group_id)
        self.project_service.set_operation_and_group_visibility(op_group.gid, False, True)
        operations = dao.get_operations_in_group(group_id)
        for operation in operations:
            assert not operation.visible, "The operation should not be visible."

    def test_is_upload_operation(self):
        """
        Tests that upload and non-upload algorithms are created and run accordingly
        """
        self.__init_algorithm()
        upload_algo = self._create_algo_for_upload()
        op1 = Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op2 = Operation(self.test_user.id, self.test_project.id, upload_algo.id, "")
        operations = dao.store_entities([op1, op2])
        is_upload_operation = self.project_service.is_upload_operation(operations[0].gid)
        assert not is_upload_operation, "The operation should not be an upload operation."
        is_upload_operation = self.project_service.is_upload_operation(operations[1].gid)
        assert is_upload_operation, "The operation should be an upload operation."

    def test_get_upload_operations(self):
        """
        Test get_all when filter is for Upload category.
        """
        self.__init_algorithm()
        upload_algo = self._create_algo_for_upload()

        project = Project("test_proj_2", self.test_user.id, "desc")
        project = dao.store_entity(project)

        op1 = Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op2 = Operation(self.test_user.id, project.id, upload_algo.id, "", status=STATUS_FINISHED)
        op3 = Operation(self.test_user.id, self.test_project.id, upload_algo.id, "")
        op4 = Operation(self.test_user.id, self.test_project.id, upload_algo.id, "", status=STATUS_FINISHED)
        op5 = Operation(self.test_user.id, self.test_project.id, upload_algo.id, "", status=STATUS_FINISHED)
        operations = dao.store_entities([op1, op2, op3, op4, op5])

        upload_operations = self.project_service.get_all_operations_for_uploaders(self.test_project.id)
        assert 2 == len(upload_operations), "Wrong number of upload operations."
        upload_ids = [operation.id for operation in upload_operations]
        for i in [3, 4]:
            assert operations[i].id in upload_ids, \
                "The operation should be an upload operation."
        for i in [0, 1, 2]:
            assert not operations[i].id in upload_ids, \
                "The operation should not be an upload operation."

    def test_is_datatype_group(self):
        """
        Tests if a datatype is a group.
        """
        _, dt_group_id, first_dt, _ = self._create_datatype_group()
        dt_group = dao.get_generic_entity(DataTypeGroup, dt_group_id)[0]
        is_dt_group = self.project_service.is_datatype_group(dt_group.gid)
        assert is_dt_group, "The datatype should be a datatype group."
        is_dt_group = self.project_service.is_datatype_group(first_dt.gid)
        assert not is_dt_group, "The datatype should not be a datatype group."

    def test_count_datatypes_in_group(self):
        """ Test that counting dataTypes is correct. Happy flow."""
        _, dt_group_id, first_dt, _ = self._create_datatype_group()
        count = dao.count_datatypes_in_group(dt_group_id)
        assert count == 2
        count = dao.count_datatypes_in_group(first_dt.id)
        assert count == 0, "There should be no dataType."

    def test_set_datatype_visibility(self):
        """
        Check if the visibility for a datatype is set correctly.
        """
        # it is a list of 3 elements
        mapped_arrays = self._create_mapped_arrays(self.test_project.id)
        for mapped_array in mapped_arrays:
            is_visible = dao.get_datatype_by_id(mapped_array[0]).visible
            assert is_visible, "The data type should be visible."

        self.project_service.set_datatype_visibility(mapped_arrays[0][2], False)
        for i in range(len(mapped_arrays)):
            is_visible = dao.get_datatype_by_id(mapped_arrays[i][0]).visible
            if not i:
                assert not is_visible, "The data type should not be visible."
            else:
                assert is_visible, "The data type should be visible."

    def test_set_visibility_for_dt_in_group(self):
        """
        Check if the visibility for a datatype from a datatype group is set correctly.
        """
        _, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        assert first_dt.visible, "The data type should be visible."
        assert second_dt.visible, "The data type should be visible."
        self.project_service.set_datatype_visibility(first_dt.gid, False)

        db_dt_group = self.project_service.get_datatype_by_id(dt_group_id)
        db_first_dt = self.project_service.get_datatype_by_id(first_dt.id)
        db_second_dt = self.project_service.get_datatype_by_id(second_dt.id)

        assert not db_dt_group.visible, "The data type group should not be visible."
        assert not db_first_dt.visible, "The data type should not be visible."
        assert not db_second_dt.visible, "The data type should not be visible."

    def test_set_visibility_for_group(self):
        """
        Check if the visibility for a datatype group is set correctly.
        """
        _, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        dt_group = dao.get_generic_entity(DataTypeGroup, dt_group_id)[0]

        assert dt_group.visible, "The data type group should be visible."
        assert first_dt.visible, "The data type should be visible."
        assert second_dt.visible, "The data type should be visible."
        self.project_service.set_datatype_visibility(dt_group.gid, False)

        updated_dt_group = self.project_service.get_datatype_by_id(dt_group_id)
        updated_first_dt = self.project_service.get_datatype_by_id(first_dt.id)
        updated_second_dt = self.project_service.get_datatype_by_id(second_dt.id)

        assert not updated_dt_group.visible, "The data type group should not be visible."
        assert not updated_first_dt.visible, "The data type should not be visible."
        assert not updated_second_dt.visible, "The data type should not be visible."

    def test_getdatatypes_from_dtgroup(self):
        """
        Validate that we can retrieve all DTs from a DT_Group
        """
        _, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        datatypes = self.project_service.get_datatypes_from_datatype_group(dt_group_id)
        assert len(datatypes) == 2, "There should be 2 datatypes in the datatype group."
        expected_dict = {first_dt.id: first_dt, second_dt.id: second_dt}
        actual_dict = {datatypes[0].id: datatypes[0], datatypes[1].id: datatypes[1]}

        for key in expected_dict:
            expected = expected_dict[key]
            actual = actual_dict[key]
            assert expected.id == actual.id, "Not the same id."
            assert expected.gid == actual.gid, "Not the same gid."
            assert expected.type == actual.type, "Not the same type."
            assert expected.subject == actual.subject, "Not the same subject."
            assert expected.state == actual.state, "Not the same state."
            assert expected.visible == actual.visible, "The datatype visibility is not correct."
            assert expected.module == actual.module, "Not the same module."
            assert expected.user_tag_1 == actual.user_tag_1, "Not the same user_tag_1."
            assert expected.invalid == actual.invalid, "The invalid field value is not correct."
            assert expected.is_nan == actual.is_nan, "The is_nan field value is not correct."

    def test_get_operations_for_dt(self):
        """
        Tests method get_operations_for_datatype.
        Verifies that the result dictionary has the correct values.
        """
        created_ops, datatype_gid = self._create_operations_with_inputs()
        operations = self.project_service.get_operations_for_datatype(datatype_gid, self.relevant_filter)
        assert len(operations) == 2
        assert created_ops[0].id in [operations[0].id, operations[1].id], "Retrieved wrong operations."
        assert created_ops[2].id in [operations[0].id, operations[1].id], "Retrieved wrong operations."

        operations = self.project_service.get_operations_for_datatype(datatype_gid, self.full_filter)
        assert len(operations) == 4
        ids = [operations[0].id, operations[1].id, operations[2].id, operations[3].id]
        for i in range(4):
            assert created_ops[i].id in ids, "Retrieved wrong operations."

        operations = self.project_service.get_operations_for_datatype(datatype_gid, self.relevant_filter, True)
        assert len(operations) == 1
        assert created_ops[4].id == operations[0].id, "Retrieved wrong operation."

        operations = self.project_service.get_operations_for_datatype(datatype_gid, self.full_filter, True)
        assert len(operations) == 2
        assert created_ops[4].id in [operations[0].id, operations[1].id], "Retrieved wrong operations."
        assert created_ops[5].id in [operations[0].id, operations[1].id], "Retrieved wrong operations."

    def test_get_operations_for_dt_group(self):
        """
        Tests method get_operations_for_datatype_group.
        Verifies that the filters' influence over the results is as expected.
        """
        created_ops, dt_group_id = self._create_operations_with_inputs(True)

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id, self.relevant_filter)
        assert len(ops) == 2
        assert created_ops[0].id in [ops[0].id, ops[1].id], "Retrieved wrong operations."
        assert created_ops[2].id in [ops[0].id, ops[1].id], "Retrieved wrong operations."

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id, self.full_filter)
        assert len(ops) == 4, "Incorrect number of operations."
        ids = [ops[0].id, ops[1].id, ops[2].id, ops[3].id]
        for i in range(4):
            assert created_ops[i].id in ids, "Retrieved wrong operations."

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id, self.relevant_filter, True)
        assert len(ops) == 1
        assert created_ops[4].id == ops[0].id, "Retrieved wrong operation."

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id, self.full_filter, True)
        assert len(ops) == 2
        assert created_ops[4].id in [ops[0].id, ops[1].id], "Retrieved wrong operations."
        assert created_ops[5].id in [ops[0].id, ops[1].id], "Retrieved wrong operations."

    def test_get_inputs_for_operation(self):
        """
        Tests method get_datatype_and_datatypegroup_inputs_for_operation.
        Verifies that the filters' influence over the results is as expected.
        """
        algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        ids = []
        for datatype in array_wrappers:
            ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        parameters = json.dumps({"param_5": "1", "param_1": array_wrappers[0][2],
                                 "param_2": array_wrappers[1][2], "param_3": array_wrappers[2][2], "param_6": "0"})
        operation = Operation(self.test_user.id, self.test_project.id, algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.relevant_filter)
        assert len(inputs) == 2
        assert ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType."
        assert ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType."
        assert not ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType."

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.full_filter)
        assert len(inputs) == 3, "Incorrect number of operations."
        assert ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType."
        assert ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType."
        assert ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType."

        project, dt_group_id, first_dt, _ = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        parameters = json.dumps({"other_param": "_", "param_1": first_dt.gid})
        operation = Operation(self.test_user.id, project.id, algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.relevant_filter)
        assert len(inputs) == 0, "Incorrect number of dataTypes."
        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.full_filter)
        assert len(inputs) == 1, "Incorrect number of dataTypes."
        assert inputs[0].id == dt_group_id, "Wrong dataType."
        assert inputs[0].id != first_dt.id, "Wrong dataType."

    def test_get_inputs_for_op_group(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The DataType inputs will be from a DataType group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        second_dt.visible = False
        dao.store_entity(second_dt)

        op_group = OperationGroup(project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({"param_5": "1", "param_1": first_dt.gid, "param_6": "2"})
        params_2 = json.dumps({"param_5": "1", "param_4": second_dt.gid, "param_6": "5"})

        algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

        op1 = Operation(self.test_user.id, project.id, algo.id, params_1, op_group_id=op_group.id)
        op2 = Operation(self.test_user.id, project.id, algo.id, params_2, op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        assert len(inputs) == 0

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        assert len(inputs) == 1, "Incorrect number of dataTypes."
        assert not first_dt.id == inputs[0].id, "Retrieved wrong dataType."
        assert not second_dt.id == inputs[0].id, "Retrieved wrong dataType."
        assert dt_group_id == inputs[0].id, "Retrieved wrong dataType."

        first_dt.visible = True
        dao.store_entity(first_dt)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        assert len(inputs) == 1, "Incorrect number of dataTypes."
        assert not first_dt.id == inputs[0].id, "Retrieved wrong dataType."
        assert not second_dt.id == inputs[0].id, "Retrieved wrong dataType."
        assert dt_group_id == inputs[0].id, "Retrieved wrong dataType."

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        assert len(inputs) == 1, "Incorrect number of dataTypes."
        assert not first_dt.id == inputs[0].id, "Retrieved wrong dataType."
        assert not second_dt.id == inputs[0].id, "Retrieved wrong dataType."
        assert dt_group_id == inputs[0].id, "Retrieved wrong dataType."

    def test_get_inputs_for_op_group_simple_inputs(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The dataType inputs will not be part of a dataType group.
        """
        # it is a list of 3 elements
        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        array_wrapper_ids = []
        for datatype in array_wrappers:
            array_wrapper_ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(array_wrapper_ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        op_group = OperationGroup(self.test_project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({"param_5": "2", "param_1": array_wrappers[0][2],
                               "param_2": array_wrappers[1][2], "param_6": "7"})
        params_2 = json.dumps({"param_5": "5", "param_3": array_wrappers[2][2],
                               "param_2": array_wrappers[1][2], "param_6": "6"})

        algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

        op1 = Operation(self.test_user.id, self.test_project.id, algo.id, params_1, op_group_id=op_group.id)
        op2 = Operation(self.test_user.id, self.test_project.id, algo.id, params_2, op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        assert len(inputs) == 2
        assert not array_wrapper_ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType."
        assert array_wrapper_ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType."
        assert array_wrapper_ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType."

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        assert len(inputs) == 3, "Incorrect number of dataTypes."
        assert array_wrapper_ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id]
        assert array_wrapper_ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id]
        assert array_wrapper_ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id]

    def test_remove_datatype(self):
        """
        Tests the deletion of a datatype.
        """
        # it is a list of 3 elements
        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        dt_list = []
        for array_wrapper in array_wrappers:
            dt_list.append(dao.get_datatype_by_id(array_wrapper[0]))

        self.project_service.remove_datatype(self.test_project.id, dt_list[0].gid)
        self._check_if_datatype_was_removed(dt_list[0])

    def test_remove_datatype_from_group(self):
        """
        Tests the deletion of a datatype group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        datatype_group = dao.get_generic_entity(DataTypeGroup, dt_group_id)[0]

        self.project_service.remove_datatype(project.id, first_dt.gid)
        self._check_if_datatype_was_removed(first_dt)
        self._check_if_datatype_was_removed(second_dt)
        self._check_if_datatype_was_removed(datatype_group)
        self._check_datatype_group_removed(dt_group_id, datatype_group.fk_operation_group)

    def test_remove_datatype_group(self):
        """
        Tests the deletion of a datatype group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        datatype_group = dao.get_generic_entity(DataTypeGroup, dt_group_id)[0]

        self.project_service.remove_datatype(project.id, datatype_group.gid)
        self._check_if_datatype_was_removed(first_dt)
        self._check_if_datatype_was_removed(second_dt)
        self._check_if_datatype_was_removed(datatype_group)
        self._check_datatype_group_removed(dt_group_id, datatype_group.fk_operation_group)

    def _create_mapped_arrays(self, project_id):
        """
        :param project_id: the project in which the arrays are created
        :return: a list of dummy `MappedArray`
        """
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        assert count == 0

        group = dao.get_algorithm_by_module('tvb.tests.framework.adapters.ndimensionarrayadapter',
                                            'NDimensionArrayAdapter')
        adapter_instance = ABCAdapter.build_adapter(group)
        data = {'param_1': 'some value'}
        # create 3 data types
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        assert count == 1

        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        assert count == 2

        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        array_wrappers, count = self.flow_service.get_available_datatypes(project_id,
                                                                          "tvb.datatypes.arrays.MappedArray")
        assert count == 3

        return array_wrappers

    def _create_operation(self, project_id, algorithm_id):
        """
        Create a dummy operation.
        :param project_id: the project in which the operation is created
        :param algorithm_id: the algorithm to be run for the operation
        :return: a dummy `Operation` with the given specifications
        """
        algorithm = dao.get_algorithm_by_id(algorithm_id)
        meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
                DataTypeMetaData.KEY_STATE: "RAW_DATA"}
        operation = Operation(self.test_user.id, project_id, algorithm.id, 'test params',
                              meta=json.dumps(meta), status=STATUS_FINISHED)
        return dao.store_entity(operation)

    def _create_datatype_group(self):
        """
        Creates a project and one DataTypeGroup with 2 DataTypes in the new group.
        """
        test_project = TestFactory.create_project(self.test_user, "NewProject")

        all_operations = dao.get_filtered_operations(test_project.id, None, is_count=True)
        assert 0 == all_operations, "There should be no operation."

        datatypes, op_group_id = TestFactory.create_group(self.test_user, test_project)
        dt_group = dao.get_datatypegroup_by_op_group_id(op_group_id)

        return test_project, dt_group.id, datatypes[0], datatypes[1]

    def _create_operations_with_inputs(self, is_group_parent=False):
        """
        Method used for creating a complex tree of operations.

        If 'is_group_parent' is True, then a new group will be created and one of its
        entries will be used as input for the returned operations.
        """
        group_dts, root_op_group_id = TestFactory.create_group(self.test_user, self.test_project)
        if is_group_parent:
            datatype_gid = group_dts[0].gid
        else:
            datatype_gid = TestProjectService._create_value_wrapper(self.test_user, self.test_project)[1]

        parameters = json.dumps({"param_name": datatype_gid})

        ops = []
        for i in range(4):
            ops.append(TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project))
            if i in [1, 3]:
                ops[i].visible = False
            ops[i].parameters = parameters
            ops[i] = dao.store_entity(ops[i])

        # groups
        _, ops_group = TestFactory.create_group(self.test_user, self.test_project)
        ops_group = dao.get_operations_in_group(ops_group)
        assert 2 == len(ops_group)
        ops_group[0].parameters = parameters
        ops_group[0] = dao.store_entity(ops_group[0])
        ops_group[1].visible = False
        ops_group[1].parameters = parameters
        ops_group[1] = dao.store_entity(ops_group[1])

        ops.extend(ops_group)
        if is_group_parent:
            dt_group = dao.get_datatypegroup_by_op_group_id(root_op_group_id)
            return ops, dt_group.id
        return ops, datatype_gid

    def _check_if_datatype_was_removed(self, datatype):
        """
        Check if a certain datatype was removed.
        """
        # Raise the AssertionError outside the try block; a bare `except Exception`
        # would otherwise swallow it and the check could never fail.
        try:
            dao.get_datatype_by_id(datatype.id)
        except Exception:
            pass
        else:
            raise AssertionError("The datatype was not deleted.")
        try:
            dao.get_operation_by_id(datatype.fk_from_operation)
        except Exception:
            pass
        else:
            raise AssertionError("The operation was not deleted.")

    def _check_datatype_group_removed(self, datatype_group_id, operation_group_id):
        """
        Checks that the DataTypeGroup and OperationGroup were removed.
        """
        # get_generic_entity returns a list, so an empty result (not an exception)
        # signals that the DataTypeGroup was removed.
        remaining = dao.get_generic_entity(DataTypeGroup, datatype_group_id)
        assert len(remaining) == 0, "The DataTypeGroup entity was not removed."

        # Raise the AssertionError outside the try block; a bare `except Exception`
        # would otherwise swallow it and the check could never fail.
        try:
            dao.get_operationgroup_by_id(operation_group_id)
        except Exception:
            pass
        else:
            raise AssertionError("The OperationGroup entity was not removed.")

    def __init_algorithm(self):
        """
        Insert some starting data in the database.
        """
        categ1 = AlgorithmCategory('one', True)
        self.categ1 = dao.store_entity(categ1)
        ad = Algorithm(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS, categ1.id)
        self.algo_inst = dao.store_entity(ad)

    @staticmethod
    def _create_algo_for_upload():
        """ Creates a fake algorithm for an upload category. """
        category = dao.store_entity(AlgorithmCategory("upload_category", rawinput=True))
        return dao.store_entity(Algorithm("module", "classname", category.id))
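
The TestProjectStructure variant below receives factories such as datatype_group_factory through pytest fixture injection. A minimal sketch of how such a fixture could be wired, reusing TestFactory.create_group and dao.get_datatypegroup_by_op_group_id as in the tests above; the exact signature is an assumption, and the real fixture lives in the framework's test configuration:

@pytest.fixture()
def datatype_group_factory(user_factory, project_factory):
    """Hypothetical sketch: build a DataTypeGroup on demand for a test."""
    def build(project=None):
        user = user_factory()
        if project is None:
            project = project_factory(user)
        _, op_group_id = TestFactory.create_group(user, project)
        # The DataTypeGroup is generated alongside the operation group.
        return dao.get_datatypegroup_by_op_group_id(op_group_id)
    return build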
class TestProjectStructure(TransactionalTestCase):
    """
    Test ProjectService methods (part related to Project Data Structure).
    """
    def transactional_setup_method(self):
        """
        Prepare before each test.
        """
        self.project_service = ProjectService()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user,
                                                       "ProjectStructure")

        self.relevant_filter = StaticFiltersFactory.build_datatype_filters(
            single_filter=StaticFiltersFactory.RELEVANT_VIEW)
        self.full_filter = StaticFiltersFactory.build_datatype_filters(
            single_filter=StaticFiltersFactory.FULL_VIEW)

    def transactional_teardown_method(self):
        """
        Clear project folders after testing
        """
        self.delete_project_folders()

    def test_set_operation_visibility(self):
        """
        Check if the visibility for an operation is set correctly.
        """
        self.__init_algorithm()
        op1 = Operation(None, self.test_user.id, self.test_project.id,
                        self.algo_inst.id)
        op1 = dao.store_entity(op1)
        assert op1.visible, "The operation should be visible."
        self.project_service.set_operation_and_group_visibility(op1.gid, False)
        updated_op = dao.get_operation_by_id(op1.id)
        assert not updated_op.visible, "The operation should not be visible."

    def test_set_op_and_group_visibility(self, datatype_group_factory):
        """
        When changing the visibility for an operation that belongs to an operation group, we
        should also change the visibility for the entire group of operations.
        """
        group = datatype_group_factory()
        list_of_operations = dao.get_operations_in_group(group.id)
        for operation in list_of_operations:
            assert operation.visible, "The operation should be visible."
        self.project_service.set_operation_and_group_visibility(
            list_of_operations[0].gid, False)
        operations = dao.get_operations_in_group(group.id)
        for operation in operations:
            assert not operation.visible, "The operation should not be visible."

    def test_set_op_group_visibility(self, datatype_group_factory):
        """
        Tests if the visibility for an operation group is set correctly.
        """
        group = datatype_group_factory()
        list_of_operations = dao.get_operations_in_group(group.id)
        for operation in list_of_operations:
            assert operation.visible, "The operation should be visible."
        op_group = dao.get_operationgroup_by_id(group.id)
        self.project_service.set_operation_and_group_visibility(
            op_group.gid, False, True)
        operations = dao.get_operations_in_group(group.id)
        for operation in operations:
            assert not operation.visible, "The operation should not be visible."

    def test_is_upload_operation(self):
        """
        Tests that upload and non-upload algorithms are created and run accordingly
        """
        self.__init_algorithm()
        upload_algo = self._create_algo_for_upload()
        op1 = Operation(None, self.test_user.id, self.test_project.id,
                        self.algo_inst.id)
        op2 = Operation(None, self.test_user.id, self.test_project.id,
                        upload_algo.id)
        operations = dao.store_entities([op1, op2])
        is_upload_operation = self.project_service.is_upload_operation(
            operations[0].gid)
        assert not is_upload_operation, "The operation should not be an upload operation."
        is_upload_operation = self.project_service.is_upload_operation(
            operations[1].gid)
        assert is_upload_operation, "The operation should be an upload operation."

    def test_get_upload_operations(self):
        """
        Test get_all when filter is for Upload category.
        """
        self.__init_algorithm()
        upload_algo = self._create_algo_for_upload()

        project = Project("test_proj_2", self.test_user.id, "desc")
        project = dao.store_entity(project)

        op1 = Operation(None, self.test_user.id, self.test_project.id,
                        self.algo_inst.id)
        op2 = Operation(None,
                        self.test_user.id,
                        project.id,
                        upload_algo.id,
                        status=STATUS_FINISHED)
        op3 = Operation(None, self.test_user.id, self.test_project.id,
                        upload_algo.id)
        op4 = Operation(None,
                        self.test_user.id,
                        self.test_project.id,
                        upload_algo.id,
                        status=STATUS_FINISHED)
        op5 = Operation(None,
                        self.test_user.id,
                        self.test_project.id,
                        upload_algo.id,
                        status=STATUS_FINISHED)
        operations = dao.store_entities([op1, op2, op3, op4, op5])

        upload_operations = self.project_service.get_all_operations_for_uploaders(
            self.test_project.id)
        assert 2 == len(upload_operations), "Wrong number of upload operations."
        upload_ids = [operation.id for operation in upload_operations]
        for i in [3, 4]:
            assert operations[i].id in upload_ids, \
                "The operation should be an upload operation."
        for i in [0, 1, 2]:
            assert not operations[i].id in upload_ids, \
                "The operation should not be an upload operation."

    def test_is_datatype_group(self, datatype_group_factory):
        """
        Tests if a datatype is a group.
        """
        group = datatype_group_factory()
        dt_group = dao.get_generic_entity(DataTypeGroup, group.id)[0]
        is_dt_group = self.project_service.is_datatype_group(dt_group.gid)
        assert is_dt_group, "The datatype should be a datatype group."
        datatypes = dao.get_datatypes_from_datatype_group(dt_group.id)
        is_dt_group = self.project_service.is_datatype_group(datatypes[0].gid)
        assert not is_dt_group, "The datatype should not be a datatype group."

    def test_count_datatypes_in_group(self, datatype_group_factory):
        """ Test that counting dataTypes is correct. Happy flow."""
        group = datatype_group_factory()
        count = dao.count_datatypes_in_group(group.id)
        assert count == group.count_results
        assert count == 6
        datatypes = dao.get_datatypes_from_datatype_group(group.id)
        count = dao.count_datatypes_in_group(datatypes[0].id)
        assert count == 0, "There should be no dataType."

    def test_set_datatype_visibility(self, dummy_datatype_index_factory):
        """
        Check if the visibility for a datatype is set correctly.
        """
        dummy_dt_index = dummy_datatype_index_factory()
        is_visible = dummy_dt_index.visible
        assert is_visible, "The data type should be visible."

        self.project_service.set_datatype_visibility(dummy_dt_index.gid, False)
        is_visible = dao.get_datatype_by_id(dummy_dt_index.id).visible
        assert not is_visible, "The data type should not be visible."

    def test_set_visibility_for_dt_in_group(self, datatype_group_factory):
        """
        Check if the visibility for a datatype from a datatype group is set correctly.
        """
        group = datatype_group_factory()
        datatypes = dao.get_datatypes_from_datatype_group(group.id)
        assert datatypes[0].visible, "The data type should be visible."
        assert datatypes[1].visible, "The data type should be visible."
        self.project_service.set_datatype_visibility(datatypes[0].gid, False)

        db_dt_group = self.project_service.get_datatype_by_id(group.id)
        db_first_dt = self.project_service.get_datatype_by_id(datatypes[0].id)
        db_second_dt = self.project_service.get_datatype_by_id(datatypes[1].id)

        assert not db_dt_group.visible, "The data type group should not be visible."
        assert not db_first_dt.visible, "The data type should not be visible."
        assert not db_second_dt.visible, "The data type should not be visible."

    def test_set_visibility_for_group(self, datatype_group_factory):
        """
        Check if the visibility for a datatype group is set correctly.
        """
        group = datatype_group_factory()
        dt_group = dao.get_generic_entity(DataTypeGroup, group.id)[0]
        datatypes = dao.get_datatypes_from_datatype_group(dt_group.id)

        assert dt_group.visible, "The data type group should be visible."
        assert datatypes[0].visible, "The data type should be visible."
        assert datatypes[1].visible, "The data type should be visible."
        self.project_service.set_datatype_visibility(dt_group.gid, False)

        updated_dt_group = self.project_service.get_datatype_by_id(dt_group.id)
        updated_first_dt = self.project_service.get_datatype_by_id(
            datatypes[0].id)
        updated_second_dt = self.project_service.get_datatype_by_id(
            datatypes[1].id)

        assert not updated_dt_group.visible, "The data type group should not be visible."
        assert not updated_first_dt.visible, "The data type should not be visible."
        assert not updated_second_dt.visible, "The data type should not be visible."

    def test_getdatatypes_from_dtgroup(self, datatype_group_factory):
        """
        Validate that we can retrieve all DTs from a DT_Group
        """
        group = datatype_group_factory()
        exp_datatypes = dao.get_datatypes_from_datatype_group(group.id)
        datatypes = self.project_service.get_datatypes_from_datatype_group(
            group.id)
        assert len(datatypes) == group.count_results, \
            "The number of datatypes in the datatype group is incorrect."
        expected_dict = {
            exp_datatypes[0].id: exp_datatypes[0],
            exp_datatypes[1].id: exp_datatypes[1]
        }
        actual_dict = {
            datatypes[0].id: datatypes[0],
            datatypes[1].id: datatypes[1]
        }

        for key in expected_dict:
            expected = expected_dict[key]
            actual = actual_dict[key]
            assert expected.id == actual.id, "Not the same id."
            assert expected.gid == actual.gid, "Not the same gid."
            assert expected.type == actual.type, "Not the same type."
            assert expected.subject == actual.subject, "Not the same subject."
            assert expected.state == actual.state, "Not the same state."
            assert expected.visible == actual.visible, "The datatype visibility is not correct."
            assert expected.module == actual.module, "Not the same module."
            assert expected.user_tag_1 == actual.user_tag_1, "Not the same user_tag_1."
            assert expected.invalid == actual.invalid, "The invalid field value is not correct."
            assert expected.is_nan == actual.is_nan, "The is_nan field value is not correct."

    @pytest.mark.skipif(no_matlab(), reason="Matlab or Octave not installed!")
    def test_get_inputs_for_operation(self):
        """
        Tests method get_datatype_and_datatypegroup_inputs_for_operation.
        Verifies that the filters' influence over the results is as expected.
        """
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        conn = TestFactory.import_zip_connectivity(self.test_user,
                                                   self.test_project, zip_path)
        view_model = BaseBCTModel()
        view_model.connectivity = conn.gid
        adapter = ABCAdapter.build_adapter_from_class(
            TransitivityBinaryDirected)
        result = OperationService().fire_operation(adapter,
                                                   self.test_user,
                                                   self.test_project.id,
                                                   view_model=view_model)

        conn.visible = False
        dao.store_entity(conn)
        operation = dao.get_operation_by_id(result[0].id)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(
            operation.gid, self.relevant_filter)
        assert len(inputs) == 0

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(
            operation.gid, self.full_filter)
        assert len(inputs) == 1, "Incorrect number of inputs."
        assert conn.id == inputs[0].id, "Retrieved wrong input dataType."

    def test_remove_datatype(self, array_factory):
        """
        Tests the deletion of a datatype.
        """
        # it is a list of 3 elements
        array_wrappers = array_factory(self.test_project)
        dt_list = []
        for array_wrapper in array_wrappers:
            dt_list.append(dao.get_datatype_by_id(array_wrapper[0]))

        self.project_service.remove_datatype(self.test_project.id,
                                             dt_list[0].gid)
        self._check_if_datatype_was_removed(dt_list[0])

    def test_remove_datatype_from_group(self, datatype_group_factory,
                                        project_factory, user_factory):
        """
        Tests the deletion of a datatype group.
        """
        user = user_factory()
        project = project_factory(user)
        group = datatype_group_factory(project=project)

        datatype_group = dao.get_generic_entity(DataTypeGroup, group.id)[0]
        datatypes = dao.get_datatypes_from_datatype_group(group.id)
        datatype_measure = dao.get_generic_entity(DatatypeMeasureIndex,
                                                  datatypes[0].gid,
                                                  "fk_source_gid")[0]

        # When trying to delete one entity in a group the entire group will be removed
        #  First remove the DTMeasures, to avoid FK failures
        self.project_service.remove_datatype(project.id, datatype_measure.gid)
        self.project_service.remove_datatype(project.id, datatypes[0].gid)
        self._check_if_datatype_was_removed(datatypes[0])
        self._check_if_datatype_was_removed(datatypes[1])
        self._check_if_datatype_was_removed(datatype_group)
        self._check_if_datatype_was_removed(datatype_measure)
        self._check_datatype_group_removed(group.id,
                                           datatype_group.fk_operation_group)

    def test_remove_datatype_group(self, datatype_group_factory,
                                   project_factory, user_factory):
        """
        Tests the deletion of a datatype group.
        """
        user = user_factory()
        project = project_factory(user)
        group = datatype_group_factory(project=project)

        datatype_groups = self.get_all_entities(DataTypeGroup)
        datatypes = dao.get_datatypes_from_datatype_group(group.id)
        assert 2 == len(datatype_groups)

        self.project_service.remove_datatype(project.id,
                                             datatype_groups[1].gid)
        self.project_service.remove_datatype(project.id,
                                             datatype_groups[0].gid)
        self._check_if_datatype_was_removed(datatypes[0])
        self._check_if_datatype_was_removed(datatypes[1])
        self._check_if_datatype_was_removed(datatype_groups[0])
        self._check_if_datatype_was_removed(datatype_groups[1])
        self._check_datatype_group_removed(
            group.id, datatype_groups[0].fk_operation_group)

    @pytest.fixture()
    def array_factory(self, operation_factory, connectivity_index_factory):
        def _create_measure(conn, op, op_dir, project_id):
            conn_measure = ConnectivityMeasure()
            conn_measure.connectivity = h5.load_from_index(conn)
            conn_measure.array_data = numpy.array(conn.number_of_regions)

            conn_measure_db = h5.store_complete(conn_measure, op_dir)
            conn_measure_db.fk_from_operation = op.id
            dao.store_entity(conn_measure_db)

            count = dao.count_datatypes(project_id, DataTypeMatrix)
            return count

        def build(project):
            count = dao.count_datatypes(project.id, DataTypeMatrix)
            assert count == 0

            op = operation_factory(test_project=project)
            conn = connectivity_index_factory(op=op)
            storage_path = FilesHelper().get_project_folder(
                op.project, str(op.id))

            count = _create_measure(conn, op, storage_path, project.id)
            assert count == 1

            count = _create_measure(conn, op, storage_path, project.id)
            assert count == 2

            count = _create_measure(conn, op, storage_path, project.id)
            assert count == 3

            return get_filtered_datatypes(project.id, DataTypeMatrix)[0]

        return build
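
    # Hedged usage sketch (illustrative; not in the original source): shows
    # how the fixture above is meant to be consumed by a test.
    def test_array_factory_sketch(self, array_factory):
        """Hypothetical test: the factory should yield exactly 3 wrappers."""
        array_wrappers = array_factory(self.test_project)
        assert len(array_wrappers) == 3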

    def _check_if_datatype_was_removed(self, datatype):
        """
        Check that a certain datatype (and its parent operation) was removed.
        """
        # The dao getters may either raise or return None for a missing id;
        # handle both, so the failure cannot be swallowed by the except clause
        # (the original raised AssertionError inside the try, where it was
        # caught and silently ignored).
        try:
            res = dao.get_datatype_by_id(datatype.id)
        except Exception:
            res = None
        assert res is None, "The datatype was not deleted."

        try:
            res = dao.get_operation_by_id(datatype.fk_from_operation)
        except Exception:
            res = None
        assert res is None, "The operation was not deleted."
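
    # A more compact pytest formulation of the same check: a hedged sketch that
    # assumes the dao getters raise (rather than return None) for missing ids.
    def _check_removed_pytest_style(self, datatype):
        with pytest.raises(Exception):
            dao.get_datatype_by_id(datatype.id)
        with pytest.raises(Exception):
            dao.get_operation_by_id(datatype.fk_from_operation)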

    def _check_datatype_group_removed(self, datatype_group_id,
                                      operation_group_id):
        """
        Checks that both the DataTypeGroup and the OperationGroup were removed.
        """
        # get_generic_entity returns a list, so a removed entity shows up as [];
        # it does not raise, so the original try/except check could never fail.
        remaining = dao.get_generic_entity(DataTypeGroup, datatype_group_id)
        assert len(remaining) == 0, "The DataTypeGroup entity was not removed."

        try:
            op_group = dao.get_operationgroup_by_id(operation_group_id)
        except Exception:
            op_group = None
        assert op_group is None, "The OperationGroup entity was not removed."

    def __init_algorithmn(self):
        """
        Insert some starting data in the database.
        """
        categ1 = AlgorithmCategory('one', True)
        self.categ1 = dao.store_entity(categ1)
        ad = Algorithm(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS,
                       categ1.id)
        self.algo_inst = dao.store_entity(ad)

    @staticmethod
    def _create_algo_for_upload():
        """ Creates a fake algorithm for an upload category. """
        category = dao.store_entity(
            AlgorithmCategory("upload_category", rawinput=True))
        return dao.store_entity(Algorithm("module", "classname", category.id))
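
    # Hedged illustration (not in the original source): how the two helpers
    # above can be combined when a test needs both a regular and an upload
    # algorithm persisted.
    def test_algo_helpers_sketch(self):
        """Hypothetical test exercising both helpers."""
        self.__init_algorithmn()
        upload_algo = self._create_algo_for_upload()
        assert self.algo_inst.id is not None
        assert upload_algo.id is not None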
Example #6
class RemoveTest(TransactionalTestCase):
    """
    This class contains tests for the service layer related to remove of DataTypes.
    """


    def setUp(self):
        """
        Prepare the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()
        self.test_user = TestFactory.create_user()

        self.delete_project_folders()
        result = self.count_all_entities(DataType)
        self.assertEqual(0, result, "There should be no data type in DB")
        result = self.count_all_entities(Project)
        self.assertEqual(0, result)

        self.test_project = TestFactory.import_default_project(self.test_user)
        self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter(test_project=self.test_project)


    def tearDown(self):
        """
        Reset the database when test is done.
        """
        self.delete_project_folders()


    def test_remove_used_connectivity(self):
        """
        Tests the removal of a connectivity that is used by other data types.
        """
        conn, conn_count = self.flow_service.get_available_datatypes(self.test_project.id, Connectivity)
        count_rm = self.count_all_entities(RegionMapping)
        self.assertEqual(1, conn_count)
        self.assertEqual(1, count_rm)

        conn_gid = conn[0][2]
        try:
            self.project_service.remove_datatype(self.test_project.id, conn_gid)
            self.fail("The connectivity is still used. It should not be possible to remove it." + str(conn_gid))
        except RemoveDataTypeException:
            #OK, do nothing
            pass

        res = dao.get_datatype_by_gid(conn_gid)
        self.assertEqual(conn[0].id, res.id, "Used connectivity removed")


    def test_remove_used_surface(self):
        """
        Tries to remove a used surface
        """
        mapping, mapping_count = self.flow_service.get_available_datatypes(self.test_project.id, RegionMapping)
        self.assertEqual(1, mapping_count, "There should be one Mapping.")
        mapping_gid = mapping[0][2]
        mapping = ABCAdapter.load_entity_by_gid(mapping_gid)
        surface = dao.get_datatype_by_gid(mapping.surface.gid)
        self.assertEqual(surface.gid, mapping.surface.gid, "The surfaces should have the same GID")
        try:
            self.project_service.remove_datatype(self.test_project.id, surface.gid)
            self.fail("The surface should still be used by a RegionMapping " + str(surface.gid))
        except RemoveDataTypeException:
            #OK, do nothing
            pass

        res = dao.get_datatype_by_gid(surface.gid)
        self.assertEqual(surface.id, res.id, "A used surface was deleted")


    def _remove_entity(self, data_class, before_number):
        """
        Try to remove entity. Fail otherwise.
        """
        dts, count = self.flow_service.get_available_datatypes(self.test_project.id, data_class)
        self.assertEqual(count, before_number)
        for dt in dts:
            data_gid = dt[2]
            self.project_service.remove_datatype(self.test_project.id, data_gid)
            res = dao.get_datatype_by_gid(data_gid)
            self.assertEqual(None, res, "The entity was not deleted")


    def test_happyflow_removedatatypes(self):
        """
        Tests the happy flow for the deletion of multiple entities.
        They are tested together because they depend on each other and they
        have to be removed in a certain order.
        """
        self._remove_entity(LocalConnectivity, 1)
        self._remove_entity(RegionMapping, 1)
        ### Remove Surfaces
        # SqlAlchemy has no uniform way to retrieve Surface as base (wild-character for polymorphic_identity)
        self._remove_entity(SurfaceData, 4)
        ### Remove a Connectivity
        self._remove_entity(Connectivity, 1)


    def test_remove_time_series(self):
        """
        Tests the happy flow for the deletion of a time series.
        """
        count_ts = self.count_all_entities(TimeSeries)
        self.assertEqual(0, count_ts, "There should be no time series")
        self._create_timeseries()
        series = self.get_all_entities(TimeSeries)
        self.assertEqual(1, len(series), "There should be only one time series")
        self.project_service.remove_datatype(self.test_project.id, series[0].gid)
        res = dao.get_datatype_by_gid(series[0].gid)
        self.assertEqual(None, res, "The time series was not deleted.")


    def test_remove_array_wrapper(self):
        """
        Tests the happy flow for the deletion of an array wrapper.
        """
        count_array = self.count_all_entities(MappedArray)
        self.assertEqual(1, count_array)
        data = {'param_1': 'some value'}
        OperationService().initiate_prelaunch(self.operation, self.adapter_instance, {}, **data)
        array_wrappers = self.get_all_entities(MappedArray)
        self.assertEqual(2, len(array_wrappers))
        array_gid = array_wrappers[0].gid
        self.project_service.remove_datatype(self.test_project.id, array_gid)
        res = dao.get_datatype_by_gid(array_gid)
        self.assertEqual(None, res, "The array wrapper was not deleted.")


    def test_remove_value_wrapper(self):
        """
        Test the deletion of a value wrapper dataType
        """
        count_vals = self.count_all_entities(ValueWrapper)
        self.assertEqual(0, count_vals, "There should be no value wrapper")
        value_wrapper = self._create_value_wrapper()
        self.project_service.remove_datatype(self.test_project.id, value_wrapper.gid)
        res = dao.get_datatype_by_gid(value_wrapper.gid)
        self.assertEqual(None, res, "The value wrapper was not deleted.")


    def _create_timeseries(self):
        """Launch adapter to persist a TimeSeries entity"""
        storage_path = FilesHelper().get_project_folder(self.test_project, str(self.operation.id))

        time_series = TimeSeries()
        time_series.sample_period = 10.0
        time_series.start_time = 0.0
        time_series.storage_path = storage_path
        time_series.write_data_slice(numpy.array([1.0, 2.0, 3.0]))
        time_series.close_file()
        time_series.sample_period_unit = 'ms'

        self._store_entity(time_series, "TimeSeries", "tvb.datatypes.time_series")
        count_ts = self.count_all_entities(TimeSeries)
        self.assertEqual(1, count_ts, "Should be only one TimeSeries")


    def _create_value_wrapper(self):
        """Persist ValueWrapper"""
        value_ = ValueWrapper(data_value=5.0, data_name="my_value")
        self._store_entity(value_, "ValueWrapper", "tvb.datatypes.mapped_values")
        valuew = self.get_all_entities(ValueWrapper)
        self.assertEqual(1, len(valuew), "Should be one value wrapper")
        return ABCAdapter.load_entity_by_gid(valuew[0].gid)


    def _store_entity(self, entity, type_, module):
        """Launch adapter to store a create a persistent DataType."""
        entity.type = type_
        entity.module = module
        entity.subject = "John Doe"
        entity.state = "RAW_STATE"
        entity.set_operation_id(self.operation.id)
        adapter_instance = StoreAdapter([entity])
        OperationService().initiate_prelaunch(self.operation, adapter_instance, {})


class ProjectStructureTest(TransactionalTestCase):
    """
    Test ProjectService methods (part related to Project Data Structure).
    """

    def setUp(self):
        """
        Prepare before each test.
        """
        self.project_service = ProjectService()
        self.flow_service = FlowService()
        self.structure_helper = FilesHelper()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, "ProjectStructure")

        self.relevant_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.RELEVANT_VIEW)
        self.full_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.FULL_VIEW)

    
    def tearDown(self):
        """
        Clear project folders after testing
        """
        self.delete_project_folders()


    def test_set_operation_visibility(self):
        """
        Check if the visibility for an operation is set correctly.
        """
        self.__init_algorithmn()
        op1 = model.Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op1 = dao.store_entity(op1)
        self.assertTrue(op1.visible, "The operation should be visible.")
        self.project_service.set_operation_and_group_visibility(op1.gid, False)
        updated_op = dao.get_operation_by_id(op1.id)
        self.assertFalse(updated_op.visible, "The operation should not be visible.")


    def test_set_op_and_group_visibility(self):
        """
        When changing the visibility for an operation that belongs to an operation group, we
        should also change the visibility for the entire group of operations.
        """
        _, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")
        list_of_operations = dao.get_operations_in_group(group_id)
        for operation in list_of_operations:
            self.assertTrue(operation.visible, "The operation should be visible.")
        self.project_service.set_operation_and_group_visibility(list_of_operations[0].gid, False)
        operations = dao.get_operations_in_group(group_id)
        for operation in operations:
            self.assertFalse(operation.visible, "The operation should not be visible.")


    def test_set_op_group_visibility(self):
        """
        Tests if the visibility for an operation group is set correctly.
        """
        _, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")
        list_of_operations = dao.get_operations_in_group(group_id)
        for operation in list_of_operations:
            self.assertTrue(operation.visible, "The operation should be visible.")
        op_group = dao.get_operationgroup_by_id(group_id)
        self.project_service.set_operation_and_group_visibility(op_group.gid, False, True)
        operations = dao.get_operations_in_group(group_id)
        for operation in operations:
            self.assertFalse(operation.visible, "The operation should not be visible.")


    def test_is_upload_operation(self):
        """
        Tests that upload and non-upload algorithms are created and run accordingly
        """
        self.__init_algorithmn()
        upload_algo = self._create_algo_for_upload()
        op1 = model.Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op2 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "")
        operations = dao.store_entities([op1, op2])
        is_upload_operation = self.project_service.is_upload_operation(operations[0].gid)
        self.assertFalse(is_upload_operation, "The operation should not be an upload operation.")
        is_upload_operation = self.project_service.is_upload_operation(operations[1].gid)
        self.assertTrue(is_upload_operation, "The operation should be an upload operation.")


    def test_get_upload_operations(self):
        """
        Test get_all when filter is for Upload category.
        """
        self.__init_algorithmn()
        upload_algo = self._create_algo_for_upload()

        project = model.Project("test_proj_2", self.test_user.id, "desc")
        project = dao.store_entity(project)

        op1 = model.Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op2 = model.Operation(self.test_user.id, project.id, upload_algo.id, "", status=model.STATUS_FINISHED)
        op3 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "")
        op4 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "", status=model.STATUS_FINISHED)
        op5 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "", status=model.STATUS_FINISHED)
        operations = dao.store_entities([op1, op2, op3, op4, op5])

        upload_operations = self.project_service.get_all_operations_for_uploaders(self.test_project.id)
        self.assertEqual(2, len(upload_operations), "Wrong number of upload operations.")
        upload_ids = [operation.id for operation in upload_operations]
        for i in [3, 4]:
            self.assertTrue(operations[i].id in upload_ids,
                            "The operation should be an upload operation.")
        for i in [0, 1, 2]:
            self.assertFalse(operations[i].id in upload_ids,
                             "The operation should not be an upload operation.")


    def test_is_datatype_group(self):
        """
        Tests if a datatype is a group.
        """
        _, dt_group_id, first_dt, _ = self._create_datatype_group()
        dt_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]
        is_dt_group = self.project_service.is_datatype_group(dt_group.gid)
        self.assertTrue(is_dt_group, "The datatype should be a datatype group.")
        is_dt_group = self.project_service.is_datatype_group(first_dt.gid)
        self.assertFalse(is_dt_group, "The datatype should not be a datatype group.")


    def test_count_datatypes_in_group(self):
        """ Test that counting dataTypes is correct. Happy flow."""
        _, dt_group_id, first_dt, _ = self._create_datatype_group()
        count = dao.count_datatypes_in_group(dt_group_id)
        self.assertEqual(count, 2)
        count = dao.count_datatypes_in_group(first_dt.id)
        self.assertEqual(count, 0, "There should be no dataType.")


    def test_set_datatype_visibility(self):
        """
        Check if the visibility for a datatype is set correctly.
        """
        # it's a list of 3 elements.
        mapped_arrays = self._create_mapped_arrays(self.test_project.id)
        for mapped_array in mapped_arrays:
            is_visible = dao.get_datatype_by_id(mapped_array[0]).visible
            self.assertTrue(is_visible, "The data type should be visible.")

        self.project_service.set_datatype_visibility(mapped_arrays[0][2], False)
        for i in range(len(mapped_arrays)):
            is_visible = dao.get_datatype_by_id(mapped_arrays[i][0]).visible
            if not i:
                self.assertFalse(is_visible, "The data type should not be visible.")
            else:
                self.assertTrue(is_visible, "The data type should be visible.")


    def test_set_visibility_for_dt_in_group(self):
        """
        Check if the visibility for a datatype from a datatype group is set correctly.
        """
        _, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        self.assertTrue(first_dt.visible, "The data type should be visible.")
        self.assertTrue(second_dt.visible, "The data type should be visible.")
        self.project_service.set_datatype_visibility(first_dt.gid, False)

        db_dt_group = self.project_service.get_datatype_by_id(dt_group_id)
        db_first_dt = self.project_service.get_datatype_by_id(first_dt.id)
        db_second_dt = self.project_service.get_datatype_by_id(second_dt.id)

        self.assertFalse(db_dt_group.visible, "The data type group should not be visible.")
        self.assertFalse(db_first_dt.visible, "The data type should not be visible.")
        self.assertFalse(db_second_dt.visible, "The data type should not be visible.")


    def test_set_visibility_for_group(self):
        """
        Check if the visibility for a datatype group is set correctly.
        """
        _, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        dt_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]

        self.assertTrue(dt_group.visible, "The data type group should be visible.")
        self.assertTrue(first_dt.visible, "The data type should be visible.")
        self.assertTrue(second_dt.visible, "The data type should be visible.")
        self.project_service.set_datatype_visibility(dt_group.gid, False)

        updated_dt_group = self.project_service.get_datatype_by_id(dt_group_id)
        updated_first_dt = self.project_service.get_datatype_by_id(first_dt.id)
        updated_second_dt = self.project_service.get_datatype_by_id(second_dt.id)

        self.assertFalse(updated_dt_group.visible, "The data type group should not be visible.")
        self.assertFalse(updated_first_dt.visible, "The data type should not be visible.")
        self.assertFalse(updated_second_dt.visible, "The data type should not be visible.")


    def test_getdatatypes_from_dtgroup(self):
        """
        Validate that we can retrieve all DTs from a DT_Group
        """
        _, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        datatypes = self.project_service.get_datatypes_from_datatype_group(dt_group_id)
        self.assertEqual(len(datatypes), 2, "There should be 2 datatypes in the datatype group.")
        expected_dict = {first_dt.id: first_dt, second_dt.id: second_dt}
        actual_dict = {datatypes[0].id: datatypes[0], datatypes[1].id: datatypes[1]}

        for key in expected_dict.keys():
            expected = expected_dict[key]
            actual = actual_dict[key]
            self.assertEqual(expected.id, actual.id, "Not the same id.")
            self.assertEqual(expected.gid, actual.gid, "Not the same gid.")
            self.assertEqual(expected.type, actual.type, "Not the same type.")
            self.assertEqual(expected.subject, actual.subject, "Not the same subject.")
            self.assertEqual(expected.state, actual.state, "Not the same state.")
            self.assertEqual(expected.visible, actual.visible, "The datatype visibility is not correct.")
            self.assertEqual(expected.module, actual.module, "Not the same module.")
            self.assertEqual(expected.user_tag_1, actual.user_tag_1, "Not the same user_tag_1.")
            self.assertEqual(expected.invalid, actual.invalid, "The invalid field value is not correct.")
            self.assertEqual(expected.is_nan, actual.is_nan, "The is_nan field value is not correct.")


    def test_get_operations_for_dt(self):
        """
        Tests method get_operations_for_datatype.
        Verifies result dictionary has the correct values
        """
        created_ops, datatype_gid = self._create_operations_with_inputs()
        operations = self.project_service.get_operations_for_datatype(datatype_gid, self.relevant_filter)
        self.assertEqual(len(operations), 2)
        self.assertTrue(created_ops[0].id in [operations[0].id, operations[1].id], "Retrieved wrong operations.")
        self.assertTrue(created_ops[2].id in [operations[0].id, operations[1].id], "Retrieved wrong operations.")

        operations = self.project_service.get_operations_for_datatype(datatype_gid, self.full_filter)
        self.assertEqual(len(operations), 4)
        ids = [operations[0].id, operations[1].id, operations[2].id, operations[3].id]
        for i in range(4):
            self.assertTrue(created_ops[i].id in ids, "Retrieved wrong operations.")

        operations = self.project_service.get_operations_for_datatype(datatype_gid, self.relevant_filter, True)
        self.assertEqual(len(operations), 1)
        self.assertEqual(created_ops[4].id, operations[0].id, "Retrieved wrong operation.")

        operations = self.project_service.get_operations_for_datatype(datatype_gid, self.full_filter, True)
        self.assertEqual(len(operations), 2)
        self.assertTrue(created_ops[4].id in [operations[0].id, operations[1].id], "Retrieved wrong operations.")
        self.assertTrue(created_ops[5].id in [operations[0].id, operations[1].id], "Retrieved wrong operations.")


    def test_get_operations_for_dt_group(self):
        """
        Tests method get_operations_for_datatype_group.
        Verifies filters' influence over results is as expected
        """
        created_ops, dt_group_id = self._create_operations_with_inputs(True)

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id, self.relevant_filter)
        self.assertEqual(len(ops), 2)
        self.assertTrue(created_ops[0].id in [ops[0].id, ops[1].id], "Retrieved wrong operations.")
        self.assertTrue(created_ops[2].id in [ops[0].id, ops[1].id], "Retrieved wrong operations.")

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id, self.full_filter)
        self.assertEqual(len(ops), 4, "Incorrect number of operations.")
        ids = [ops[0].id, ops[1].id, ops[2].id, ops[3].id]
        for i in range(4):
            self.assertTrue(created_ops[i].id in ids, "Retrieved wrong operations.")

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id, self.relevant_filter, True)
        self.assertEqual(len(ops), 1)
        self.assertEqual(created_ops[4].id, ops[0].id, "Retrieved wrong operation.")

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id, self.full_filter, True)
        self.assertEqual(len(ops), 2)
        self.assertTrue(created_ops[4].id in [ops[0].id, ops[1].id], "Retrieved wrong operations.")
        self.assertTrue(created_ops[5].id in [ops[0].id, ops[1].id], "Retrieved wrong operations.")


    def test_get_inputs_for_operation(self):
        """
        Tests method get_datatype_and_datatypegroup_inputs_for_operation.
        Verifies filters' influence over results is as expected
        """
        algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        ids = []
        for datatype in array_wrappers:
            ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        parameters = json.dumps({"param_5": "1", "param_1": array_wrappers[0][2],
                                 "param_2": array_wrappers[1][2], "param_3": array_wrappers[2][2], "param_6": "0"})
        operation = model.Operation(self.test_user.id, self.test_project.id, algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.relevant_filter)
        self.assertEqual(len(inputs), 2)
        self.assertTrue(ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertFalse(ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.full_filter)
        self.assertEqual(len(inputs), 3, "Incorrect number of operations.")
        self.assertTrue(ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")

        project, dt_group_id, first_dt, _ = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        parameters = json.dumps({"other_param": "_", "param_1": first_dt.gid})
        operation = model.Operation(self.test_user.id, project.id, algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.relevant_filter)
        self.assertEqual(len(inputs), 0, "Incorrect number of dataTypes.")
        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertEqual(inputs[0].id, dt_group_id, "Wrong dataType.")
        self.assertTrue(inputs[0].id != first_dt.id, "Wrong dataType.")


    def test_get_inputs_for_op_group(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The DataType inputs will be from a DataType group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        second_dt.visible = False
        dao.store_entity(second_dt)

        op_group = model.OperationGroup(project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({"param_5": "1", "param_1": first_dt.gid, "param_6": "2"})
        params_2 = json.dumps({"param_5": "1", "param_4": second_dt.gid, "param_6": "5"})

        algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

        op1 = model.Operation(self.test_user.id, project.id, algo.id, params_1, op_group_id=op_group.id)
        op2 = model.Operation(self.test_user.id, project.id, algo.id, params_2, op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 0)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

        first_dt.visible = True
        dao.store_entity(first_dt)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")


    def test_get_inputs_for_op_group_simple_inputs(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The dataType inputs will not be part of a dataType group.
        """
        # it's a list of 3 elements.
        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        array_wrapper_ids = []
        for datatype in array_wrappers:
            array_wrapper_ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(array_wrapper_ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        op_group = model.OperationGroup(self.test_project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({"param_5": "2", "param_1": array_wrappers[0][2],
                               "param_2": array_wrappers[1][2], "param_6": "7"})
        params_2 = json.dumps({"param_5": "5", "param_3": array_wrappers[2][2],
                               "param_2": array_wrappers[1][2], "param_6": "6"})

        algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

        op1 = model.Operation(self.test_user.id, self.test_project.id, algo.id, params_1, op_group_id=op_group.id)
        op2 = model.Operation(self.test_user.id, self.test_project.id, algo.id, params_2, op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 2)
        self.assertFalse(array_wrapper_ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(array_wrapper_ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(array_wrapper_ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 3, "Incorrect number of dataTypes.")
        self.assertTrue(array_wrapper_ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id])
        self.assertTrue(array_wrapper_ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id])
        self.assertTrue(array_wrapper_ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id])


    def test_remove_datatype(self):
        """
        Tests the deletion of a datatype.
        """
        # it's a list of 3 elements.
        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        dt_list = []
        for array_wrapper in array_wrappers:
            dt_list.append(dao.get_datatype_by_id(array_wrapper[0]))

        self.project_service.remove_datatype(self.test_project.id, dt_list[0].gid)
        self._check_if_datatype_was_removed(dt_list[0])


    def test_remove_datatype_from_group(self):
        """
        Tests that removing a single datatype from a group deletes the whole group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        datatype_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]

        self.project_service.remove_datatype(project.id, first_dt.gid)
        self._check_if_datatype_was_removed(first_dt)
        self._check_if_datatype_was_removed(second_dt)
        self._check_if_datatype_was_removed(datatype_group)
        self._check_datatype_group_removed(dt_group_id, datatype_group.fk_operation_group)


    def test_remove_datatype_group(self):
        """
        Tests the deletion of a datatype group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        datatype_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]

        self.project_service.remove_datatype(project.id, datatype_group.gid)
        self._check_if_datatype_was_removed(first_dt)
        self._check_if_datatype_was_removed(second_dt)
        self._check_if_datatype_was_removed(datatype_group)
        self._check_datatype_group_removed(dt_group_id, datatype_group.fk_operation_group)


    def _create_mapped_arrays(self, project_id):
        """
        :param project_id: the project in which the arrays are created
        :return: a list of dummy `MappedArray`
        """
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 0)
        
        group = dao.get_algorithm_by_module('tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter')
        adapter_instance = ABCAdapter.build_adapter(group)
        data = {'param_1': 'some value'}
        #create 3 data types
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 1)
        
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 2)
        
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        array_wrappers, count = self.flow_service.get_available_datatypes(project_id,
                                                                          "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(count, 3)

        return array_wrappers
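

    # Hedged refactoring sketch (not in the original source): the three
    # identical launches above can be collapsed into a loop while keeping
    # the per-launch count assertions.
    def _create_mapped_arrays_loop_sketch(self, project_id):
        """Behavior-preserving loop version of _create_mapped_arrays (illustrative)."""
        group = dao.get_algorithm_by_module('tvb.tests.framework.adapters.ndimensionarrayadapter',
                                            'NDimensionArrayAdapter')
        adapter_instance = ABCAdapter.build_adapter(group)
        data = {'param_1': 'some value'}
        for expected_count in range(1, 4):
            self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
            count = self.flow_service.get_available_datatypes(project_id,
                                                              "tvb.datatypes.arrays.MappedArray")[1]
            self.assertEqual(count, expected_count)
        return self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[0]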


    def _create_operation(self, project_id, algorithm_id):
        """
        Create a dummy operation.
        :param project_id: the project in which the operation is created
        :param algorithm_id: the algorithm to be run for the operation
        :return: a dummy `Operation` with the given specifications
        """
        algorithm = dao.get_algorithm_by_id(algorithm_id)
        meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
                DataTypeMetaData.KEY_STATE: "RAW_DATA"}
        operation = model.Operation(self.test_user.id, project_id, algorithm.id, 'test params',
                                    meta=json.dumps(meta), status=model.STATUS_FINISHED)
        return dao.store_entity(operation)


    def _create_datatype_group(self):
        """
        Creates a project, one DataTypeGroup with 2 DataTypes into the new group.
        """
        test_project = TestFactory.create_project(self.test_user, "NewProject")

        all_operations = dao.get_filtered_operations(test_project.id, None, is_count=True)
        self.assertEqual(0, all_operations, "There should be no operation.")
        
        datatypes, op_group_id = TestFactory.create_group(self.test_user, test_project)
        dt_group = dao.get_datatypegroup_by_op_group_id(op_group_id)

        return test_project, dt_group.id, datatypes[0], datatypes[1]



    def _create_operations_with_inputs(self, is_group_parent=False):
        """
        Method used for creating a complex tree of operations.

        If 'is_group_parent' is True then a new group will be created and one of its entries
        will be used as input for the returned operations.
        """
        group_dts, root_op_group_id = TestFactory.create_group(self.test_user, self.test_project)
        if is_group_parent:
            datatype_gid = group_dts[0].gid
        else:
            datatype_gid = ProjectServiceTest._create_value_wrapper(self.test_user, self.test_project)[1]

        parameters = json.dumps({"param_name": datatype_gid})

        ops = []
        for i in range(4):
            ops.append(TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project))
            if i in [1, 3]:
                ops[i].visible = False
            ops[i].parameters = parameters
            ops[i] = dao.store_entity(ops[i])
            
        #groups
        _, ops_group = TestFactory.create_group(self.test_user, self.test_project)
        ops_group = dao.get_operations_in_group(ops_group)
        self.assertEqual(2, len(ops_group))
        ops_group[0].parameters = parameters
        ops_group[0] = dao.store_entity(ops_group[0])
        ops_group[1].visible = False
        ops_group[1].parameters = parameters
        ops_group[1] = dao.store_entity(ops_group[1])

        ops.extend(ops_group)
        if is_group_parent:
            dt_group = dao.get_datatypegroup_by_op_group_id(root_op_group_id)
            return ops, dt_group.id
        return ops, datatype_gid


    def _check_if_datatype_was_removed(self, datatype):
        """
        Check that a certain datatype (and its parent operation) was removed.
        """
        # The dao getters may either raise or return None for a missing id;
        # handle both, so the failure cannot be swallowed by the except clause
        # (the original called self.fail() inside the try, where the resulting
        # AssertionError was caught and silently ignored).
        try:
            res = dao.get_datatype_by_id(datatype.id)
        except Exception:
            res = None
        self.assertEqual(None, res, "The datatype was not deleted.")

        try:
            res = dao.get_operation_by_id(datatype.fk_from_operation)
        except Exception:
            res = None
        self.assertEqual(None, res, "The operation was not deleted.")
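

    # Equivalent unittest-style sketch (hedged): valid only if the dao getters
    # raise for missing entities rather than returning None.
    def _check_removed_assert_raises(self, datatype):
        with self.assertRaises(Exception):
            dao.get_datatype_by_id(datatype.id)
        with self.assertRaises(Exception):
            dao.get_operation_by_id(datatype.fk_from_operation)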


    def _check_datatype_group_removed(self, datatype_group_id, operation_group_id):
        """
        Checks that both the DataTypeGroup and the OperationGroup were removed.
        """
        # get_generic_entity returns a list, so a removed entity shows up as [];
        # it does not raise, so the original try/except check could never fail.
        remaining = dao.get_generic_entity(model.DataTypeGroup, datatype_group_id)
        self.assertEqual(0, len(remaining), "The DataTypeGroup entity was not removed.")

        try:
            op_group = dao.get_operationgroup_by_id(operation_group_id)
        except Exception:
            op_group = None
        self.assertEqual(None, op_group, "The OperationGroup entity was not removed.")


    def __init_algorithmn(self):
        """
        Insert some starting data in the database.
        """
        categ1 = model.AlgorithmCategory('one', True)
        self.categ1 = dao.store_entity(categ1)
        ad = model.Algorithm(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS, categ1.id)
        self.algo_inst = dao.store_entity(ad)

    @staticmethod
    def _create_algo_for_upload():
        """ Creates a fake algorithm for an upload category. """
        category = dao.store_entity(model.AlgorithmCategory("upload_category", rawinput=True))
        return dao.store_entity(model.Algorithm("module", "classname", category.id))


class ProjectController(BaseController):
    """
    Displays pages which deals with Project data management.
    """

    PRROJECTS_FOR_LINK_KEY = "projectsforlink"
    PRROJECTS_LINKED_KEY = "projectslinked"
    KEY_OPERATION_FILTERS = "operationfilters"

    def __init__(self):
        super(ProjectController, self).__init__()
        self.project_service = ProjectService()


    @expose_page
    @settings
    def index(self):
        """
        Display project main-menu. Choose one project to work with.
        """
        current_project = common.get_current_project()
        if current_project is None:
            raise cherrypy.HTTPRedirect("/project/viewall")
        template_specification = dict(mainContent="project_submenu", title="TVB Project Menu")
        return self.fill_default_attributes(template_specification)


    @expose_page
    @settings
    def viewall(self, create=False, page=1, selected_project_id=None, **_):
        """
        Display all existing projects. Choose one project to work with.
        """
        page = int(page)
        if cherrypy.request.method == 'POST' and create:
            raise cherrypy.HTTPRedirect('/project/editone')
        current_user_id = common.get_logged_user().id

        ## Select project if user choose one.
        if selected_project_id is not None:
            try:
                selected_project = self.project_service.find_project(selected_project_id)
                self._mark_selected(selected_project)
            except ProjectServiceException as excep:
                self.logger.error(excep)
                self.logger.warning("Could not select project: " + str(selected_project_id))
                common.set_error_message("Could not select project: " + str(selected_project_id))

        #Prepare template response
        prjs, pages_no = self.project_service.retrieve_projects_for_user(current_user_id, page)
        template_specification = dict(mainContent="project/viewall", title="Available TVB Projects",
                                      projectsList=prjs, page_number=page, total_pages=pages_no)
        return self.fill_default_attributes(template_specification, 'list')


    @cherrypy.expose
    @handle_error(redirect=True)
    @check_user
    @settings
    def projectupload(self, **data):
        """Upload Project from TVB ZIP."""
        self.logger.debug("Uploading ..." + str(data))
        try:
            upload_param = "uploadedfile"
            if upload_param in data and data[upload_param]:
                import_service = ImportService()
                import_service.import_project_structure(data[upload_param], common.get_logged_user().id)
        except ServicesBaseException as excep:
            self.logger.warning(excep.message)
            common.set_error_message(excep.message)
        raise cherrypy.HTTPRedirect('/project/viewall')


    def _remove_project(self, project_id):
        """Private method for removing project."""
        try:
            self.project_service.remove_project(project_id)
        except ServicesBaseException as exc:
            self.logger.error("Could not delete project!")
            self.logger.exception(exc)
            common.set_error_message(exc.message)
        prj = common.get_current_project()
        if prj is not None and prj.id == int(project_id):
            common.remove_from_session(common.KEY_PROJECT)


    def _persist_project(self, data, project_id, is_create, current_user):
        """Private method to persist"""
        data = EditForm().to_python(data)
        saved_project = self.project_service.store_project(current_user, is_create, project_id, **data)
        selected_project = common.get_current_project()
        projects, _ = self.project_service.retrieve_projects_for_user(current_user.id, 1)
        if len(projects) == 1:
            selected_project = saved_project
        if selected_project is None or (saved_project.id == selected_project.id):
            self._mark_selected(saved_project)


    @expose_page
    @settings
    def editone(self, project_id=None, cancel=False, save=False, delete=False, **data):
        """
        Create or change a Project. When project_id is empty we create a
        new entity; otherwise we edit an existing one.
        """
        if cherrypy.request.method == 'POST' and cancel:
            raise cherrypy.HTTPRedirect('/project')
        if cherrypy.request.method == 'POST' and delete:
            self._remove_project(project_id)
            raise cherrypy.HTTPRedirect('/project/viewall')

        current_user = common.get_logged_user()
        is_create = False
        if project_id is None or not int(project_id):
            is_create = True
            data["administrator"] = current_user.username
        else:
            current_project = self.project_service.find_project(project_id)
            if not save:
                # Only when we do not have submitted data,
                # populate fields with initial values for edit.
                data = dict(name=current_project.name, description=current_project.description)
            data["administrator"] = current_project.administrator.username
            self._mark_selected(current_project)
        data["project_id"] = project_id

        template_specification = dict(mainContent="project/editone", data=data, isCreate=is_create,
                                      title="Create new project" if is_create else "Edit " + data["name"],
                                      editUsersEnabled=(current_user.username == data['administrator']))
        try:
            if cherrypy.request.method == 'POST' and save:
                common.remove_from_session(common.KEY_PROJECT)
                common.remove_from_session(common.KEY_CACHED_SIMULATOR_TREE)
                self._persist_project(data, project_id, is_create, current_user)
                raise cherrypy.HTTPRedirect('/project/viewall')
        except formencode.Invalid as excep:
            self.logger.debug(str(excep))
            template_specification[common.KEY_ERRORS] = excep.unpack_errors()
        except ProjectServiceException as excep:
            self.logger.debug(str(excep))
            common.set_error_message(excep.message)
            raise cherrypy.HTTPRedirect('/project/viewall')

        all_users, members, pages = self.user_service.get_users_for_project(current_user.username, project_id)
        template_specification['usersList'] = all_users
        template_specification['usersMembers'] = [m.id for m in members]
        template_specification['usersPages'] = pages
        template_specification['usersCurrentPage'] = 1
        return self.fill_default_attributes(template_specification, 'properties')


    @expose_fragment('project/project_members')
    def getmemberspage(self, page, project_id=None):
        """Retrieve a new page of Project members."""
        current_name = common.get_logged_user().username
        all_users, members, _ = self.user_service.get_users_for_project(current_name, project_id, int(page))
        edit_enabled = True
        if project_id is not None:
            current_project = self.project_service.find_project(project_id)
            edit_enabled = (current_name == current_project.administrator.username)
        return dict(usersList=all_users, usersMembers=[m.id for m in members],
                    usersCurrentPage=page, editUsersEnabled=edit_enabled)


    @expose_json
    def set_visibility(self, entity_type, entity_gid, to_de_relevant):
        """
        Method used for setting the relevancy/visibility of a DataType(Group) or Operation(Group).
        """
        to_de_relevant = string2bool(to_de_relevant)
        is_operation, is_group = False, False
        if entity_type == graph_structures.NODE_OPERATION_TYPE:
            is_group = False
            is_operation = True
        elif entity_type == graph_structures.NODE_OPERATION_GROUP_TYPE:
            is_group = True
            is_operation = True

        if is_operation:
            self.project_service.set_operation_and_group_visibility(entity_gid, to_de_relevant, is_group)
        else:
            self.project_service.set_datatype_visibility(entity_gid, to_de_relevant)
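
    # Hypothetical client usage (illustrative; parameter names come from the
    # signature above): the UI would POST to /project/set_visibility with
    # entity_type=<graph node type>, entity_gid=<GID>, to_de_relevant=False
    # to mark a node as irrelevant.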


    @expose_page
    @settings
    def viewoperations(self, project_id=None, page=1, filtername=None, reset_filters=None):
        """
        Display table of operations for a given project selected
        """
        if (project_id is None) or (not int(project_id)):
            raise cherrypy.HTTPRedirect('/project')

        ## Toggle filters
        filters = self.__get_operations_filters()
        selected_filters = None
        for my_filter in filters:
            if cherrypy.request.method == 'POST' and (filtername is not None):
                if reset_filters:
                    my_filter.selected = False
                elif my_filter.display_name == filtername:
                    my_filter.selected = not my_filter.selected
            if my_filter.selected:
                selected_filters = my_filter + selected_filters
        ## Iterate one more time, to update counters
        for my_filter in filters:
            if not my_filter.selected:
                new_count = self.project_service.count_filtered_operations(project_id, my_filter + selected_filters)
                my_filter.passes_count = new_count
            else:
                my_filter.passes_count = ''

        page = int(page)
        project, total_op_count, filtered_ops, pages_no = self.project_service.retrieve_project_full(
            project_id, selected_filters, page)
        ## Select current project
        self._mark_selected(project)

        template_specification = dict(mainContent="project/viewoperations", project=project,
                                      title='Past operations for " ' + project.name + '"', operationsList=filtered_ops,
                                      total_op_count=total_op_count, total_pages=pages_no, page_number=page,
                                      filters=filters, no_filter_selected=(selected_filters is None), model=model)
        return self.fill_default_attributes(template_specification, 'operations')
    
    
    @expose_fragment("call_out_project")
    def generate_call_out_control(self):
        """
        Returns the content of the call-out for the currently selected project.
        """
        self.update_operations_count()
        return {'selectedProject': common.get_current_project()}


    def __get_operations_filters(self):
        """
        Filters for VIEW_ALL_OPERATIONS page.
        Get from session currently selected filters, or build a new set of filters.
        """
        session_filters = common.get_from_session(self.KEY_OPERATION_FILTERS)
        if session_filters:
            return session_filters

        else:
            sim_group = self.flow_service.get_algorithm_by_module_and_class(IntrospectionRegistry.SIMULATOR_MODULE,
                                                                            IntrospectionRegistry.SIMULATOR_CLASS)
            new_filters = StaticFiltersFactory.build_operations_filters(sim_group, common.get_logged_user().id)
            common.add2session(self.KEY_OPERATION_FILTERS, new_filters)
            return new_filters


    @expose_fragment("overlay_confirmation")
    def show_confirmation_overlay(self, **data):
        """
        Returns the content of a confirmation dialog, with a given question.
        """
        if not data:
            data = {}
        question = data.get('question', "Are you sure ?")
        data['question'] = question
        return self.fill_default_attributes(data)
    

    @expose_fragment("overlay")
    def get_datatype_details(self, entity_gid, back_page='null', exclude_tabs=None):
        """
        Returns the HTML which contains the details for the given dataType.
        :param back_page: if different from 'null' (the default) it will redirect to it after saving metadata changes
        """
        if exclude_tabs is None:
            exclude_tabs = []
        selected_project = common.get_current_project()
        datatype_details, states, entity = self.project_service.get_datatype_details(entity_gid)

        ### Load DataType categories
        current_type = datatype_details.data_type
        datatype_gid = datatype_details.gid
        categories = {}
        if not entity.invalid:
            categories = self.flow_service.get_launchable_algorithms(datatype_gid)

        is_group = False
        if datatype_details.operation_group_id is not None:
            ## Is a DataTypeGroup
            is_group = True

        ### Retrieve links
        linkable_projects_dict = self._get_linkable_projects_dict(entity.id)
        ### Load all exporters
        exporters = {}
        if not entity.invalid:
            exporters = ExportManager().get_exporters_for_data(entity)
        is_relevant = entity.visible

        template_specification = {"entity_gid": entity_gid,
                                  "nodeFields": datatype_details.get_ui_fields(),
                                  "allStates": states,
                                  "project": selected_project,
                                  "categories": categories,
                                  "exporters": exporters,
                                  "datatype_id": entity.id,
                                  "isGroup": is_group,
                                  "isRelevant": is_relevant,
                                  "nodeType": 'datatype',
                                  "backPageIdentifier": back_page}
        template_specification.update(linkable_projects_dict)

        overlay_class = "can-browse editor-node node-type-" + str(current_type).lower()
        if is_relevant:
            overlay_class += " node-relevant"
        else:
            overlay_class += " node_irrelevant"
        overlay_title = current_type
        if datatype_details.datatype_tag_1:
            overlay_title += " " + datatype_details.datatype_tag_1

        tabs = []
        overlay_indexes = []
        if "Metadata" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Metadata", "metadata"))
            overlay_indexes.append(0)
        if "Analyzers" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Analyzers", "analyzers", enabled=categories and 'Analyze' in categories))
            overlay_indexes.append(1)
        if "Visualizers" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Visualizers", "visualizers", enabled=categories and 'View' in categories))
            overlay_indexes.append(2)

        enable_link_tab = False
        if (not entity.invalid) and (linkable_projects_dict is not None):
            projects_for_link = linkable_projects_dict.get(self.PRROJECTS_FOR_LINK_KEY)
            if projects_for_link is not None and len(projects_for_link) > 0:
                enable_link_tab = True
            projects_linked = linkable_projects_dict.get(self.PRROJECTS_LINKED_KEY)
            if projects_linked is not None and len(projects_linked) > 0:
                enable_link_tab = True
        if "Links" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Links", "link_to", enabled=enable_link_tab))
            overlay_indexes.append(3)
        if "Export" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Export", "export", enabled=(exporters and len(exporters) > 0)))
            overlay_indexes.append(4)
        if "Derived DataTypes" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Derived DataTypes", "result_dts",
                                             enabled=self.project_service.count_datatypes_generated_from(entity_gid)))
            overlay_indexes.append(5)
        template_specification = self.fill_overlay_attributes(template_specification, "DataType Details",
                                                              overlay_title, "project/details_datatype_overlay",
                                                              overlay_class, tabs, overlay_indexes)
        template_specification['baseUrl'] = TvbProfile.current.web.BASE_URL
        return FlowController().fill_default_attributes(template_specification)


    @expose_fragment('project/linkable_projects')
    def get_linkable_projects(self, datatype_id, is_group, entity_gid):
        """
        Returns the HTML which displays the link-able projects for the given dataType
        """
        template_specification = self._get_linkable_projects_dict(datatype_id)
        template_specification["entity_gid"] = entity_gid
        template_specification["isGroup"] = is_group
        return template_specification


    def _get_linkable_projects_dict(self, datatype_id):
        """" UI ready dictionary with projects in which current DataType can be linked."""
        self.logger.debug("Searching projects to link for DT " + str(datatype_id))
        for_link, linked = self.project_service.get_linkable_projects_for_user(common.get_logged_user().id, datatype_id)

        projects_for_link, linked_projects = None, None
        if for_link:
            projects_for_link = {}
            for project in for_link:
                projects_for_link[project.id] = project.name

        if linked:
            linked_projects = {}
            for project in linked:
                linked_projects[project.id] = project.name

        template_specification = {self.PRROJECTS_FOR_LINK_KEY: projects_for_link,
                                  self.PRROJECTS_LINKED_KEY: linked_projects,
                                  "datatype_id": datatype_id}
        return template_specification


    @expose_fragment("overlay")
    def get_operation_details(self, entity_gid, is_group=False, back_page='burst'):
        """
        Returns the HTML which contains the details for the given operation.
        """
        if string2bool(str(is_group)):
            ### we have an OperationGroup entity.
            template_specification = self._compute_operation_details(entity_gid, True)
            # All operations in a group are expected to share the same visibility
            template_specification["nodeType"] = graph_structures.NODE_OPERATION_GROUP_TYPE

        else:
            ### we have a simple Operation
            template_specification = self._compute_operation_details(entity_gid)
            template_specification["displayRelevantButton"] = True
            template_specification["nodeType"] = graph_structures.NODE_OPERATION_TYPE

        template_specification["backPageIdentifier"] = back_page
        overlay_class = "can-browse editor-node node-type-" + template_specification["nodeType"]
        if template_specification["isRelevant"]:
            overlay_class += " node-relevant"
        else:
            overlay_class += " node_irrelevant"

        template_specification = self.fill_overlay_attributes(template_specification, "Details", "Operation",
                                                              "project/details_operation_overlay", overlay_class)
        return FlowController().fill_default_attributes(template_specification)


    def _compute_operation_details(self, entity_gid, is_group=False):
        """
        Returns a dictionary which contains the details for the given operation.
        """
        selected_project = common.get_current_project()
        op_details = self.project_service.get_operation_details(entity_gid, is_group)
        operation_id = op_details.operation_id

        display_reload_btn = True
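        # The reload button is shown only for standalone, non-upload operations:
        # it is hidden when the operation belongs to a group or a burst, or when
        # its algorithm category is one of the "raw" (upload) categories.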
        operation = self.flow_service.load_operation(operation_id)

        if (operation.fk_operation_group is not None) or (operation.burst is not None):
            display_reload_btn = False
        else:
            op_categ_id = operation.algorithm.fk_category
            raw_categories = self.flow_service.get_raw_categories()
            for category in raw_categories:
                if category.id == op_categ_id:
                    display_reload_btn = False
                    break

        template_specification = {"entity_gid": entity_gid,
                                  "nodeFields": op_details.get_ui_fields(),
                                  "operationId": operation_id,
                                  "displayReloadBtn": display_reload_btn,
                                  "project": selected_project,
                                  "isRelevant": operation.visible}
        return template_specification


    def get_project_structure_grouping(self):
        user = common.get_logged_user()
        return user.get_project_structure_grouping()


    def set_project_structure_grouping(self, first, second):
        user = common.get_logged_user()
        user.set_project_structure_grouping(first, second)
        self.user_service.edit_user(user)

    @expose_page
    @settings
    def editstructure(self, project_id=None, last_selected_tab="treeTab", first_level=None,
                      second_level=None, filter_input="", visibility_filter=None, **_ignored):
        """
        Return the page skeleton for displaying the project structure.
        """
        try:
            int(project_id)
        except (ValueError, TypeError):
            raise cherrypy.HTTPRedirect('/project')

        if first_level is None or second_level is None:
            first_level, second_level = self.get_project_structure_grouping()

        selected_project = self.project_service.find_project(project_id)
        self._mark_selected(selected_project)
        data = self.project_service.get_filterable_meta()
        filters = StaticFiltersFactory.build_datatype_filters(selected=visibility_filter)
        template_specification = dict(mainContent="project/structure", baseUrl=TvbProfile.current.web.BASE_URL,
                                      title=selected_project.name,
                                      project=selected_project, data=data,
                                      lastSelectedTab=last_selected_tab, firstLevelSelection=first_level,
                                      secondLevelSelection=second_level, filterInputValue=filter_input, filters=filters)
        return self.fill_default_attributes(template_specification, 'data')


    @expose_fragment("overlay")
    def get_data_uploader_overlay(self, project_id):
        """
        Returns the html which displays a dialog which allows the user
        to upload certain data into the application.
        """
        upload_algorithms = self.flow_service.get_upload_algorithms()

        flow_controller = FlowController()
        algorithms_interface = {}
        tabs = []

        for algorithm in upload_algorithms:
            adapter_template = flow_controller.get_adapter_template(project_id, algorithm.id, True, None)
            algorithms_interface['template_for_algo_' + str(algorithm.id)] = adapter_template
            tabs.append(OverlayTabDefinition(algorithm.displayname, algorithm.subsection_name,
                                             description=algorithm.description))

        template_specification = self.fill_overlay_attributes(None, "Upload", "Upload data for this project",
                                                              "project/upload_data_overlay", "dialog-upload",
                                                              tabs_vertical=tabs)
        template_specification['uploadAlgorithms'] = upload_algorithms
        template_specification['projectId'] = project_id
        template_specification['algorithmsInterface'] = algorithms_interface

        return flow_controller.fill_default_attributes(template_specification)


    @expose_fragment("overlay")
    def get_project_uploader_overlay(self):
        """
        Returns the html which displays a dialog which allows the user
        to upload an entire project.
        """
        template_specification = self.fill_overlay_attributes(None, "Upload", "Project structure",
                                                              "project/upload_project_overlay", "dialog-upload")

        return FlowController().fill_default_attributes(template_specification)


    @expose_page
    def launchloader(self, project_id, algorithm_id, cancel=False, **data):
        """ 
        Start Upload mechanism
        """
        success_link = "/project/editstructure/" + str(project_id)
        # do not allow GET
        if cherrypy.request.method != 'POST' or cancel:
            raise cherrypy.HTTPRedirect(success_link)
        try:
            int(project_id)
            int(algorithm_id)
        except (ValueError, TypeError):
            raise cherrypy.HTTPRedirect(success_link)

        project = self.project_service.find_project(project_id)
        algorithm = self.flow_service.get_algorithm_by_identifier(algorithm_id)
        FlowController().execute_post(project.id, success_link, algorithm.fk_category, algorithm, **data)

        raise cherrypy.HTTPRedirect(success_link)


    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def readjsonstructure(self, project_id, visibility_filter=StaticFiltersFactory.FULL_VIEW,
                          first_level=None, second_level=None, filter_value=None):
        """
        AJAX exposed method. 
        Will return the complete JSON for Project's structure, or filtered tree
        (filter only Relevant entities or Burst only Data).
        """
        if first_level is None or second_level is None:
            first_level, second_level = self.get_project_structure_grouping()
        else:
            self.set_project_structure_grouping(first_level, second_level)

        selected_filter = StaticFiltersFactory.build_datatype_filters(single_filter=visibility_filter)

        project = self.project_service.find_project(project_id)
        json_structure = self.project_service.get_project_structure(project, selected_filter,
                                                                    first_level, second_level, filter_value)
        # Encoding the JSON explicitly is necessary; otherwise the JSTree library
        # raises an error while loading the tree content through AJAX.
        encoder = JSONEncoder()
        return encoder.iterencode(json_structure)


    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def createlink(self, link_data, project_id, is_group):
        """
        Delegate the creation of the actual link to the flow service.
        """
        if not string2bool(str(is_group)):
            self.flow_service.create_link([link_data], project_id)
        else:
            all_data = self.project_service.get_datatype_in_group(link_data)
            # Link all Dts in group and the DT_Group entity
            data_ids = [data.id for data in all_data]
            data_ids.append(int(link_data))
            self.flow_service.create_link(data_ids, project_id)


    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def removelink(self, link_data, project_id, is_group):
        """
        Delegate the creation of the actual link to the flow service.
        """
        if not string2bool(str(is_group)):
            self.flow_service.remove_link(link_data, project_id)
        else:
            all_data = self.project_service.get_datatype_in_group(link_data)
            for data in all_data:
                self.flow_service.remove_link(data.id, project_id)
            self.flow_service.remove_link(int(link_data), project_id)


    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def noderemove(self, project_id, node_gid):
        """
        AJAX exposed method, to execute operation of data removal.
        """
        try:
            if node_gid is None:
                return "Remove can only be applied on a Node with GID!"
            self.logger.debug("Removing data with GID=" + str(node_gid))
            self.project_service.remove_datatype(project_id, node_gid)
        except (RemoveDataTypeException, ServicesBaseException) as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            return excep.message
        return None


    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def updatemetadata(self, **data):
        """ Submit MetaData edited for DataType(Group) or Operation(Group). """
        try:
            self.project_service.update_metadata(data)
        except ServicesBaseException as excep:
            self.logger.error("Could not execute MetaData update!")
            self.logger.exception(excep)
            common.set_error_message(excep.message)
            return excep.message


    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def downloaddata(self, data_gid, export_module):
        """ Export the data to a default path of TVB_STORAGE/PROJECTS/project_name """
        current_prj = common.get_current_project()
        # Load data by GID
        entity = ABCAdapter.load_entity_by_gid(data_gid)
        # Do real export
        export_mng = ExportManager()
        file_name, file_path, delete_file = export_mng.export_data(entity, export_module, current_prj)
        if delete_file:
            # We force parent folder deletion because export process generated it.
            self.mark_file_for_delete(file_path, True)

        self.logger.debug("Data exported in file: " + str(file_path))
        return serve_file(file_path, "application/x-download", "attachment", file_name)


    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def downloadproject(self, project_id):
        """
        Export the data from a whole project.
        """
        current_project = self.project_service.find_project(project_id)
        export_mng = ExportManager()
        export_file = export_mng.export_project(current_project)

        # Register export file for delete when download complete
        # We force parent folder deletion because export process generated it.
        self.mark_file_for_delete(export_file, True)

        return serve_file(export_file, "application/x-download", "attachment")


    # Methods related to the data-structure graph

    @expose_json
    def create_json(self, item_gid, item_type, visibility_filter):
        """
        Method used for creating a JSON representation of a graph.
        """
        selected_filter = StaticFiltersFactory.build_datatype_filters(single_filter=visibility_filter)
        project = common.get_current_project()

        is_upload_operation = (item_type == graph_structures.NODE_OPERATION_TYPE) and \
                              (self.project_service.is_upload_operation(item_gid) or item_gid == "firstOperation")
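        # "firstOperation" appears to act as a sentinel GID for the root of the
        # uploads branch: in that case every uploader operation in the project
        # becomes a separate graph branch together with its outputs.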
        if is_upload_operation:
            graph_branches = []
            uploader_operations = self.project_service.get_all_operations_for_uploaders(project.id)
            for operation in uploader_operations:
                dt_outputs = self.project_service.get_results_for_operation(operation.id, selected_filter)
                dt_outputs = self._create_datatype_nodes(dt_outputs)
                parent_op = self._create_operation_nodes([operation], item_gid)
                branch = graph_structures.GraphComponent([], parent_op, dt_outputs, [])
                graph_branches.append(branch)
            graph = graph_structures.FullGraphStructure(graph_branches)
            return graph.prepare_for_json()

        dt_inputs, parent_op, dt_outputs, op_inputs = [], [], [], []
        if item_type == graph_structures.NODE_OPERATION_TYPE:
            dt_inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(item_gid, selected_filter)
            parent_op = self.project_service.load_operation_by_gid(item_gid)
            dt_outputs = self.project_service.get_results_for_operation(parent_op.id, selected_filter)
            #create graph nodes
            dt_inputs, parent_op, dt_outputs, op_inputs = self._create_nodes(dt_inputs, [parent_op],
                                                                             dt_outputs, [], item_gid)

        elif item_type == graph_structures.NODE_OPERATION_GROUP_TYPE:
            parent_op_group = self.project_service.get_operation_group_by_gid(item_gid)
            dt_inputs = self.project_service.get_datatypes_inputs_for_operation_group(parent_op_group.id,
                                                                                      selected_filter)
            datatype_group = self.project_service.get_datatypegroup_by_op_group_id(parent_op_group.id)
            datatype = self.project_service.get_datatype_by_id(datatype_group.id)

            dt_inputs = self._create_datatype_nodes(dt_inputs)
            parent_op = graph_structures.NodeStructure.build_structure_for_operation_group(parent_op_group.gid)
            parent_op.selected = True
            parent_op = [parent_op]
            if selected_filter.display_name == StaticFiltersFactory.RELEVANT_VIEW and datatype.visible is False:
                dt_outputs = []
            else:
                dt_outputs = self._create_datatype_nodes([datatype])

        elif item_type == graph_structures.NODE_DATATYPE_TYPE:
            selected_dt = ABCAdapter.load_entity_by_gid(item_gid)
            if self.project_service.is_datatype_group(item_gid):
                datatype_group = self.project_service.get_datatypegroup_by_gid(selected_dt.gid)
                parent_op_group = self.project_service.get_operation_group_by_id(datatype_group.fk_operation_group)
                dt_inputs = self.project_service.get_datatypes_inputs_for_operation_group(parent_op_group.id,
                                                                                          selected_filter)
                op_inputs = self.project_service.get_operations_for_datatype_group(selected_dt.id, selected_filter)
                op_inputs_in_groups = self.project_service.get_operations_for_datatype_group(selected_dt.id,
                                                                                             selected_filter,
                                                                                             only_in_groups=True)
                #create graph nodes
                dt_inputs, parent_op, dt_outputs, op_inputs = self._create_nodes(dt_inputs, [], [selected_dt],
                                                                                 op_inputs, item_gid)
                parent_op = [graph_structures.NodeStructure.build_structure_for_operation_group(parent_op_group.gid)]
                op_inputs_in_groups = self._create_operation_group_nodes(op_inputs_in_groups)
                op_inputs.extend(op_inputs_in_groups)
            else:
                parent_op = self.flow_service.load_operation(selected_dt.fk_from_operation)
                dt_inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(
                    parent_op.gid, selected_filter)
                op_inputs = self.project_service.get_operations_for_datatype(selected_dt.gid, selected_filter)
                op_inputs_in_groups = self.project_service.get_operations_for_datatype(selected_dt.gid, selected_filter,
                                                                                       only_in_groups=True)
                dt_outputs = self.project_service.get_results_for_operation(parent_op.id, selected_filter)
                #create graph nodes
                dt_inputs, parent_op, dt_outputs, op_inputs = self._create_nodes(dt_inputs, [parent_op], dt_outputs,
                                                                                 op_inputs, item_gid)
                op_inputs_in_groups = self._create_operation_group_nodes(op_inputs_in_groups)
                op_inputs.extend(op_inputs_in_groups)

        else:
            self.logger.error("Invalid item type: " + str(item_type))
            raise Exception("Invalid item type.")

        branch = graph_structures.GraphComponent(dt_inputs, parent_op, dt_outputs, op_inputs)
        graph = graph_structures.FullGraphStructure([branch])
        return graph.prepare_for_json()


    def _create_nodes(self, dt_inputs, parent_op, dt_outputs, op_inputs, item_gid=None):
        """Expected a list of DataTypes, Parent Operation, Outputs, and returns NodeStructure entities."""
        dt_inputs = self._create_datatype_nodes(dt_inputs, item_gid)
        parent_op = self._create_operation_nodes(parent_op, item_gid)
        dt_outputs = self._create_datatype_nodes(dt_outputs, item_gid)
        op_inputs = self._create_operation_nodes(op_inputs, item_gid)
        return dt_inputs, parent_op, dt_outputs, op_inputs


    @staticmethod
    def _create_datatype_nodes(datatypes_list, selected_item_gid=None):
        """ Expects a list of DataTypes and returns a list of NodeStructures """
        nodes = []
        if datatypes_list is None:
            return nodes
        for data_type in datatypes_list:
            node = graph_structures.NodeStructure.build_structure_for_datatype(data_type.gid)
            if data_type.gid == selected_item_gid:
                node.selected = True
            nodes.append(node)
        return nodes


    @staticmethod
    def _create_operation_nodes(operations_list, selected_item_gid=None):
        """
        Expects a list of operations and returns a list of NodeStructures
        """
        nodes = []
        for operation in operations_list:
            node = graph_structures.NodeStructure.build_structure_for_operation(operation)
            if operation.gid == selected_item_gid:
                node.selected = True
            nodes.append(node)
        return nodes


    def _create_operation_group_nodes(self, operations_list, selected_item_gid=None):
        """
        Expects a list of operations that are part of some operation groups.
        """
        groups = dict()
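        # Collapse operations onto their groups: keep exactly one node per
        # distinct OperationGroup (mapping group id -> group gid).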
        for operation in operations_list:
            if operation.fk_operation_group not in groups:
                group = self.project_service.get_operation_group_by_id(operation.fk_operation_group)
                groups[group.id] = group.gid
        nodes = []
        for group_gid in groups.values():
            node = graph_structures.NodeStructure.build_structure_for_operation_group(group_gid)
            if group_gid == selected_item_gid:
                node.selected = True
            nodes.append(node)
        return nodes


    def fill_default_attributes(self, template_dictionary, subsection='project'):
        """
        Overwrite base controller to add required parameters for adapter templates.
        """
        template_dictionary[common.KEY_SECTION] = 'project'
        template_dictionary[common.KEY_SUB_SECTION] = subsection
        template_dictionary[common.KEY_INCLUDE_RESOURCES] = 'project/included_resources'
        BaseController.fill_default_attributes(self, template_dictionary)
        return template_dictionary
Example #9
class TestRemove(TransactionalTestCase):
    """
    This class contains tests for the service layer related to remove of DataTypes.
    """

    def transactional_setup_method(self):
        """
        Prepare the database before each test.
        """
        self.project_service = ProjectService()
        self.test_user = TestFactory.create_user()

        self.delete_project_folders()
        result = self.count_all_entities(DataType)
        assert 0 == result, "There should be no data type in DB"
        result = self.count_all_entities(Project)
        assert 0 == result, "There should be no project in DB"

        self.test_project = TestFactory.import_default_project(self.test_user)
        self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)

    def transactional_teardown_method(self):
        """
        Reset the database when test is done.
        """
        self.delete_project_folders()

    def test_remove_used_connectivity(self):
        """
        Tests the remove of a connectivity which is used by other data types
        TODO: TVB-2688
        """
        conn = try_get_last_datatype(self.test_project.id, ConnectivityIndex)
        assert conn is not None
        conn_gid = conn.gid
        count_rm = self.count_all_entities(RegionMappingIndex)
        assert 1 == count_rm

        try:
            self.project_service.remove_datatype(self.test_project.id, conn.gid)
            raise AssertionError(
                "The connectivity is still used, so it should not be possible to remove it: " + str(conn_gid))
        except RemoveDataTypeException:
            # OK, do nothing
            pass

        res = dao.get_datatype_by_gid(conn_gid)
        assert conn.id == res.id, "Used connectivity removed"

    def test_remove_used_surface(self):
        """
        Tries to remove an used surface
        """
        cortical_filter = FilterChain(fields=[FilterChain.datatype + '.surface_type'], operations=["=="], values=[CORTICAL])
        mapping = try_get_last_datatype(self.test_project.id, RegionMappingIndex)
        surface = try_get_last_datatype(self.test_project.id, SurfaceIndex, cortical_filter)
        assert mapping is not None, "There should be one RegionMapping."
        assert surface is not None, "There should be one Cortical Surface."
        assert surface.gid == mapping.fk_surface_gid, "The mapping should reference this surface's GID."

        try:
            self.project_service.remove_datatype(self.test_project.id, surface.gid)
            raise AssertionError("The surface should still be used by a RegionMapping " + str(surface.gid))
        except RemoveDataTypeException:
            # OK, do nothing
            pass

        res = dao.get_datatype_by_gid(surface.gid)
        assert surface.id == res.id, "A used surface was deleted"

    def _remove_entity(self, data_class, before_number):
        """
        Try to remove entity. Fail otherwise.
        """
        dts, count = get_filtered_datatypes(self.test_project.id, data_class)
        assert count == before_number
        for dt in dts:
            data_gid = dt[2]
            self.project_service.remove_datatype(self.test_project.id, data_gid)
            res = dao.get_datatype_by_gid(data_gid)
            assert res is None, "The entity was not deleted"

        _, count = get_filtered_datatypes(self.test_project.id, data_class)
        assert 0 == count

    def test_happyflow_removedatatypes(self):
        """
        Tests the happy flow for the deletion multiple entities.
        They are tested together because they depend on each other and they
        have to be removed in a certain order.
        """
        self._remove_entity(LocalConnectivityIndex, 1)
        self._remove_entity(RegionMappingIndex, 1)
        self._remove_entity(ProjectionMatrixIndex, 3)
        self._remove_entity(SurfaceIndex, 6)
        self._remove_entity(ConnectivityAnnotationsIndex, 1)
        self._remove_entity(ConnectivityIndex, 1)

    def test_remove_time_series(self, time_series_region_index_factory):
        """
        Tests the happy flow for the deletion of a time series.
        """
        count_ts = self.count_all_entities(TimeSeriesRegionIndex)
        assert 0 == count_ts, "There should be no time series"
        conn = try_get_last_datatype(self.test_project.id, ConnectivityIndex)
        conn = h5.load_from_index(conn)
        rm = try_get_last_datatype(self.test_project.id, RegionMappingIndex)
        rm = h5.load_from_index(rm)
        time_series_region_index_factory(conn, rm)
        series = self.get_all_entities(TimeSeriesRegionIndex)
        assert 1 == len(series), "There should be only one time series"

        self.project_service.remove_datatype(self.test_project.id, series[0].gid)

        res = dao.get_datatype_by_gid(series[0].gid)
        assert res is None, "The time series was not deleted."

    def test_remove_value_wrapper(self):
        """
        Test the deletion of a value wrapper dataType
        """
        count_vals = self.count_all_entities(ValueWrapperIndex)
        assert 0 == count_vals, "There should be no value wrapper"
        value_wrapper_gid = TestFactory.create_value_wrapper(self.test_user, self.test_project)[1]
        res = dao.get_datatype_by_gid(value_wrapper_gid)
        assert res is not None, "The value wrapper was not created."

        self.project_service.remove_datatype(self.test_project.id, value_wrapper_gid)

        res = dao.get_datatype_by_gid(value_wrapper_gid)
        assert res is None, "The value wrapper was not deleted."