Example #1
    def __init__(self):
        micro_postfix = "_%d" % int(time.time() * 1000000)

        # Here create all structures needed later for data types creation
        self.files_helper = FilesHelper()

        # First create user 
        user = model.User("datatype_factory_user" + micro_postfix, "test_pass",
                          "*****@*****.**" + micro_postfix, True, "user")
        self.user = dao.store_entity(user)

        # Now create a project
        project_service = ProjectService()
        data = dict(name='DatatypesFactoryProject' + micro_postfix, description='test_desc', users=[])
        self.project = project_service.store_project(self.user, True, None, **data)

        # Create algorithm
        alg_category = model.AlgorithmCategory('one', True)
        dao.store_entity(alg_category)
        ad = model.Algorithm(SIMULATOR_MODULE, SIMULATOR_CLASS, alg_category.id)
        self.algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE, SIMULATOR_CLASS)
        if self.algorithm is None:
            self.algorithm = dao.store_entity(ad)

        # Create an operation
        self.meta = {DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
                     DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE}
        operation = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                    meta=json.dumps(self.meta), status=model.STATUS_FINISHED)
        self.operation = dao.store_entity(operation)
Example #2
    def stop_burst_operation(self, operation_id, is_group, remove_after_stop=False):
        """
        For a given operation id that is part of a burst, stop that burst.
        :returns: True when the operation was stopped successfully.
        """
        operation_id = int(operation_id)
        if int(is_group) == 0:
            operation = self.flow_service.load_operation(operation_id)
        else:
            op_group = ProjectService.get_operation_group_by_id(operation_id)
            first_op = ProjectService.get_operations_in_group(op_group)[0]
            operation = self.flow_service.load_operation(int(first_op.id))

        try:
            burst_service = BurstService()
            result = burst_service.stop_burst(operation.burst)
            if remove_after_stop:
                current_burst = common.get_from_session(common.KEY_BURST_CONFIG)
                if current_burst and current_burst.id == operation.burst.id:
                    common.remove_from_session(common.KEY_BURST_CONFIG)
                result = burst_service.cancel_or_remove_burst(operation.burst.id) or result

            return result
        except Exception, ex:
            self.logger.exception(ex)
            return False
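
A hedged usage sketch for the method above; 'burst_controller' and the id values are assumptions, and operation_id / is_group arrive as strings, as the web layer would pass them:

# Illustrative only: 'burst_controller' is an assumed instance of the class above.
stopped = burst_controller.stop_burst_operation("42", "0", remove_after_stop=True)
if not stopped:
    print("Operation 42 could not be stopped")
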
Example #3
    def __init__(self):
        micro_postfix = "_%d" % int(time.time() * 1000000)

        # Here create all structures needed later for data types creation
        self.files_helper = FilesHelper()

        # First create user 
        user = model.User("datatype_factory_user" + micro_postfix, "test_pass",
                          "*****@*****.**" + micro_postfix, True, "user")
        self.user = dao.store_entity(user)

        # Now create a project
        project_service = ProjectService()
        data = dict(name='DatatypesFactoryProject' + micro_postfix, description='test_desc', users=[])
        self.project = project_service.store_project(self.user, True, None, **data)

        # Create algorithm
        alg_category = model.AlgorithmCategory('one', True)
        dao.store_entity(alg_category)
        alg_group = model.AlgorithmGroup("test_module1", "classname1", alg_category.id)
        dao.store_entity(alg_group)
        algorithm = model.Algorithm(alg_group.id, 'id', name='', req_data='', param_name='', output='')
        self.algorithm = dao.store_entity(algorithm)

        #Create an operation
        self.meta = {DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
                     DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE}
        operation = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                    meta=json.dumps(self.meta), status=model.STATUS_FINISHED,
                                    method_name=ABCAdapter.LAUNCH_METHOD)
        self.operation = dao.store_entity(operation)
Example #4
def run_export(project_id, loose_irrelevant=False):

    s = ProjectService()
    mng = ExportManager()

    project = s.find_project(project_id)
    export_file = mng.export_project(project, loose_irrelevant)
    print("Check the exported file: %s" % export_file)
Example #5
    def import_project_structure(self, uploaded, user_id):
        """
        Execute import operations:
         
        1. check if ZIP or folder 
        2. find all project nodes
        3. for each project node:
            - create project
            - create all operations
            - import all images
            - create all dataTypes
        """

        self.user_id = user_id
        self.created_projects = []

        # Now we compute the name of the file where to store uploaded project
        now = datetime.now()
        date_str = "%d-%d-%d_%d-%d-%d_%d" % (now.year, now.month, now.day, now.hour,
                                             now.minute, now.second, now.microsecond)
        uq_name = "%s-ImportProject" % date_str
        uq_file_name = os.path.join(cfg.TVB_TEMP_FOLDER, uq_name + ".zip")

        temp_folder = None
        try:
            if isinstance(uploaded, (FieldStorage, Part)):
                if uploaded.file:
                    file_obj = open(uq_file_name, 'wb')
                    file_obj.write(uploaded.file.read())
                    file_obj.close()
                else:
                    raise ProjectImportException("Please select the archive which contains the project structure.")
            else:
                shutil.copyfile(uploaded, uq_file_name)

            # Now compute the name of the folder where to explode uploaded ZIP file
            temp_folder = os.path.join(cfg.TVB_TEMP_FOLDER, uq_name)
            try:
                self.files_helper.unpack_zip(uq_file_name, temp_folder)
            except FileStructureException as excep:
                self.logger.exception(excep)
                raise ProjectImportException("Bad ZIP archive provided. A TVB exported project is expected!")

            try:
                self._import_project_from_folder(temp_folder)
            except Exception as excep:
                self.logger.exception(excep)
                self.logger.debug("Error encountered during import. Deleting projects created during this operation.")

                # Roll back projects created so far
                project_service = ProjectService()
                for project in self.created_projects:
                    project_service.remove_project(project.id)

                raise ProjectImportException(str(excep))
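
A hedged invocation sketch for the importer above: 'uploaded' may be a cherrypy FieldStorage/Part from a form upload, or a plain path to a previously exported project ZIP. The path and user id below are made up, and ImportService is assumed to be the service class defining this method:

import_service = ImportService()  # assumed owner of import_project_structure
import_service.import_project_structure("/tmp/TVB_exported_project.zip", user_id=1)
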
Example #6
 def reload_burst_operation(self, operation_id, is_group, **_):
     """
     Find out from which burst this operation was launched. Set that burst as the
     selected one and redirect to the burst page.
     """
     is_group = int(is_group)
     if not is_group:
         operation = self.flow_service.load_operation(int(operation_id))
     else:
         op_group = ProjectService.get_operation_group_by_id(operation_id)
         first_op = ProjectService.get_operations_in_group(op_group)[0]
         operation = self.flow_service.load_operation(int(first_op.id))
     operation.burst.prepare_after_load()
     common.add2session(common.KEY_BURST_CONFIG, operation.burst)
     raise cherrypy.HTTPRedirect("/burst/")
Example #7
 def _check_permission(self, logged_user_id):
     operation = ProjectService.load_operation_by_gid(
         self.resource_identifier)
     if operation is None:
         raise InvalidIdentifierException()
     return self.check_project_permission(logged_user_id,
                                          operation.fk_launched_in)
Example #8
    def setUp(self):
        """
        Reset the database before each test.
        """
        initialize_storage()
        user = model.User("test_user", "test_pass", "*****@*****.**", True,
                          "user")
        self.test_user = dao.store_entity(user)
        data = dict(name='test_proj', description='desc', users=[])
        self.test_project = ProjectService().store_project(
            self.test_user, True, None, **data)
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "INTERMEDIATE"
        }
        algo_group = dao.find_group(SIMULATOR_MODULE, SIMULATOR_CLASS)
        self.simulator_adapter = FlowService().build_adapter_instance(
            algo_group)

        self.operation = model.Operation(self.test_user.id,
                                         self.test_project.id,
                                         algo_group.id,
                                         json.dumps(SIMULATOR_PARAMETERS),
                                         meta=json.dumps(meta),
                                         status=model.STATUS_STARTED,
                                         method_name=ABCAdapter.LAUNCH_METHOD)
        self.operation = dao.store_entity(self.operation)

        SIMULATOR_PARAMETERS['connectivity'] = self._create_connectivity(
            self.CONNECTIVITY_NODES)
Example #9
 def get(self):
     """
      :return: a list of the logged-in user's projects
     """
     user = get_current_user()
     projects = ProjectService.retrieve_all_user_projects(user_id=user.id)
     return [ProjectDto(project) for project in projects]
Example #10
 def test_burst_delete_with_project(self):
     """
     Test that on removal of a project all burst related data is cleared.
     """
     TestFactory.store_burst(self.test_project.id)
     ProjectService().remove_project(self.test_project.id)
     self._check_burst_removed()
Example #11
    def initialize_two_projects(self, simple_datatype_factory,
                                datatype_with_storage_factory):
        """
        Creates a user, an algorithm and 2 projects
        Project src_project will have an operation and 2 datatypes
        Project dest_project will be empty.
        Initializes a flow and a project service
        """
        self.clean_database(delete_folders=True)

        src_user = TestFactory.create_user('Source_User')
        self.src_usr_id = src_user.id
        self.src_project = TestFactory.create_project(src_user,
                                                      "Source_Project")

        self.red_datatype = simple_datatype_factory(subject=self.GEORGE1st)
        self.blue_datatype = datatype_with_storage_factory(
            subject=self.GEORGE2nd)

        # create the destination project
        dst_user = TestFactory.create_user('Destination_User')
        self.dst_usr_id = dst_user.id
        self.dest_project = TestFactory.create_project(dst_user,
                                                       "Destination_Project")

        self.flow_service = FlowService()
        self.project_service = ProjectService()
Example #12
    def store_exploration_section(self, val_range, step, dt_group_guid):
        """
        Launching method for further simulations.
        """
        range_list = [float(num) for num in val_range.split(",")]
        step_list = [float(num) for num in step.split(",")]

        datatype_group_ob = ProjectService().get_datatypegroup_by_gid(dt_group_guid)
        operation_grp = datatype_group_ob.parent_operation_group
        operation_obj = self.flow_service.load_operation(datatype_group_ob.fk_from_operation)
        parameters = json.loads(operation_obj.parameters)

        range1name, range1_dict = json.loads(operation_grp.range1)
        range2name, range2_dict = json.loads(operation_grp.range2)
        parameters[RANGE_PARAMETER_1] = range1name
        parameters[RANGE_PARAMETER_2] = range2name

        ## Change the existing simulator parameters to min/max/step ranges
        range1_dict = {constants.ATT_MINVALUE: range_list[0],
                       constants.ATT_MAXVALUE: range_list[1],
                       constants.ATT_STEP: step_list[0]}
        range2_dict = {constants.ATT_MINVALUE: range_list[2],
                       constants.ATT_MAXVALUE: range_list[3],
                       constants.ATT_STEP: step_list[1]}
        parameters[range1name] = json.dumps(range1_dict)  # this is for the x axis parameter
        parameters[range2name] = json.dumps(range2_dict)  # this is for the y axis parameter

        OperationService().group_operation_launch(common.get_logged_user().id, common.get_current_project(),
                                                  operation_obj.algorithm.id, operation_obj.algorithm.fk_category,
                                                  datatype_group_ob, **parameters)

        return [True, 'Stored the exploration material successfully']
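
The packed string arguments above are easy to get wrong; a hypothetical call ('controller' and all values are assumptions) showing the expected layout, where val_range carries min1,max1,min2,max2 and step carries step1,step2:

controller.store_exploration_section(val_range="0.0,1.0,-5.0,5.0",  # range 1 min/max, range 2 min/max
                                     step="0.1,0.5",                # step for range 1, step for range 2
                                     dt_group_guid="<existing-datatype-group-gid>")
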
Example #13
 def index(self, **data):
     """
     Login page (with or without messages).
     """
     template_specification = dict(mainContent="login",
                                   title="Login",
                                   data=data)
     if cherrypy.request.method == 'POST':
         form = LoginForm()
         try:
             data = form.to_python(data)
             username = data[KEY_USERNAME]
             password = data[KEY_PASSWORD]
             user = self.user_service.check_login(username, password)
             if user is not None:
                 common.add2session(common.KEY_USER, user)
                 common.set_info_message('Welcome ' + username)
                 self.logger.debug("User " + username +
                                   " has just logged in!")
                 if user.selected_project is not None:
                     prj = user.selected_project
                     prj = ProjectService().find_project(prj)
                     self._mark_selected(prj)
                 raise cherrypy.HTTPRedirect('/user/profile')
             else:
                 common.set_error_message(
                     'Wrong username/password, or user not yet validated...'
                 )
                 self.logger.debug("Wrong username " + username + " !!!")
          except formencode.Invalid as excep:
             template_specification[
                 common.KEY_ERRORS] = excep.unpack_errors()
Example #14
    def get_operation_status(operation_gid):
        operation = ProjectService.load_operation_by_gid(operation_gid)
        if operation is None:
            get_logger().warning("Invalid operation GID: {}".format(operation_gid))
            raise InvalidIdentifierException()

        return operation.status
Example #15
 def test_burst_delete_with_project(self):
     """
     Test that on removal of a project all burst related data is cleared.
     """
     self._prepare_and_launch_sync_burst()
     ProjectService().remove_project(self.test_project.id)
     self._check_burst_removed()
Example #16
    def test_get_users_for_project(self):
        """
        Get all members of a project except the current user.
        """
        user_ids = []
        for i in range(5):
            user = model_project.User("test_user" + str(i),
                                      "test_user_no" + str(i), "pass",
                                      "*****@*****.**")
            user = dao.store_entity(user)
            user_ids.append(user.id)
        admin = dao.get_user_by_name("test_user1")
        member1 = dao.get_user_by_name("test_user2")
        member2 = dao.get_user_by_name("test_user4")
        data = dict(name="test_proj",
                    description="test_desc",
                    users=[member1.id, member2.id])
        project = ProjectService().store_project(admin, True, None, **data)
        all_users, members, pag = self.user_service.get_users_for_project(
            admin.username, project.id)
        assert len(members) == 3, "More members than there should be."
        assert len(all_users) == 5
        assert pag == 1, "Invalid total pages number."

        admin_found_member = False
        for user in members:
            if user.username == admin.username:
                admin_found_member = True
        assert admin_found_member, "Admin is expected to be a project member"

        admin_found_editable = False
        for user in all_users:
            if user.username == admin.username:
                admin_found_editable = True
        assert not admin_found_editable, "Admin membership should not be editable"
Example #17
    def initialize_two_projects(self, dummy_datatype_index_factory,
                                project_factory, user_factory):
        """
        Creates a user, an algorithm and 2 projects
        Project src_project will have an operation and 2 datatypes
        Project dest_project will be empty.
        Initializes a flow and a project service
        """
        self.clean_database(delete_folders=True)

        self.algorithm_service = AlgorithmService()
        self.project_service = ProjectService()

        # Create the source project with 2 datatypes in it
        src_user = user_factory(username="******")
        self.src_usr_id = src_user.id
        self.src_project = project_factory(src_user, "Src_Project")
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'paupau.zip')
        self.red_datatype = TestFactory.import_zip_connectivity(
            src_user, self.src_project, zip_path, "John")
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'sensors',
                                'eeg_unitvector_62.txt.bz2')
        self.blue_datatype = TestFactory.import_sensors(
            src_user, self.src_project, zip_path, SensorTypes.TYPE_EEG.value)
        assert 1 == self.red_datatypes_in(self.src_project.id)
        assert 1 == self.blue_datatypes_in(self.src_project.id)

        # create the destination project empty
        self.dst_user = user_factory(username='******')
        self.dst_usr_id = self.dst_user.id
        self.dest_project = project_factory(self.dst_user,
                                            "Destination_Project")
        assert 0 == self.red_datatypes_in(self.dest_project.id)
        assert 0 == self.blue_datatypes_in(self.dest_project.id)
Example #18
class ProjectMembersResource(RestResource):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.project_service = ProjectService()
        self.user_service = UserService()
        self.project_dao = CaseDAO()

    def put(self, project_gid):
        """
        Add members to the given project
        :param project_gid: project gid
        :param
        """
        try:
            project = self.project_service.find_project_lazy_by_gid(project_gid)
        except Exception:
            raise InvalidIdentifierException("Invalid project identifier.")

        if get_current_user().id != project.fk_admin:
            raise AuthorizationRequestException("You are not allowed to edit the given project")

        input_data = flask.request.json
        new_members_gid = input_data[
            FormKeyInput.NEW_MEMBERS_GID.value] if FormKeyInput.NEW_MEMBERS_GID.value in input_data else []
        new_members_id = []
        for gid in new_members_gid:
            user = self.user_service.get_user_by_gid(gid)
            if user is None:
                raise InvalidInputException("Invalid user gid {}".format(gid))
            new_members_id.append(user.id)
        self.project_dao.add_members_to_project(project.id, new_members_id)
Example #19
    def transactional_setup_method(self):
        """
        Prepare before each test.
        """
        self.project_service = ProjectService()
        self.flow_service = FlowService()
        self.structure_helper = FilesHelper()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user,
                                                       "ProjectStructure")

        self.relevant_filter = StaticFiltersFactory.build_datatype_filters(
            single_filter=StaticFiltersFactory.RELEVANT_VIEW)
        self.full_filter = StaticFiltersFactory.build_datatype_filters(
            single_filter=StaticFiltersFactory.FULL_VIEW)
Example #20
 def transactional_setup_method(self):
     """
     Reset the database before each test.
     """
     self.project_service = ProjectService()
     self.structure_helper = FilesHelper()
     self.test_user = TestFactory.create_user()
Example #21
class SimulationFacade:
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.simulator_service = SimulatorService()
        self.project_service = ProjectService()

    def launch_simulation(self, current_user_id, zip_directory, project_gid):
        try:
            project = self.project_service.find_project_lazy_by_gid(project_gid)
        except ProjectServiceException:
            raise InvalidIdentifierException()

        try:
            simulator_h5_name = DirLoader(zip_directory, None).find_file_for_has_traits_type(Simulator)
            simulator_file = os.path.join(zip_directory, simulator_h5_name)
        except IOError:
            raise InvalidInputException('No Simulator h5 file found in the archive')

        try:
            simulator_algorithm = AlgorithmService().get_algorithm_by_module_and_class(SimulatorAdapter.__module__,
                                                                                       SimulatorAdapter.__name__)
            simulation = self.simulator_service.prepare_simulation_on_server(user_id=current_user_id,
                                                                             project=project,
                                                                             algorithm=simulator_algorithm,
                                                                             zip_folder_path=zip_directory,
                                                                             simulator_file=simulator_file)
            return simulation.gid
        except Exception as excep:
            self.logger.error(excep, exc_info=True)
            raise ServiceException(str(excep))
Example #22
    def reload_burst_operation(self, operation_id, is_group, **_):
        """
        Find out from which burst this operation was launched. Set that burst as the selected one and
        redirect to the burst page.
        """
        is_group = int(is_group)
        if not is_group:
            operation = OperationService.load_operation(int(operation_id))
        else:
            op_group = ProjectService.get_operation_group_by_id(operation_id)
            first_op = ProjectService.get_operations_in_group(op_group)[0]
            operation = OperationService.load_operation(int(first_op.id))
        self.simulator_controller.copy_simulator_configuration(
            operation.burst.id)

        raise cherrypy.HTTPRedirect("/burst/")
Example #23
    def _export_linked_datatypes(self, project, zip_file):
        linked_paths = ProjectService().get_linked_datatypes_storage_path(
            project)

        if not linked_paths:
            # do not export an empty operation
            return

        # Make an import operation which will contain links to other projects
        algo = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE,
                                           TVB_IMPORTER_CLASS)
        op = model_operation.Operation(None, None, project.id, algo.id)
        op.project = project
        op.algorithm = algo
        op.id = 'links-to-external-projects'
        op.start_now()
        op.mark_complete(model_operation.STATUS_FINISHED)

        op_folder = self.files_helper.get_operation_folder(
            op.project.name, op.id)
        op_folder_name = os.path.basename(op_folder)

        # add linked datatypes to archive in the import operation
        for pth in linked_paths:
            zip_pth = op_folder_name + '/' + os.path.basename(pth)
            zip_file.write(pth, zip_pth)

        # remove these files, since we only want them in the export archive
        self.files_helper.remove_folder(op_folder)
Example #24
    def generate_users(nr_users, nr_projects):
        """
        The generate_users method will create a clean state db with
        :param nr_users: number of users to be generated (with random roles between
                                CLINICIAN and RESEARCHER and random validated state)
        :param nr_projects: maximum number of projects to be generated for each user
        """
        users = []

        for i in range(nr_users):
            coin_flip = random.randint(0, 1)
            role = 'CLINICIAN' if coin_flip == 1 else 'RESEARCHER'
            password = hash_password("test")
            new_user = User("gen" + str(i), "name" + str(i), password,
                            "*****@*****.**", True, role)
            dao.store_entity(new_user)
            new_user = dao.get_user_by_name("gen" + str(i))
            ExtremeTestFactory.VALIDATION_DICT[new_user.id] = 0
            users.append(new_user)

        for i in range(nr_users):
            current_user = dao.get_user_by_name("gen" + str(i))
            projects_for_user = random.randint(0, nr_projects)
            for j in range(projects_for_user):
                data = dict(name='GeneratedProject' + str(i) + '_' + str(j),
                            description='test_desc',
                            users=ExtremeTestFactory.get_users_ids(
                                random.randint(0, nr_users - 3), nr_users,
                                current_user.id, users))
                ProjectService().store_project(current_user, True, None,
                                               **data)
                ExtremeTestFactory.VALIDATION_DICT[current_user.id] += 1
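
A hypothetical invocation of the generator above, assuming a clean TVB database and that the function is exposed as a static method of ExtremeTestFactory (which the body suggests); the counts are arbitrary:

ExtremeTestFactory.generate_users(10, 3)      # 10 users, at most 3 projects each
print(ExtremeTestFactory.VALIDATION_DICT)     # projects created per user id
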
Example #25
    def test_delete_dt_free_HDD_space(self):
        """
        Launch two operations and give the user enough disk space so that both finish.
        """
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")

        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project.id,
                                                  adapter, tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now free some space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project.id,
                                                  adapter, tmp_folder, **data)
        self._assert_stored_dt2()
Example #26
    def transactional_setup_method(self):
        """
        Prepare the database before each test.
        """
        self.project_service = ProjectService()
        self.test_user = TestFactory.create_user()

        self.delete_project_folders()
        result = self.count_all_entities(DataType)
        assert 0 == result, "There should be no data type in DB"
        result = self.count_all_entities(Project)
        assert 0 == result

        self.test_project = TestFactory.import_default_project(self.test_user)
        self.operation = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)
Example #27
    def test_delete_dt_free_hdd_space(self, test_adapter_factory,
                                      operation_factory):
        """
        Launch two operations and give the user enough disk space so that both finish.
        """
        test_adapter_factory(adapter_class=TestAdapterHDDRequired)
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        view_model = adapter.get_view_model()()
        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(view_model))

        self._assert_no_ddti()
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project,
                                                  adapter,
                                                  model_view=view_model)
        datatype = self._assert_stored_ddti()

        # Now free some space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        self._assert_no_ddti()
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project,
                                                  adapter,
                                                  model_view=view_model)
        self._assert_stored_ddti()
Example #28
    def cancel_or_remove_burst(self, burst_id):
        """
        Cancel (if the burst is still running) or remove the burst given by burst_id.
        :returns: True when the burst was removed, False when it was only cancelled.
        """
        burst_entity = dao.get_burst_by_id(burst_id)
        if burst_entity.status == burst_entity.BURST_RUNNING:
            self.stop_burst(burst_entity)
            return False

        service = ProjectService()
        ## Remove each DataType in current burst.
        ## We can not leave all on cascade, because it won't work on SQLite for mapped dataTypes.
        datatypes = dao.get_all_datatypes_in_burst(burst_id)
        ## Get operations linked to current burst before removing the burst or else
        ##    the burst won't be there to identify operations any more.
        remaining_ops = dao.get_operations_in_burst(burst_id)

        # Remove burst first to delete work-flow steps which still hold foreign keys to operations.
        correct = dao.remove_entity(burst_entity.__class__, burst_id)
        if not correct:
            raise RemoveDataTypeException("Could not remove Burst entity!")

        for datatype in datatypes:
            service.remove_datatype(burst_entity.fk_project, datatype.gid, False)

        ## Remove all Operations remained.
        correct = True
        remaining_op_groups = set()
        project = dao.get_project_by_id(burst_entity.fk_project)

        for oper in remaining_ops:
            is_remaining = dao.get_generic_entity(oper.__class__, oper.id)
            if len(is_remaining) == 0:
                ### Operation removed cascaded.
                continue
            if oper.fk_operation_group is not None and oper.fk_operation_group not in remaining_op_groups:
                is_remaining = dao.get_generic_entity(model.OperationGroup, oper.fk_operation_group)
                if len(is_remaining) > 0:
                    remaining_op_groups.add(oper.fk_operation_group)
                    correct = correct and dao.remove_entity(model.OperationGroup, oper.fk_operation_group)
            correct = correct and dao.remove_entity(oper.__class__, oper.id)
            service.structure_helper.remove_operation_data(project.name, oper.id)

        if not correct:
            raise RemoveDataTypeException("Could not remove Burst because a linked operation could not be dropped!!")
        return True
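
A short sketch of how the True/False contract above might be consumed; the method is assumed to live on BurstService (as the earlier examples suggest), and burst_id is assumed to reference an existing burst:

removed = BurstService().cancel_or_remove_burst(burst_id)
if removed:
    print("Burst %s was removed together with its datatypes" % burst_id)
else:
    print("Burst %s was still running and has only been cancelled" % burst_id)
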
Example #30
class ProjectController(BaseController):
    """
    Displays pages which deal with Project data management.
    """

    PRROJECTS_FOR_LINK_KEY = "projectsforlink"
    PRROJECTS_LINKED_KEY = "projectslinked"
    KEY_OPERATION_FILTERS = "operationfilters"

    def __init__(self):
        super(ProjectController, self).__init__()
        self.project_service = ProjectService()

    @expose_page
    @settings
    def index(self):
        """
        Display project main-menu. Choose one project to work with.
        """
        current_project = common.get_current_project()
        if current_project is None:
            raise cherrypy.HTTPRedirect("/project/viewall")
        template_specification = dict(mainContent="project_submenu",
                                      title="TVB Project Menu")
        return self.fill_default_attributes(template_specification)

    @expose_page
    @settings
    def viewall(self, create=False, page=1, selected_project_id=None, **_):
        """
        Display all existing projects. Choose one project to work with.
        """
        page = int(page)
        if cherrypy.request.method == 'POST' and create:
            raise cherrypy.HTTPRedirect('/project/editone')
        current_user_id = common.get_logged_user().id

        ## Select the project if the user chose one.
        if selected_project_id is not None:
            try:
                selected_project = self.project_service.find_project(
                    selected_project_id)
                self._mark_selected(selected_project)
            except ProjectServiceException as excep:
                self.logger.error(excep)
                self.logger.warning("Could not select project: " +
                                    str(selected_project_id))
                common.set_error_message("Could not select project: " +
                                         str(selected_project_id))

        #Prepare template response
        prjs, pages_no = self.project_service.retrieve_projects_for_user(
            current_user_id, page)
        template_specification = dict(mainContent="project/viewall",
                                      title="Available TVB Projects",
                                      projectsList=prjs,
                                      page_number=page,
                                      total_pages=pages_no)
        return self.fill_default_attributes(template_specification, 'list')
Example #31
class LaunchOperationResource(RestResource):

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.logger = get_logger(self.__class__.__module__)
        self.operation_service = OperationService()
        self.project_service = ProjectService()
        self.user_service = UserService()
        self.files_helper = FilesHelper()

    @check_permission(ProjectAccessPermission, 'project_gid')
    def post(self, project_gid, algorithm_module, algorithm_classname):
        """
        :generic method of launching Analyzers
        """
        model_file = self.extract_file_from_request(request_file_key=RequestFileKey.LAUNCH_ANALYZERS_MODEL_FILE.value)
        destination_folder = RestResource.get_destination_folder()
        h5_path = RestResource.save_temporary_file(model_file, destination_folder)

        try:
            project = self.project_service.find_project_lazy_by_gid(project_gid)
        except ProjectServiceException:
            raise InvalidIdentifierException(INVALID_PROJECT_GID_MESSAGE % project_gid)

        algorithm = FlowService.get_algorithm_by_module_and_class(algorithm_module, algorithm_classname)
        if algorithm is None:
            raise InvalidIdentifierException('No algorithm found for: %s.%s' % (algorithm_module, algorithm_classname))

        try:
            adapter_instance = ABCAdapter.build_adapter(algorithm)
            view_model = adapter_instance.get_view_model_class()()

            view_model_h5 = ViewModelH5(h5_path, view_model)
            view_model_gid = view_model_h5.gid.load()

            current_user = get_current_user()
            operation = self.operation_service.prepare_operation(current_user.id, project.id, algorithm.id,
                                                                 algorithm.algorithm_category, view_model_gid.hex, None,
                                                                 {})
            storage_path = self.files_helper.get_project_folder(project, str(operation.id))

            if isinstance(adapter_instance, ABCUploader):
                for key, value in adapter_instance.get_form_class().get_upload_information().items():
                    data_file = self.extract_file_from_request(request_file_key=key, file_extension=value)
                    data_file_path = RestResource.save_temporary_file(data_file, destination_folder)
                    file_name = os.path.basename(data_file_path)
                    upload_field = getattr(view_model_h5, key)
                    upload_field.store(os.path.join(storage_path, file_name))
                    shutil.move(data_file_path, storage_path)

            shutil.move(h5_path, storage_path)
            os.rmdir(destination_folder)
            view_model_h5.close()
            OperationService().launch_operation(operation.id, True)
        except Exception as excep:
            self.logger.error(excep, exc_info=True)
            raise ServiceException(str(excep))

        return operation.gid, HTTP_STATUS_CREATED
Example #32
 def prepare_group_launch(self, group_gid, step_key, algorithm_id, **data):
     """
     Receives as input a group gid and an algorithm given by category and id, along
     with data naming the required input parameter for the algorithm. Having these,
     it generates a range of GIDs for all the DataTypes in the group and launches
     a new operation group.
     """
     prj_service = ProjectService()
     dt_group = prj_service.get_datatypegroup_by_gid(group_gid)
     datatypes = prj_service.get_datatypes_from_datatype_group(dt_group.id)
     range_param_name = data.pop('range_param_name')
     data[RANGE_PARAMETER_1] = range_param_name
     data[range_param_name] = ','.join(dt.gid for dt in datatypes)
     OperationService().group_operation_launch(common.get_logged_user().id, common.get_current_project().id,
                                               int(algorithm_id), int(step_key), **data)
     redirect_url = self._compute_back_link('operations', common.get_current_project())
     raise cherrypy.HTTPRedirect(redirect_url)
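
A hedged example of the kwargs contract described above ('controller' and the concrete values are assumptions): range_param_name must name the adapter input that will receive the comma-separated DataType GIDs:

controller.prepare_group_launch("<datatype-group-gid>", step_key="1", algorithm_id="7",
                                range_param_name="time_series")
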
Example #34
 def setUp(self):
     """
     Reset the database before each test.
     """
     config.EVENTS_FOLDER = ''
     self.project_service = ProjectService()
     self.structure_helper = FilesHelper()
     self.test_user = TestFactory.create_user()
Example #35
    def __init__(self):
        micro_postfix = "_%d" % int(time.time() * 1000000)

        # Here create all structures needed later for data types creation
        self.files_helper = FilesHelper()

        # First create user
        user = model.User("datatype_factory_user" + micro_postfix, "test_pass",
                          "*****@*****.**" + micro_postfix, True, "user")
        self.user = dao.store_entity(user)

        # Now create a project
        project_service = ProjectService()
        data = dict(name='DatatypesFactoryProject' + micro_postfix,
                    description='test_desc',
                    users=[])
        self.project = project_service.store_project(self.user, True, None,
                                                     **data)

        # Create algorithm
        alg_category = model.AlgorithmCategory('one', True)
        dao.store_entity(alg_category)
        alg_group = model.AlgorithmGroup("test_module1", "classname1",
                                         alg_category.id)
        dao.store_entity(alg_group)
        algorithm = model.Algorithm(alg_group.id,
                                    'id',
                                    name='',
                                    req_data='',
                                    param_name='',
                                    output='')
        self.algorithm = dao.store_entity(algorithm)

        #Create an operation
        self.meta = {
            DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
            DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE
        }
        operation = model.Operation(self.user.id,
                                    self.project.id,
                                    self.algorithm.id,
                                    'test parameters',
                                    meta=json.dumps(self.meta),
                                    status=model.STATUS_FINISHED,
                                    method_name=ABCAdapter.LAUNCH_METHOD)
        self.operation = dao.store_entity(operation)
Example #36
class GetDataInProjectResource(RestResource):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.project_service = ProjectService()

    def get(self, project_gid):
        """
        :return: a list of DataType instances (subclasses) associated with the current project
        """
        try:
            project = self.project_service.find_project_lazy_by_gid(
                project_gid)
        except ProjectServiceException:
            raise InvalidIdentifierException(INVALID_PROJECT_GID_MESSAGE %
                                             project_gid)

        datatypes = self.project_service.get_datatypes_in_project(project.id)
        return [DataTypeDto(datatype) for datatype in datatypes]
Example #37
    def get(self, operation_gid):
        """
        :return: the status of an operation
        """
        operation = ProjectService.load_operation_by_gid(operation_gid)
        if operation is None:
            raise InvalidIdentifierException(INVALID_OPERATION_GID_MESSAGE % operation_gid)

        return operation.status
Example #38
    def transactional_setup_method(self):
        """
        Reset the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user,
                                                       name="GeneratedProject",
                                                       description="test_desc")
        self.operation = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter()
        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.zip_path = None
Example #39
def new_project(name):
    usr = UserService.get_administrators()[0]
    proj = ProjectService().store_project(usr,
                                          True,
                                          None,
                                          name=name,
                                          description=name,
                                          users=[usr])
    return proj
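
new_project above assumes at least one administrator already exists in the database; a hypothetical call:

proj = new_project("demo_project")
print(proj.id)
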
Example #40
    def setUp(self):
        """
        Prepare the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()
        self.test_user = TestFactory.create_user()

        self.delete_project_folders()
        result = self.count_all_entities(DataType)
        self.assertEqual(0, result, "There should be no data type in DB")
        result = self.count_all_entities(Project)
        self.assertEqual(0, result)

        self.test_project = TestFactory.import_default_project(self.test_user)
        self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter(test_project=self.test_project)
Example #41
 def stop_operation(self, operation_id, is_group, remove_after_stop=False):
     """
     Stop the operation given by operation_id. If is_group is true, stop all the
     operations from that group.
     """
     operation_service = OperationService()
     result = False
     if int(is_group) == 0:
         result = operation_service.stop_operation(operation_id)
         if remove_after_stop:
             ProjectService().remove_operation(operation_id)
     else:
         op_group = ProjectService.get_operation_group_by_id(operation_id)
         operations_in_group = ProjectService.get_operations_in_group(op_group)
         for operation in operations_in_group:
             tmp_res = operation_service.stop_operation(operation.id)
             if remove_after_stop:
                 ProjectService().remove_operation(operation.id)
             result = result or tmp_res
     return result
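
An illustrative call of the method above ('flow_controller' is an assumed instance); note that when is_group is non-zero, operation_id is interpreted as the id of the operation group:

stopped_any = flow_controller.stop_operation("15", "1", remove_after_stop=True)
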
Example #42
class ProjectController(BaseController):
    """
    Displays pages which deal with Project data management.
    """

    PRROJECTS_FOR_LINK_KEY = "projectsforlink"
    PRROJECTS_LINKED_KEY = "projectslinked"
    KEY_OPERATION_FILTERS = "operationfilters"

    def __init__(self):
        super(ProjectController, self).__init__()
        self.project_service = ProjectService()


    @expose_page
    @settings
    def index(self):
        """
        Display project main-menu. Choose one project to work with.
        """
        current_project = common.get_current_project()
        if current_project is None:
            raise cherrypy.HTTPRedirect("/project/viewall")
        template_specification = dict(mainContent="project_submenu", title="TVB Project Menu")
        return self.fill_default_attributes(template_specification)


    @expose_page
    @settings
    def viewall(self, create=False, page=1, selected_project_id=None, **_):
        """
        Display all existing projects. Choose one project to work with.
        """
        page = int(page)
        if cherrypy.request.method == 'POST' and create:
            raise cherrypy.HTTPRedirect('/project/editone')
        current_user_id = common.get_logged_user().id

        ## Select the project if the user chose one.
        if selected_project_id is not None:
            try:
                selected_project = self.project_service.find_project(selected_project_id)
                self._mark_selected(selected_project)
            except ProjectServiceException as excep:
                self.logger.error(excep)
                self.logger.warning("Could not select project: " + str(selected_project_id))
                common.set_error_message("Could not select project: " + str(selected_project_id))

        #Prepare template response
        prjs, pages_no = self.project_service.retrieve_projects_for_user(current_user_id, page)
        template_specification = dict(mainContent="project/viewall", title="Available TVB Projects",
                                      projectsList=prjs, page_number=page, total_pages=pages_no)
        return self.fill_default_attributes(template_specification, 'list')
Example #43
    def setUp(self):
        """
        Prepare before each test.
        """
        self.project_service = ProjectService()
        self.flow_service = FlowService()
        self.structure_helper = FilesHelper()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, "ProjectStructure")

        self.relevant_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.RELEVANT_VIEW)
        self.full_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.FULL_VIEW)
Example #44
class EventHandlerTest(BaseTestCase):
    """
    This class contains tests for the tvb.core.services.event_handler module.
    """ 
       
    def setUp(self):
        """
        Reset the database before each test.
        """
        self.project_service = ProjectService()
        self.test_user = TestFactory.create_user()
        
        
    def tearDown(self):
        """
        Cleans the environment after testing (database and executors dictionary)
        """
        self.clean_database()
        event_handlers.EXECUTORS_DICT = {}
        
        
    def test_handle_event(self):
        """
        Test a defined handler for the store project method.
        """
        path_to_events = os.path.dirname(__file__)
        event_handlers.read_events([path_to_events])
        data = dict(name="test_project", description="test_description", users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        test_project = self.project_service.store_project(self.test_user, True, None, **data)
        # Operations will start asynchronously; Give them time.
        time.sleep(1)
        gid = dao.get_last_data_with_uid("test_uid")
        self.assertTrue(gid is not None, "Nothing was stored in database!")
        datatype = dao.get_datatype_by_gid(gid)
        self.assertEqual(datatype.type, "Datatype1", "Wrong data stored!")
        self.project_service._remove_project_node_files(test_project.id, gid)
Example #45
 def setUp(self):
     """
     Reset the database before each test.
     """
     self.import_service = ImportService()
     self.flow_service = FlowService()
     self.project_service = ProjectService()
     
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user, name="GeneratedProject", description="test_desc")
     self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
     self.adapter_instance = TestFactory.create_adapter(test_project=self.test_project)
     TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
     self.zip_path = None 
Example #46
    def _get_effective_data_type(self, data):
        """
        This method returns the data type for the provided data.
        - If the current data is a simple data type, it is returned as-is.
        - If it is a data type group, we return its first element. One element is
          enough since all group elements have the same type.
        """
        # first check if current data is a DataTypeGroup
        if self.is_data_a_group(data):
            data_types = ProjectService.get_datatypes_from_datatype_group(data.id)

            if data_types is not None and len(data_types) > 0:
                # Since all objects in a group are the same type it's enough
                return ABCAdapter.load_entity_by_gid(data_types[0].gid)
            else:
                return None
        else:
            return data
Example #47
    def setUp(self):
        """
        Prepare the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()
        self.test_user = TestFactory.create_user()

        self.delete_project_folders()
        result = self.count_all_entities(DataType)
        self.assertEqual(0, result, "There should be no data type in DB")
        result = self.count_all_entities(Project)
        self.assertEqual(0, result)

        self.test_project = TestFactory.import_default_project(self.test_user)
        self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter(test_project=self.test_project)
Example #48
    def _get_all_data_types_arr(self, data):
        """
        This method builds an array with all data types to be processed later.
        - If the current data is a simple data type, it is added to the array.
        - If it is a data type group, all its children are loaded and added to the array.
        """
        # first check if current data is a DataTypeGroup
        if self.is_data_a_group(data):
            data_types = ProjectService.get_datatypes_from_datatype_group(data.id)

            result = []
            if data_types is not None and len(data_types) > 0:
                for data_type in data_types:
                    entity = ABCAdapter.load_entity_by_gid(data_type.gid)
                    result.append(entity)

            return result

        else:
            return [data]
Example #49
class ProjectStructureTest(TransactionalTestCase):
    """
    Test ProjectService methods (part related to Project Data Structure).
    """

    def setUp(self):
        """
        Prepare before each test.
        """
        self.project_service = ProjectService()
        self.flow_service = FlowService()
        self.structure_helper = FilesHelper()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, "ProjectStructure")

        self.relevant_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.RELEVANT_VIEW)
        self.full_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.FULL_VIEW)

    
    def tearDown(self):
        """
        Clear project folders after testing
        """
        self.delete_project_folders()


    def test_set_operation_visibility(self):
        """
        Check if the visibility for an operation is set correctly.
        """
        self.__init_algorithmn()
        op1 = model.Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op1 = dao.store_entity(op1)
        self.assertTrue(op1.visible, "The operation should be visible.")
        self.project_service.set_operation_and_group_visibility(op1.gid, False)
        updated_op = dao.get_operation_by_id(op1.id)
        self.assertFalse(updated_op.visible, "The operation should not be visible.")


    def test_set_op_and_group_visibility(self):
        """
        When changing the visibility for an operation that belongs to an operation group, we
        should also change the visibility for the entire group of operations.
        """
        _, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")
        list_of_operations = dao.get_operations_in_group(group_id)
        for operation in list_of_operations:
            self.assertTrue(operation.visible, "The operation should be visible.")
        self.project_service.set_operation_and_group_visibility(list_of_operations[0].gid, False)
        operations = dao.get_operations_in_group(group_id)
        for operation in operations:
            self.assertFalse(operation.visible, "The operation should not be visible.")


    def test_set_op_group_visibility(self):
        """
        Tests if the visibility for an operation group is set correctly.
        """
        _, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")
        list_of_operations = dao.get_operations_in_group(group_id)
        for operation in list_of_operations:
            self.assertTrue(operation.visible, "The operation should be visible.")
        op_group = dao.get_operationgroup_by_id(group_id)
        self.project_service.set_operation_and_group_visibility(op_group.gid, False, True)
        operations = dao.get_operations_in_group(group_id)
        for operation in operations:
            self.assertFalse(operation.visible, "The operation should not be visible.")


    def test_is_upload_operation(self):
        """
        Tests that upload and non-upload operations are classified accordingly.
        """
        self.__init_algorithm()
        upload_algo = self._create_algo_for_upload()
        op1 = model.Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op2 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "")
        operations = dao.store_entities([op1, op2])
        is_upload_operation = self.project_service.is_upload_operation(operations[0].gid)
        self.assertFalse(is_upload_operation, "The operation should not be an upload operation.")
        is_upload_operation = self.project_service.is_upload_operation(operations[1].gid)
        self.assertTrue(is_upload_operation, "The operation should be an upload operation.")


    def test_get_upload_operations(self):
        """
        Test get_all when filter is for Upload category.
        """
        self.__init_algorithm()
        upload_algo = self._create_algo_for_upload()

        project = model.Project("test_proj_2", self.test_user.id, "desc")
        project = dao.store_entity(project)

        op1 = model.Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op2 = model.Operation(self.test_user.id, project.id, upload_algo.id, "", status=model.STATUS_FINISHED)
        op3 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "")
        op4 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "", status=model.STATUS_FINISHED)
        op5 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "", status=model.STATUS_FINISHED)
        operations = dao.store_entities([op1, op2, op3, op4, op5])

        upload_operations = self.project_service.get_all_operations_for_uploaders(self.test_project.id)
        self.assertEqual(2, len(upload_operations), "Wrong number of upload operations.")
        upload_ids = [operation.id for operation in upload_operations]
        for i in [3, 4]:
            self.assertTrue(operations[i].id in upload_ids,
                            "The operation should be an upload operation.")
        for i in [0, 1, 2]:                    
            self.assertFalse(operations[i].id in upload_ids, 
                             "The operation should not be an upload operation.")


    def test_is_datatype_group(self):
        """
        Tests whether a datatype is a group.
        """
        _, dt_group_id, first_dt, _ = self._create_datatype_group()
        dt_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]
        is_dt_group = self.project_service.is_datatype_group(dt_group.gid)
        self.assertTrue(is_dt_group, "The datatype should be a datatype group.")
        is_dt_group = self.project_service.is_datatype_group(first_dt.gid)
        self.assertFalse(is_dt_group, "The datatype should not be a datatype group.")


    def test_count_datatypes_in_group(self):
        """ Test that counting dataTypes is correct. Happy flow."""
        _, dt_group_id, first_dt, _ = self._create_datatype_group()
        count = dao.count_datatypes_in_group(dt_group_id)
        self.assertEqual(count, 2)
        count = dao.count_datatypes_in_group(first_dt.id)
        self.assertEqual(count, 0, "There should be no dataType.")


    def test_set_datatype_visibility(self):
        """
        Check if the visibility for a datatype is set correctly.
        """
        # it's a list of 3 elements
        mapped_arrays = self._create_mapped_arrays(self.test_project.id)
        for mapped_array in mapped_arrays:
            is_visible = dao.get_datatype_by_id(mapped_array[0]).visible
            self.assertTrue(is_visible, "The data type should be visible.")

        self.project_service.set_datatype_visibility(mapped_arrays[0][2], False)
        for i in range(len(mapped_arrays)):
            is_visible = dao.get_datatype_by_id(mapped_arrays[i][0]).visible
            if not i:
                self.assertFalse(is_visible, "The data type should not be visible.")
            else:
                self.assertTrue(is_visible, "The data type should be visible.")


    def test_set_visibility_for_dt_in_group(self):
        """
        Check if the visibility for a datatype from a datatype group is set correctly.
        """
        _, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        self.assertTrue(first_dt.visible, "The data type should be visible.")
        self.assertTrue(second_dt.visible, "The data type should be visible.")
        self.project_service.set_datatype_visibility(first_dt.gid, False)

        db_dt_group = self.project_service.get_datatype_by_id(dt_group_id)
        db_first_dt = self.project_service.get_datatype_by_id(first_dt.id)
        db_second_dt = self.project_service.get_datatype_by_id(second_dt.id)

        self.assertFalse(db_dt_group.visible, "The data type group should not be visible.")
        self.assertFalse(db_first_dt.visible, "The data type should not be visible.")
        self.assertFalse(db_second_dt.visible, "The data type should not be visible.")


    def test_set_visibility_for_group(self):
        """
        Check if the visibility for a datatype group is set correctly.
        """
        _, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        dt_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]

        self.assertTrue(dt_group.visible, "The data type group should be visible.")
        self.assertTrue(first_dt.visible, "The data type should be visible.")
        self.assertTrue(second_dt.visible, "The data type should be visible.")
        self.project_service.set_datatype_visibility(dt_group.gid, False)

        updated_dt_group = self.project_service.get_datatype_by_id(dt_group_id)
        updated_first_dt = self.project_service.get_datatype_by_id(first_dt.id)
        updated_second_dt = self.project_service.get_datatype_by_id(second_dt.id)

        self.assertFalse(updated_dt_group.visible, "The data type group should not be visible.")
        self.assertFalse(updated_first_dt.visible, "The data type should not be visible.")
        self.assertFalse(updated_second_dt.visible, "The data type should not be visible.")


    def test_getdatatypes_from_dtgroup(self):
        """
        Validate that we can retrieve all DTs from a DT_Group
        """
        _, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        datatypes = self.project_service.get_datatypes_from_datatype_group(dt_group_id)
        self.assertEqual(len(datatypes), 2, "There should be 2 datatypes in the datatype group.")
        expected_dict = {first_dt.id: first_dt, second_dt.id: second_dt}
        actual_dict = {datatypes[0].id: datatypes[0], datatypes[1].id: datatypes[1]}

        for key in expected_dict.keys():
            expected = expected_dict[key]
            actual = actual_dict[key]
            self.assertEqual(expected.id, actual.id, "Not the same id.")
            self.assertEqual(expected.gid, actual.gid, "Not the same gid.")
            self.assertEqual(expected.type, actual.type, "Not the same type.")
            self.assertEqual(expected.subject, actual.subject, "Not the same subject.")
            self.assertEqual(expected.state, actual.state, "Not the same state.")
            self.assertEqual(expected.visible, actual.visible, "The datatype visibility is not correct.")
            self.assertEqual(expected.module, actual.module, "Not the same module.")
            self.assertEqual(expected.user_tag_1, actual.user_tag_1, "Not the same user_tag_1.")
            self.assertEqual(expected.invalid, actual.invalid, "The invalid field value is not correct.")
            self.assertEqual(expected.is_nan, actual.is_nan, "The is_nan field value is not correct.")


    def test_get_operations_for_dt(self):
        """
        Tests method get_operations_for_datatype.
        Verifies the result contains the expected operations.
        """
        created_ops, datatype_gid = self._create_operations_with_inputs()
        operations = self.project_service.get_operations_for_datatype(datatype_gid, self.relevant_filter)
        self.assertEqual(len(operations), 2)
        self.assertTrue(created_ops[0].id in [operations[0].id, operations[1].id], "Retrieved wrong operations.")
        self.assertTrue(created_ops[2].id in [operations[0].id, operations[1].id], "Retrieved wrong operations.")

        operations = self.project_service.get_operations_for_datatype(datatype_gid, self.full_filter)
        self.assertEqual(len(operations), 4)
        ids = [operations[0].id, operations[1].id, operations[2].id, operations[3].id]
        for i in range(4):
            self.assertTrue(created_ops[i].id in ids, "Retrieved wrong operations.")

        operations = self.project_service.get_operations_for_datatype(datatype_gid, self.relevant_filter, True)
        self.assertEqual(len(operations), 1)
        self.assertEqual(created_ops[4].id, operations[0].id, "Retrieved wrong operation.")

        operations = self.project_service.get_operations_for_datatype(datatype_gid, self.full_filter, True)
        self.assertEqual(len(operations), 2)
        self.assertTrue(created_ops[4].id in [operations[0].id, operations[1].id], "Retrieved wrong operations.")
        self.assertTrue(created_ops[5].id in [operations[0].id, operations[1].id], "Retrieved wrong operations.")


    def test_get_operations_for_dt_group(self):
        """
        Tests method get_operations_for_datatype_group.
        Verifies filters' influence over results is as expected
        """
        created_ops, dt_group_id = self._create_operations_with_inputs(True)

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id, self.relevant_filter)
        self.assertEqual(len(ops), 2)
        self.assertTrue(created_ops[0].id in [ops[0].id, ops[1].id], "Retrieved wrong operations.")
        self.assertTrue(created_ops[2].id in [ops[0].id, ops[1].id], "Retrieved wrong operations.")

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id, self.full_filter)
        self.assertEqual(len(ops), 4, "Incorrect number of operations.")
        ids = [ops[0].id, ops[1].id, ops[2].id, ops[3].id]
        for i in range(4):
            self.assertTrue(created_ops[i].id in ids, "Retrieved wrong operations.")

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id, self.relevant_filter, True)
        self.assertEqual(len(ops), 1)
        self.assertEqual(created_ops[4].id, ops[0].id, "Retrieved wrong operation.")

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id, self.full_filter, True)
        self.assertEqual(len(ops), 2)
        self.assertTrue(created_ops[4].id in [ops[0].id, ops[1].id], "Retrieved wrong operations.")
        self.assertTrue(created_ops[5].id in [ops[0].id, ops[1].id], "Retrieved wrong operations.")


    def test_get_inputs_for_operation(self):
        """
        Tests method get_datatype_and_datatypegroup_inputs_for_operation.
        Verifies filters' influence over results is as expected
        """
        algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        ids = []
        for datatype in array_wrappers:
            ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        parameters = json.dumps({"param_5": "1", "param_1": array_wrappers[0][2],
                                 "param_2": array_wrappers[1][2], "param_3": array_wrappers[2][2], "param_6": "0"})
        operation = model.Operation(self.test_user.id, self.test_project.id, algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.relevant_filter)
        self.assertEqual(len(inputs), 2)
        self.assertTrue(ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertFalse(ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.full_filter)
        self.assertEqual(len(inputs), 3, "Incorrect number of dataTypes.")
        self.assertTrue(ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")

        project, dt_group_id, first_dt, _ = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        parameters = json.dumps({"other_param": "_", "param_1": first_dt.gid})
        operation = model.Operation(self.test_user.id, project.id, algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.relevant_filter)
        self.assertEqual(len(inputs), 0, "Incorrect number of dataTypes.")
        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertEqual(inputs[0].id, dt_group_id, "Wrong dataType.")
        self.assertTrue(inputs[0].id != first_dt.id, "Wrong dataType.")


    def test_get_inputs_for_op_group(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The DataType inputs will be from a DataType group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        second_dt.visible = False
        dao.store_entity(second_dt)

        op_group = model.OperationGroup(project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({"param_5": "1", "param_1": first_dt.gid, "param_6": "2"})
        params_2 = json.dumps({"param_5": "1", "param_4": second_dt.gid, "param_6": "5"})

        algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

        op1 = model.Operation(self.test_user.id, project.id, algo.id, params_1, op_group_id=op_group.id)
        op2 = model.Operation(self.test_user.id, project.id, algo.id, params_2, op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 0)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

        first_dt.visible = True
        dao.store_entity(first_dt)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")


    def test_get_inputs_for_op_group_simple_inputs(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The dataType inputs will not be part of a dataType group.
        """
        # it's a list of 3 elements
        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        array_wrapper_ids = []
        for datatype in array_wrappers:
            array_wrapper_ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(array_wrapper_ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        op_group = model.OperationGroup(self.test_project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({"param_5": "2", "param_1": array_wrappers[0][2],
                               "param_2": array_wrappers[1][2], "param_6": "7"})
        params_2 = json.dumps({"param_5": "5", "param_3": array_wrappers[2][2],
                               "param_2": array_wrappers[1][2], "param_6": "6"})

        algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

        op1 = model.Operation(self.test_user.id, self.test_project.id, algo.id, params_1, op_group_id=op_group.id)
        op2 = model.Operation(self.test_user.id, self.test_project.id, algo.id, params_2, op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 2)
        self.assertFalse(array_wrapper_ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(array_wrapper_ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(array_wrapper_ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 3, "Incorrect number of dataTypes.")
        self.assertTrue(array_wrapper_ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id])
        self.assertTrue(array_wrapper_ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id])
        self.assertTrue(array_wrapper_ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id])


    def test_remove_datatype(self):
        """
        Tests the deletion of a datatype.
        """
        # it's a list of 3 elements
        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        dt_list = []
        for array_wrapper in array_wrappers:
            dt_list.append(dao.get_datatype_by_id(array_wrapper[0]))

        self.project_service.remove_datatype(self.test_project.id, dt_list[0].gid)
        self._check_if_datatype_was_removed(dt_list[0])


    def test_remove_datatype_from_group(self):
        """
        Tests the deletion of a datatype from a datatype group; the whole group is removed as well.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        datatype_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]

        self.project_service.remove_datatype(project.id, first_dt.gid)
        self._check_if_datatype_was_removed(first_dt)
        self._check_if_datatype_was_removed(second_dt)
        self._check_if_datatype_was_removed(datatype_group)
        self._check_datatype_group_removed(dt_group_id, datatype_group.fk_operation_group)


    def test_remove_datatype_group(self):
        """
        Tests the deletion of a datatype group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        datatype_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]

        self.project_service.remove_datatype(project.id, datatype_group.gid)
        self._check_if_datatype_was_removed(first_dt)
        self._check_if_datatype_was_removed(second_dt)
        self._check_if_datatype_was_removed(datatype_group)
        self._check_datatype_group_removed(dt_group_id, datatype_group.fk_operation_group)


    def _create_mapped_arrays(self, project_id):
        """
        :param project_id: the project in which the arrays are created
        :return: a list of 3 dummy `MappedArray` descriptors (datatype id at index 0, gid at index 2)
        """
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 0)
        
        group = dao.get_algorithm_by_module('tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter')
        adapter_instance = ABCAdapter.build_adapter(group)
        data = {'param_1': 'some value'}
        #create 3 data types
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 1)
        
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 2)
        
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        array_wrappers, count = self.flow_service.get_available_datatypes(project_id,
                                                                          "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(count, 3)

        return array_wrappers


    def _create_operation(self, project_id, algorithm_id):
        """
        Create a dummy operation.
        :param project_id: the project in which the operation is created
        :param algorithm_id: the algorithm to be run for the operation
        :return: a dummy `Operation` with the given specifications
        """
        algorithm = dao.get_algorithm_by_id(algorithm_id)
        meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
                DataTypeMetaData.KEY_STATE: "RAW_DATA"}
        operation = model.Operation(self.test_user.id, project_id, algorithm.id, 'test params',
                                    meta=json.dumps(meta), status=model.STATUS_FINISHED)
        return dao.store_entity(operation)


    def _create_datatype_group(self):
        """
        Creates a project, one DataTypeGroup with 2 DataTypes into the new group.
        """
        test_project = TestFactory.create_project(self.test_user, "NewProject")

        all_operations = dao.get_filtered_operations(test_project.id, None, is_count=True)
        self.assertEqual(0, all_operations, "There should be no operation.")
        
        datatypes, op_group_id = TestFactory.create_group(self.test_user, test_project)
        dt_group = dao.get_datatypegroup_by_op_group_id(op_group_id)

        return test_project, dt_group.id, datatypes[0], datatypes[1]
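    # Callers unpack the result as: project, dt_group_id, first_dt, second_dt.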



    def _create_operations_with_inputs(self, is_group_parent=False):
        """
        Method used for creating a complex tree of operations.

        If 'is_group_parent' is True, a new group will be created and one of its entries will be used as
        input for the returned operations.
        """
        group_dts, root_op_group_id = TestFactory.create_group(self.test_user, self.test_project)
        if is_group_parent:
            datatype_gid = group_dts[0].gid
        else:
            datatype_gid = ProjectServiceTest._create_value_wrapper(self.test_user, self.test_project)[1]

        parameters = json.dumps({"param_name": datatype_gid})

        ops = []
        for i in range(4):
            ops.append(TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project))
            if i in [1, 3]:
                ops[i].visible = False
            ops[i].parameters = parameters
            ops[i] = dao.store_entity(ops[i])
            
        #groups
        _, ops_group = TestFactory.create_group(self.test_user, self.test_project)
        ops_group = dao.get_operations_in_group(ops_group)
        self.assertEqual(2, len(ops_group))
        ops_group[0].parameters = parameters
        ops_group[0] = dao.store_entity(ops_group[0])
        ops_group[1].visible = False
        ops_group[1].parameters = parameters
        ops_group[1] = dao.store_entity(ops_group[1])

        ops.extend(ops_group)
        if is_group_parent:
            dt_group = dao.get_datatypegroup_by_op_group_id(root_op_group_id)
            return ops, dt_group.id
        return ops, datatype_gid
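    # Resulting layout: ops[0] and ops[2] are visible stand-alone operations,
    # ops[1] and ops[3] are hidden; ops[4] is the visible group member and
    # ops[5] the hidden one. The relevant/full filter counts asserted in the
    # tests above follow directly from this layout.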


    def _check_if_datatype_was_removed(self, datatype):
        """
        Check if a certain datatype was removed.
        """
        try:
            dao.get_datatype_by_id(datatype.id)
            self.fail("The datatype was not deleted.")
        except Exception:
            pass
        try:
            dao.get_operation_by_id(datatype.fk_from_operation)
            self.fail("The operation was not deleted.")
        except Exception:
            pass


    def _check_datatype_group_removed(self, datatype_group_id, operation_group_id):
        """
        Checks that the DataTypeGroup and OperationGroup were removed.
        """
        try:
            dao.get_generic_entity(model.DataTypeGroup, datatype_group_id)
            self.fail("The DataTypeGroup entity was not removed.")
        except Exception:
            pass

        try:
            dao.get_operationgroup_by_id(operation_group_id)
            self.fail("The OperationGroup entity was not removed.")
        except Exception:
            pass


    def __init_algorithm(self):
        """
        Insert some starting data in the database.
        """
        categ1 = model.AlgorithmCategory('one', True)
        self.categ1 = dao.store_entity(categ1)
        ad = model.Algorithm(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS, categ1.id)
        self.algo_inst = dao.store_entity(ad)

    @staticmethod
    def _create_algo_for_upload():
        """ Creates a fake algorithm for an upload category. """
        category = dao.store_entity(model.AlgorithmCategory("upload_category", rawinput=True))
        return dao.store_entity(model.Algorithm("module", "classname", category.id))
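
A minimal sketch of the cascade contract the visibility tests above pin down: hiding any member of a group hides every member, while a stand-alone entity is toggled alone (`Item` and `toggle_visibility` are hypothetical names, not TVB API):

class Item(object):
    """Hypothetical stand-in for an Operation or DataType row."""
    def __init__(self, group=None):
        self.visible = True
        self.group = group

def toggle_visibility(items, target, visible):
    """Set visibility on `target` and cascade to every member of its group."""
    if target.group is None:
        scope = [target]
    else:
        scope = [item for item in items if item.group == target.group]
    for item in scope:
        item.visible = visible
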
class ProjectServiceTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.services.project_service module.
    """    
    
    def setUp(self):
        """
        Reset the database before each test.
        """
        config.EVENTS_FOLDER = ''
        self.project_service = ProjectService()
        self.structure_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
    
    
    def tearDown(self):
        """
        Remove project folders and clean up database.
        """
        created_projects = dao.get_projects_for_user(self.test_user.id)
        for project in created_projects:
            self.structure_helper.remove_project_structure(project.name)
        self.delete_project_folders()
    
    
    def test_create_project_happy_flow(self):
        """
        Standard flow for creating a new project.
        """
        user1 = TestFactory.create_user('test_user1')
        user2 = TestFactory.create_user('test_user2')
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        TestFactory.create_project(self.test_user, 'test_project', users=[user1.id, user2.id])
        resulting_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(resulting_projects), 1, "Project with valid data not inserted!")  
        project = resulting_projects[0]
        if project.name == "test_project":
            self.assertEqual(project.description, "description", "Description does not match.")
            users_for_project = dao.get_members_of_project(project.id)
            for user in users_for_project:
                self.assertTrue(user.id in [user1.id, user2.id], "Users not stored properly.")
        self.assertTrue(os.path.exists(os.path.join(TvbProfile.current.TVB_STORAGE, FilesHelper.PROJECTS_FOLDER,
                                                    "test_project")), "Folder for project was not created")
   
   
    def test_create_project_empty_name(self):
        """
        Creating a project with an empty name.
        """
        data = dict(name="", description="test_description", users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        self.assertRaises(ProjectServiceException, self.project_service.store_project, 
                          self.test_user, True, None, **data)
   
   
    def test_edit_project_happy_flow(self):
        """
        Standard flow for editing an existing project.
        """
        selected_project = TestFactory.create_project(self.test_user, 'test_proj')
        proj_root = self.structure_helper.get_project_folder(selected_project)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 1, "Database initialization probably failed!")
        
        edited_data = dict(name="test_project", description="test_description", users=[])
        edited_project = self.project_service.store_project(self.test_user, False, selected_project.id, **edited_data)
        self.assertFalse(os.path.exists(proj_root), "Previous folder not deleted")
        proj_root = self.structure_helper.get_project_folder(edited_project)
        self.assertTrue(os.path.exists(proj_root), "New folder not created!")
        self.assertNotEqual(selected_project.name, edited_project.name, "Project was not changed!")
        
             
    def test_edit_project_unexisting(self):
        """
        Trying to edit a non-existing project.
        """
        selected_project = TestFactory.create_project(self.test_user, 'test_proj')
        self.structure_helper.get_project_folder(selected_project)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 1, "Database initialization probably failed!")
        data = dict(name="test_project", description="test_description", users=[])
        self.assertRaises(ProjectServiceException, self.project_service.store_project,
                          self.test_user, False, 99, **data)

    
    def test_find_project_happy_flow(self):
        """
        Standard flow for finding a project by its id.
        """
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        inserted_project = TestFactory.create_project(self.test_user, 'test_project')
        self.assertTrue(self.project_service.find_project(inserted_project.id) is not None, "Project not found !")
        dao_returned_project = dao.get_project_by_id(inserted_project.id)
        service_returned_project = self.project_service.find_project(inserted_project.id)
        self.assertEqual(dao_returned_project.id, service_returned_project.id,
                         "Data returned from service is different from data returned by DAO.")
        self.assertEqual(dao_returned_project.name, service_returned_project.name,
                         "Data returned from service is different from data returned by DAO.")
        self.assertEqual(dao_returned_project.description, service_returned_project.description,
                         "Data returned from service is different from data returned by DAO.")        
        self.assertEqual(dao_returned_project.members, service_returned_project.members,
                         "Data returned from service is different from data returned by DAO.")
                      
        
    def test_find_project_unexisting(self):
        """
        Searching for a non-existing project.
        """
        data = dict(name="test_project", description="test_description", users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        self.project_service.store_project(self.test_user, True, None, **data)
        self.assertRaises(ProjectServiceException, self.project_service.find_project, 99)  
        
        
    def test_retrieve_projects_for_user(self):
        """
        Test for retrieving the projects for a given user. One page only.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0, "Database was not reset properly!")
        TestFactory.create_project(self.test_user, 'test_proj')
        TestFactory.create_project(self.test_user, 'test_proj1')
        TestFactory.create_project(self.test_user, 'test_proj2')
        user1 = TestFactory.create_user('another_user')
        TestFactory.create_project(user1, 'test_proj3')
        projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        for project in projects:
            self.assertNotEquals(project.name, "test_proj3", "This project should not have been retrieved")
            
            
    def test_retrieve_1project_3usr(self):
        """
        One user as admin, two users as members, getting projects for admin and for any of
        the members should return one.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        TestFactory.create_project(self.test_user, 'Testproject', users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(self.test_user.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member1.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member2.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")
        
        
    def test_retrieve_3projects_3usr(self):
        """
        Three users, 3 projects. Structure of db:
        proj1: {admin: user1, members: [user2, user3]}
        proj2: {admin: user2, members: [user1]}
        proj3: {admin: user3, members: [user1, user2]}
        Check valid project returns for all the users.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        member3 = TestFactory.create_user("member3")
        TestFactory.create_project(member1, 'TestProject1', users=[member2.id, member3.id])
        TestFactory.create_project(member2, 'TestProject2', users=[member1.id])
        TestFactory.create_project(member3, 'TestProject3', users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(member1.id, 1)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member2.id, 1)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member3.id, 1)[0]
        self.assertEqual(len(projects), 2, "Projects not retrieved properly!")
        
        
    def test_retrieve_projects_random(self):
        """
        Generate a large number of users/projects, and validate the results.
        """
        ExtremeTestFactory.generate_users(NR_USERS, MAX_PROJ_PER_USER)
        for i in range(NR_USERS):
            current_user = dao.get_user_by_name("gen" + str(i))
            expected_projects = ExtremeTestFactory.VALIDATION_DICT[current_user.id]
            if expected_projects % PROJECTS_PAGE_SIZE == 0:
                expected_pages = expected_projects / PROJECTS_PAGE_SIZE
                exp_proj_per_page = PROJECTS_PAGE_SIZE
            else:
                expected_pages = expected_projects / PROJECTS_PAGE_SIZE + 1
                exp_proj_per_page = expected_projects % PROJECTS_PAGE_SIZE
            if expected_projects == 0:
                expected_pages = 0
                exp_proj_per_page = 0
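            # The branches above are plain ceil-division: e.g. with a page size of 5,
            # 11 projects -> 3 pages (1 project on the last), 10 -> 2 full pages,
            # and 0 projects -> 0 pages.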
            projects, pages = self.project_service.retrieve_projects_for_user(current_user.id, expected_pages)
            self.assertEqual(len(projects), exp_proj_per_page, "Projects not retrieved properly! Expected: " +
                             str(exp_proj_per_page) + " but got: " + str(len(projects)))
            self.assertEqual(pages, expected_pages, "Pages not retrieved properly!")

        for folder in os.listdir(TvbProfile.current.TVB_STORAGE):
            full_path = os.path.join(TvbProfile.current.TVB_STORAGE, folder)
            if os.path.isdir(full_path) and folder.startswith('Generated'): 
                shutil.rmtree(full_path)
        
            
    def test_retrieve_projects_page2(self):
        """
        Test for retrieving the second page of projects for a given user.
        """
        for i in range(PROJECTS_PAGE_SIZE + 3):
            TestFactory.create_project(self.test_user, 'test_proj' + str(i))
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
        self.assertEqual(len(projects), (PROJECTS_PAGE_SIZE + 3) % PROJECTS_PAGE_SIZE, "Pagination improper.")
        self.assertEqual(pages, 2, 'Wrong number of pages retrieved.')
        
        
    def test_retrieve_projects_and_del(self):
        """
        Test that pagination stays correct after deleting a project.
        """
        created_projects = []
        for i in range(PROJECTS_PAGE_SIZE + 1):
            created_projects.append(TestFactory.create_project(self.test_user, 'test_proj' + str(i)))
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
        self.assertEqual(len(projects), (PROJECTS_PAGE_SIZE + 1) % PROJECTS_PAGE_SIZE, "Pagination improper.")
        self.assertEqual(pages, (PROJECTS_PAGE_SIZE + 1) / PROJECTS_PAGE_SIZE + 1, 'Wrong number of pages')
        self.project_service.remove_project(created_projects[1].id)
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
        self.assertEqual(len(projects), 0, "Pagination improper.")
        self.assertEqual(pages, 1, 'Wrong number of pages retrieved.')
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 1)
        self.assertEqual(len(projects), PROJECTS_PAGE_SIZE, "Pagination improper.")
        self.assertEqual(pages, 1, 'Wrong number of pages retrieved.')


    def test_empty_project_has_zero_disk_size(self):
        TestFactory.create_project(self.test_user, 'test_proj')
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id)
        self.assertEqual(0, projects[0].disk_size)
        self.assertEqual('0.0 KiB', projects[0].disk_size_human)


    def test_project_disk_size(self):
        project1 = TestFactory.create_project(self.test_user, 'test_proj1')
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(self.test_user, project1, 'testSubject', zip_path)

        project2 = TestFactory.create_project(self.test_user, 'test_proj2')
        TestFactory.import_cff(test_user=self.test_user, test_project=project2)

        projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertNotEqual(projects[0].disk_size, projects[1].disk_size, "projects should have different sizes")

        for project in projects:
            self.assertNotEqual(0, project.disk_size)
            self.assertNotEqual('0.0 KiB', project.disk_size_human)

            prj_folder = self.structure_helper.get_project_folder(project)
            actual_disk_size = self.compute_recursive_h5_disk_usage(prj_folder)[0]

            ratio = float(actual_disk_size) / project.disk_size
            msg = "Real disk usage: %s The one recorded in the db : %s" % (actual_disk_size, project.disk_size)
            self.assertTrue(ratio < 1.4, msg)


    def test_get_linkable_projects(self):
        """
        Test for retrieving the projects for a given user.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0, "Database was not reset!")
        test_proj = []
        user1 = TestFactory.create_user("another_user")
        for i in range(4):
            test_proj.append(TestFactory.create_project(self.test_user if i < 3 else user1, 'test_proj' + str(i)))

        project_storage = self.structure_helper.get_project_folder(test_proj[0])

        operation = TestFactory.create_operation(test_user=self.test_user, test_project=test_proj[0])

        project_storage = os.path.join(project_storage, str(operation.id))
        os.makedirs(project_storage)
        datatype = dao.store_entity(model.DataType(module="test_data", subject="subj1", 
                                                   state="test_state", operation_id=operation.id))
        linkable = self.project_service.get_linkable_projects_for_user(self.test_user.id, str(datatype.id))[0]
        self.assertEqual(len(linkable), 2, "Wrong count of linkable projects!")
        proj_names = [project.name for project in linkable]
        self.assertTrue(test_proj[1].name in proj_names)
        self.assertTrue(test_proj[2].name in proj_names)
        self.assertFalse(test_proj[3].name in proj_names)    
    
    
    def test_remove_project_happy_flow(self):
        """
        Standard flow for deleting a project.
        """
        inserted_project = TestFactory.create_project(self.test_user, 'test_proj')
        project_root = self.structure_helper.get_project_folder(inserted_project)
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 1, "Initializations failed!") 
        self.assertTrue(os.path.exists(project_root), "Something failed at insert time!")
        self.project_service.remove_project(inserted_project.id)
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 0, "Project was not deleted!")  
        self.assertFalse(os.path.exists(project_root), "Root folder not deleted!")  
        
        
    def test_remove_project_wrong_id(self):
        """
        Flow for deleting a project giving a non-existing id.
        """
        TestFactory.create_project(self.test_user, 'test_proj')
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 1, "Initializations failed!") 
        self.assertRaises(ProjectServiceException, self.project_service.remove_project, 99)   
    

    @staticmethod
    def _create_value_wrapper(test_user, test_project=None):
        """
        Creates a ValueWrapper dataType, and the associated parent Operation.
        This is also used in ProjectStructureTest.
        """
        if test_project is None:
            test_project = TestFactory.create_project(test_user, 'test_proj')
        operation = TestFactory.create_operation(test_user=test_user, test_project=test_project)
        value_wrapper = ValueWrapper(data_value=5.0, data_name="my_value")
        value_wrapper.type = "ValueWrapper"
        value_wrapper.module = "tvb.datatypes.mapped_values"
        value_wrapper.subject = "John Doe"
        value_wrapper.state = "RAW_STATE"
        value_wrapper.set_operation_id(operation.id)
        adapter_instance = StoreAdapter([value_wrapper])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        all_value_wrappers = FlowService().get_available_datatypes(test_project.id,
                                                                   "tvb.datatypes.mapped_values.ValueWrapper")[0]
        if len(all_value_wrappers) != 1:
            raise Exception("Should be only one value wrapper.")
        result_vw = ABCAdapter.load_entity_by_gid(all_value_wrappers[0][2])
        return test_project, result_vw.gid, operation.gid
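    # Callers unpack the result as: project, value_wrapper_gid, operation_gid.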

     
    def __check_meta_data(self, expected_meta_data, new_datatype):
        """Validate Meta-Data"""
        mapp_keys = {DataTypeMetaData.KEY_SUBJECT: "subject", DataTypeMetaData.KEY_STATE: "state"}
        for key, value in expected_meta_data.iteritems():
            if key in mapp_keys:
                self.assertEqual(value, getattr(new_datatype, mapp_keys[key]))
            elif key == DataTypeMetaData.KEY_OPERATION_TAG:
                if DataTypeMetaData.KEY_OP_GROUP_ID in expected_meta_data:
                    ## We have a Group to check
                    op_group = new_datatype.parent_operation.fk_operation_group
                    op_group = dao.get_generic_entity(model.OperationGroup, op_group)[0]
                    self.assertEqual(value, op_group.name)
                else:
                    self.assertEqual(value, new_datatype.parent_operation.user_group) 
    
    
    def test_remove_project_node(self):
        """
        Test removing of a node from a project.
        """
        inserted_project, gid, gid_op = self._create_value_wrapper(self.test_user) 
        project_to_link = model.Project("Link", self.test_user.id, "descript")
        project_to_link = dao.store_entity(project_to_link)
        exact_data = dao.get_datatype_by_gid(gid)
        dao.store_entity(model.Links(exact_data.id, project_to_link.id))
        self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Initialization problem!")
        
        operation_id = dao.get_generic_entity(model.Operation, gid_op, 'gid')[0].id
        op_folder = self.structure_helper.get_project_folder("test_proj", str(operation_id))
        self.assertTrue(os.path.exists(op_folder))
        sub_files = os.listdir(op_folder)
        self.assertEqual(2, len(sub_files))
        ### Validate that no more files are created than needed.
        
        self.project_service._remove_project_node_files(inserted_project.id, gid)
        sub_files = os.listdir(op_folder)
        self.assertEqual(1, len(sub_files))
        ### operation.xml file should still be there
        
        op_folder = self.structure_helper.get_project_folder("Link", str(operation_id + 1)) 
        sub_files = os.listdir(op_folder)
        self.assertEqual(2, len(sub_files))
        self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Data should still be in DB, because of links")
        self.project_service._remove_project_node_files(project_to_link.id, gid)
        self.assertTrue(dao.get_datatype_by_gid(gid) is None)  
        sub_files = os.listdir(op_folder)
        self.assertEqual(1, len(sub_files))
        ### operation.xml file should still be there
        
        
    def test_update_meta_data_simple(self):
        """
        Test the new update metaData for a simple data that is not part of a group.
        """
        inserted_project, gid, _ = self._create_value_wrapper(self.test_user)
        new_meta_data = {DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
                         DataTypeOverlayDetails.DATA_STATE: "second_state",
                         DataTypeOverlayDetails.CODE_GID: gid,
                         DataTypeOverlayDetails.CODE_OPERATION_TAG: 'new user group'}
        self.project_service.update_metadata(new_meta_data)
        
        new_datatype = dao.get_datatype_by_gid(gid)
        self.__check_meta_data(new_meta_data, new_datatype)
        
        op_path = FilesHelper().get_operation_meta_file_path(inserted_project.name, new_datatype.parent_operation.id)
        op_meta = XMLReader(op_path).read_metadata()
        self.assertEqual(op_meta['user_group'], 'new user group', 'UserGroup not updated!')


    def test_update_meta_data_group(self):
        """
        Test the new update metaData for a group of dataTypes.
        """
        datatypes, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")

        new_meta_data = {DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
                         DataTypeOverlayDetails.DATA_STATE: "updated_state",
                         DataTypeOverlayDetails.CODE_OPERATION_GROUP_ID: group_id,
                         DataTypeOverlayDetails.CODE_OPERATION_TAG: 'newGroupName'}
        self.project_service.update_metadata(new_meta_data)  
          
        for datatype in datatypes:
            new_datatype = dao.get_datatype_by_id(datatype.id)
            self.assertEqual(group_id, new_datatype.parent_operation.fk_operation_group)
            new_group = dao.get_generic_entity(model.OperationGroup, group_id)[0]
            self.assertEqual(new_group.name, "newGroupName") 
            self.__check_meta_data(new_meta_data, new_datatype)
            
    
    def _create_datatypes(self, dt_factory, nr_of_dts):
        for idx in range(nr_of_dts):
            dt = Datatype1()
            dt.row1 = "value%i" % (idx,)
            dt.row2 = "value%i" % (idx + 1,)
            dt_factory._store_datatype(dt)
            
            
    def test_retrieve_project_full(self):
        """
        Tests full project information is retrieved by method `ProjectService.retrieve_project_full(...)`
        """
        dt_factory = datatypes_factory.DatatypesFactory()
        self._create_datatypes(dt_factory, 3)
        _, ops_nr, operations, pages_no = self.project_service.retrieve_project_full(dt_factory.project.id)
        self.assertEqual(ops_nr, 1, "DataType Factory should only use one operation to store all its datatypes.")
        self.assertEqual(pages_no, 1, "All DataType Factory operations should fit on a single page.")
        resulted_dts = operations[0]['results']
        self.assertEqual(len(resulted_dts), 3, "3 datatypes should be created.")
        
        
    def test_get_project_structure(self):
        """
        Tests project structure is as expected and contains all datatypes
        """
        SELF_DTS_NUMBER = 3
        dt_factory_1 = datatypes_factory.DatatypesFactory()
        self._create_datatypes(dt_factory_1, SELF_DTS_NUMBER)
        dt_group = dt_factory_1.create_datatype_group()

        link_ids, expected_links = [], []
        # Prepare link towards a simple DT
        dt_factory_2 = datatypes_factory.DatatypesFactory()
        dt_to_link = dt_factory_2.create_simple_datatype()
        link_ids.append(dt_to_link.id)
        expected_links.append(dt_to_link.gid)

        # Prepare links towards a full DT Group, but expecting only the DT_Group in the final tree
        link_gr = dt_factory_2.create_datatype_group()
        dts = dao.get_datatype_in_group(datatype_group_id=link_gr.id)
        link_ids.extend([dt_to_link.id for dt_to_link in dts])
        link_ids.append(link_gr.id)
        expected_links.append(link_gr.gid)

        # Prepare link towards a single DT inside a group, and expecting to find the DT in the final tree
        link_gr = dt_factory_2.create_datatype_group()
        dt_to_link = dao.get_datatype_in_group(datatype_group_id=link_gr.id)[0]
        link_ids.append(dt_to_link.id)
        expected_links.append(dt_to_link.gid)

        # Actually create the links from Prj2 into Prj1
        FlowService().create_link(link_ids, dt_factory_1.project.id)

        # Retrieve the raw data used to compose the tree (for easy parsing)
        dts_in_tree = dao.get_data_in_project(dt_factory_1.project.id)
        dts_in_tree = [dt.gid for dt in dts_in_tree]
        # Retrieve the tree json (for trivial validations only, as we can not decode)
        node_json = self.project_service.get_project_structure(dt_factory_1.project, None, DataTypeMetaData.KEY_STATE,
                                                               DataTypeMetaData.KEY_SUBJECT, None)

        self.assertEqual(len(expected_links) + SELF_DTS_NUMBER + 2, len(dts_in_tree), "invalid number of nodes in tree")
        self.assertFalse(link_gr.gid in dts_in_tree, "DT_group where a single DT is linked is not expected.")
        self.assertTrue(dt_group.gid in dts_in_tree, "DT_Group should be in the Project Tree!")
        self.assertTrue(dt_group.gid in node_json, "DT_Group should be in the Project Tree JSON!")

        project_dts = dao.get_datatypes_in_project(dt_factory_1.project.id)
        for dt in project_dts:
            if dt.fk_datatype_group is not None:
                self.assertFalse(dt.gid in node_json, "DTs part of a group should not be in the tree JSON.")
                self.assertFalse(dt.gid in dts_in_tree, "DTs part of a group should not be in the tree.")
            else:
                self.assertTrue(dt.gid in node_json, "Simple DTs and DT_Groups should be in the tree JSON.")
                self.assertTrue(dt.gid in dts_in_tree, "Simple DTs and DT_Groups should be in the tree.")

        for link_gid in expected_links:
            self.assertTrue(link_gid in node_json, "Expected Link not present")
            self.assertTrue(link_gid in dts_in_tree, "Expected Link not present")
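
The loop above pins down the tree-composition rule: simple datatypes and DT_Group headers appear in the tree, while datatypes folded into a group do not. A sketch over plain dicts (a hypothetical shape; TVB's DAO returns entity objects):

def visible_tree_nodes(datatypes):
    """Keep simple DTs and DT_Group headers; drop DTs folded into a group."""
    return [dt["gid"] for dt in datatypes if dt.get("fk_datatype_group") is None]
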
class ImportServiceTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.services.import_service module.
    """  
    
    def setUp(self):
        """
        Reset the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()
        
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, name="GeneratedProject", description="test_desc")
        self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter(test_project=self.test_project)
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.zip_path = None 
        

    def tearDown(self):
        """
        Reset the database when test is done.
        """
        ### Delete TEMP folder
        if os.path.exists(TvbProfile.current.TVB_TEMP_FOLDER):
            shutil.rmtree(TvbProfile.current.TVB_TEMP_FOLDER)
        
        ### Delete folder where data was exported
        if self.zip_path and os.path.exists(self.zip_path):
            shutil.rmtree(os.path.split(self.zip_path)[0])
            
        self.delete_project_folders()

            
    def test_import_export(self):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        """
        result = self.get_all_datatypes()
        expected_results = {}
        for one_data in result:
            expected_results[one_data.gid] = (one_data.module, one_data.type)
        
        #create an array mapped in DB
        data = {'param_1': 'some value'}
        OperationService().initiate_prelaunch(self.operation, self.adapter_instance, {}, **data)
        inserted = self.flow_service.get_available_datatypes(self.test_project.id,
                                                             "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(1, inserted, "Problems when inserting data")
        
        #create a value wrapper
        value_wrapper = self._create_value_wrapper()
        count_operations = dao.get_filtered_operations(self.test_project.id, None, is_count=True)
        self.assertEqual(2, count_operations, "Invalid ops number before export!")

        # Export project as ZIP
        self.zip_path = ExportManager().export_project(self.test_project)
        self.assertTrue(self.zip_path is not None, "Exported file is None")
        
        # Remove the original project
        self.project_service.remove_project(self.test_project.id)
        result, lng_ = self.project_service.retrieve_projects_for_user(self.test_user.id)
        self.assertEqual(0, len(result), "Project Not removed!")
        self.assertEqual(0, lng_, "Project Not removed!")
        
        # Now try to import the project again
        self.import_service.import_project_structure(self.zip_path, self.test_user.id)
        result = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(result), 1, "There should be only one project.")
        self.assertEqual(result[0].name, "GeneratedProject", "The project name is not correct.")
        self.assertEqual(result[0].description, "test_desc", "The project description is not correct.")
        self.test_project = result[0]
        
        count_operations = dao.get_filtered_operations(self.test_project.id, None, is_count=True)
        
        # 1st op. - import CFF; 2nd op. - save the array wrapper
        self.assertEqual(2, count_operations, "Invalid ops number after export and import!")
        for gid in expected_results:
            datatype = dao.get_datatype_by_gid(gid)
            self.assertEqual(datatype.module, expected_results[gid][0], 'DataTypes not imported correctly')
            self.assertEqual(datatype.type, expected_results[gid][1], 'DataTypes not imported correctly')
        #check the value wrapper
        new_val = self.flow_service.get_available_datatypes(self.test_project.id, 
                                                            "tvb.datatypes.mapped_values.ValueWrapper")[0]
        self.assertEqual(1, len(new_val), "Expected exactly one ValueWrapper, got %s" % len(new_val))
        new_val = ABCAdapter.load_entity_by_gid(new_val[0][2])
        self.assertEqual(value_wrapper.data_value, new_val.data_value, "Data value incorrect")
        self.assertEqual(value_wrapper.data_type, new_val.data_type, "Data type incorrect")
        self.assertEqual(value_wrapper.data_name, new_val.data_name, "Data name incorrect")
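
    # Sketch (illustrative, not part of the original test): export_project
    # produces a plain ZIP archive, so its contents can be inspected with the
    # standard library before re-importing.
    def _sketch_list_export_contents(self):
        import zipfile  # stdlib; local import keeps the sketch self-contained
        with zipfile.ZipFile(self.zip_path) as archive:
            return archive.namelist()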
        

    def test_import_export_existing(self):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        """
        count_operations = dao.get_filtered_operations(self.test_project.id, None, is_count=True)
        self.assertEqual(2, count_operations, "Invalid ops before export!")

        self.zip_path = ExportManager().export_project(self.test_project)
        self.assertTrue(self.zip_path is not None, "Exported file is None")

        self.assertRaises(ProjectImportException, self.import_service.import_project_structure,
                          self.zip_path, self.test_user.id)
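
    # Sketch of the guard exercised above (illustrative; this helper and its
    # arguments are hypothetical, only ProjectImportException comes from TVB):
    # re-importing must be refused while the same project still exists.
    def _sketch_guard_duplicate_import(self, existing_gids, imported_gid):
        if imported_gid in existing_gids:
            raise ProjectImportException("Project %s already exists" % imported_gid)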


    def _create_timeseries(self):
        """Launch adapter to persist a TimeSeries entity"""
        activity_data = numpy.array([[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]])
        time_data = numpy.array([1, 2, 3])
        storage_path = FilesHelper().get_project_folder(self.test_project)
        time_series = TimeSeries(time_files=None, activity_files=None, 
                                 max_chunk=10, maxes=None, mins=None, data_shape=numpy.shape(activity_data), 
                                 storage_path=storage_path, label_y="Time", time_data=time_data, data_name='TestSeries',
                                 activity_data=activity_data, sample_period=10.0)
        self._store_entity(time_series, "TimeSeries", "tvb.datatypes.time_series")
        timeseries_count = self.flow_service.get_available_datatypes(self.test_project.id,
                                                                     "tvb.datatypes.time_series.TimeSeries")[1]
        self.assertEqual(timeseries_count, 1, "Should be only one TimeSeries")


    def _create_value_wrapper(self):
        """Persist ValueWrapper"""
        value_ = ValueWrapper(data_value=5.0, data_name="my_value")
        self._store_entity(value_, "ValueWrapper", "tvb.datatypes.mapped_values")
        valuew = self.flow_service.get_available_datatypes(self.test_project.id,
                                                           "tvb.datatypes.mapped_values.ValueWrapper")[0]
        self.assertEqual(len(valuew), 1, "Should be only one value wrapper")
        return ABCAdapter.load_entity_by_gid(valuew[0][2])


    def _store_entity(self, entity, type_, module):
        """Launch adapter to store a create a persistent DataType."""
        entity.type = type_
        entity.module = module
        entity.subject = "John Doe"
        entity.state = "RAW_STATE"
        entity.set_operation_id(self.operation.id)
        adapter_instance = StoreAdapter([entity])
        OperationService().initiate_prelaunch(self.operation, adapter_instance, {})
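
The _store_entity helper above follows a generic pattern: fill in descriptive metadata, bind the entity to an operation, then hand it to an adapter whose only job is to return it so the framework persists it. A minimal sketch of such an adapter (illustrative only; the real StoreAdapter ships with TVB's test infrastructure and its interface may differ):

class MinimalStoreAdapter(object):
    """Sketch: an adapter that simply hands back pre-built entities."""

    def __init__(self, entities):
        self.entities = entities

    def launch(self):
        # The framework persists whatever an adapter's launch() returns.
        return self.entities
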
    def create_json(self, item_gid, item_type, visibility_filter):
        """
        Method used for creating a JSON representation of a graph.
        """
        selected_filter = StaticFiltersFactory.build_datatype_filters(single_filter=visibility_filter)
        project = common.get_current_project()

        is_upload_operation = (item_type == graph_structures.NODE_OPERATION_TYPE) and \
                              (self.project_service.is_upload_operation(item_gid) or item_gid == "firstOperation")
        if is_upload_operation:
            graph_branches = []
            uploader_operations = self.project_service.get_all_operations_for_uploaders(project.id)
            for operation in uploader_operations:
                dt_outputs = self.project_service.get_results_for_operation(operation.id, selected_filter)
                dt_outputs = self._create_datatype_nodes(dt_outputs)
                parent_op = self._create_operation_nodes([operation], item_gid)
                branch = graph_structures.GraphComponent([], parent_op, dt_outputs, [])
                graph_branches.append(branch)
            graph = graph_structures.FullGraphStructure(graph_branches)
            return graph.prepare_for_json()

        dt_inputs, parent_op, dt_outputs, op_inputs = [], [], [], []
        if item_type == graph_structures.NODE_OPERATION_TYPE:
            dt_inputs = ProjectService.get_datatype_and_datatypegroup_inputs_for_operation(item_gid, selected_filter)
            parent_op = self.project_service.load_operation_by_gid(item_gid)
            dt_outputs = self.project_service.get_results_for_operation(parent_op.id, selected_filter)
            #create graph nodes
            dt_inputs, parent_op, dt_outputs, op_inputs = self._create_nodes(dt_inputs, [parent_op],
                                                                             dt_outputs, [], item_gid)

        elif item_type == graph_structures.NODE_OPERATION_GROUP_TYPE:
            parent_op_group = self.project_service.get_operation_group_by_gid(item_gid)
            dt_inputs = self.project_service.get_datatypes_inputs_for_operation_group(parent_op_group.id,
                                                                                      selected_filter)
            datatype_group = self.project_service.get_datatypegroup_by_op_group_id(parent_op_group.id)
            datatype = self.project_service.get_datatype_by_id(datatype_group.id)

            dt_inputs = self._create_datatype_nodes(dt_inputs)
            parent_op = graph_structures.NodeStructure.build_structure_for_operation_group(parent_op_group.gid)
            parent_op.selected = True
            parent_op = [parent_op]
            if selected_filter.display_name == StaticFiltersFactory.RELEVANT_VIEW and datatype.visible is False:
                dt_outputs = []
            else:
                dt_outputs = self._create_datatype_nodes([datatype])

        elif item_type == graph_structures.NODE_DATATYPE_TYPE:
            selected_dt = ABCAdapter.load_entity_by_gid(item_gid)
            if self.project_service.is_datatype_group(item_gid):
                datatype_group = self.project_service.get_datatypegroup_by_gid(selected_dt.gid)
                parent_op_group = self.project_service.get_operation_group_by_id(datatype_group.fk_operation_group)
                dt_inputs = self.project_service.get_datatypes_inputs_for_operation_group(parent_op_group.id,
                                                                                          selected_filter)
                op_inputs = self.project_service.get_operations_for_datatype_group(selected_dt.id, selected_filter)
                op_inputs_in_groups = self.project_service.get_operations_for_datatype_group(selected_dt.id,
                                                                                             selected_filter,
                                                                                             only_in_groups=True)
                #create graph nodes
                dt_inputs, parent_op, dt_outputs, op_inputs = self._create_nodes(dt_inputs, [], [selected_dt],
                                                                                 op_inputs, item_gid)
                parent_op = [graph_structures.NodeStructure.build_structure_for_operation_group(parent_op_group.gid)]
                op_inputs_in_groups = self._create_operation_group_nodes(op_inputs_in_groups)
                op_inputs.extend(op_inputs_in_groups)
            else:
                parent_op = self.flow_service.load_operation(selected_dt.fk_from_operation)
                dt_inputs = ProjectService.get_datatype_and_datatypegroup_inputs_for_operation(parent_op.gid,
                                                                                               selected_filter)
                op_inputs = self.project_service.get_operations_for_datatype(selected_dt.gid, selected_filter)
                op_inputs_in_groups = self.project_service.get_operations_for_datatype(selected_dt.gid, selected_filter,
                                                                                       only_in_groups=True)
                dt_outputs = self.project_service.get_results_for_operation(parent_op.id, selected_filter)
                #create graph nodes
                dt_inputs, parent_op, dt_outputs, op_inputs = self._create_nodes(dt_inputs, [parent_op], dt_outputs,
                                                                                 op_inputs, item_gid)
                op_inputs_in_groups = self._create_operation_group_nodes(op_inputs_in_groups)
                op_inputs.extend(op_inputs_in_groups)

        else:
            self.logger.error("Invalid item type: " + str(item_type))
            raise Exception("Invalid item type.")

        branch = graph_structures.GraphComponent(dt_inputs, parent_op, dt_outputs, op_inputs)
        graph = graph_structures.FullGraphStructure([branch])
        return graph.prepare_for_json()
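
Every branch of create_json above converges on the same two steps: wrap the four node lists in a GraphComponent, then serialise one or more components through FullGraphStructure.prepare_for_json. A rough stand-in for those two structures (the shapes are hypothetical; the real classes live in graph_structures and may differ):

import json

class SketchGraphComponent(object):
    """Sketch: one branch of the graph -- inputs, operation(s), outputs."""

    def __init__(self, dt_inputs, parent_op, dt_outputs, op_inputs):
        self.dt_inputs, self.parent_op = dt_inputs, parent_op
        self.dt_outputs, self.op_inputs = dt_outputs, op_inputs

class SketchFullGraphStructure(object):
    """Sketch: the full graph is just a list of such branches."""

    def __init__(self, components):
        self.components = components

    def prepare_for_json(self):
        return json.dumps([{"inputs": c.dt_inputs, "operation": c.parent_op,
                            "outputs": c.dt_outputs, "operation_inputs": c.op_inputs}
                           for c in self.components])
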
    def __init__(self):
        super(ProjectController, self).__init__()
        self.project_service = ProjectService()

    def setUp(self):
        """
        Reset the database before each test.
        """
        self.project_service = ProjectService()
        self.test_user = TestFactory.create_user()
Example #55
0
class RemoveTest(TransactionalTestCase):
    """
    This class contains tests for the service layer related to remove of DataTypes.
    """


    def setUp(self):
        """
        Prepare the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()
        self.test_user = TestFactory.create_user()

        self.delete_project_folders()
        result = self.count_all_entities(DataType)
        self.assertEqual(0, result, "There should be no data type in DB")
        result = self.count_all_entities(Project)
        self.assertEqual(0, result)

        self.test_project = TestFactory.import_default_project(self.test_user)
        self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter(test_project=self.test_project)


    def tearDown(self):
        """
        Reset the database when test is done.
        """
        self.delete_project_folders()


    def test_remove_used_connectivity(self):
        """
        Tests the removal of a connectivity that is used by other data types.
        """
        conn, conn_count = self.flow_service.get_available_datatypes(self.test_project.id, Connectivity)
        count_rm = self.count_all_entities(RegionMapping)
        self.assertEqual(1, conn_count)
        self.assertEqual(1, count_rm)

        conn_gid = conn[0][2]
        try:
            self.project_service.remove_datatype(self.test_project.id, conn_gid)
            self.fail("The connectivity is still used. It should not be possible to remove it." + str(conn_gid))
        except RemoveDataTypeException:
            #OK, do nothing
            pass

        res = dao.get_datatype_by_gid(conn_gid)
        self.assertEqual(conn[0].id, res.id, "Used connectivity removed")


    def test_remove_used_surface(self):
        """
        Tries to remove a used surface.
        """
        mapping, mapping_count = self.flow_service.get_available_datatypes(self.test_project.id, RegionMapping)
        self.assertEqual(1, mapping_count, "There should be one Mapping.")
        mapping_gid = mapping[0][2]
        mapping = ABCAdapter.load_entity_by_gid(mapping_gid)
        surface = dao.get_datatype_by_gid(mapping.surface.gid)
        self.assertEqual(surface.gid, mapping.surface.gid, "The surfaces should have the same GID")
        try:
            self.project_service.remove_datatype(self.test_project.id, surface.gid)
            self.fail("The surface should still be used by a RegionMapping " + str(surface.gid))
        except RemoveDataTypeException:
            #OK, do nothing
            pass

        res = dao.get_datatype_by_gid(surface.gid)
        self.assertEqual(surface.id, res.id, "A used surface was deleted")
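
    # Sketch of the rule both removal tests above rely on (illustrative; this
    # helper and its arguments are hypothetical, only RemoveDataTypeException
    # comes from TVB): a datatype may be deleted only once nothing references it.
    def _sketch_checked_remove(self, datatypes, references, gid):
        """`references` maps a GID to the GIDs of entities still using it."""
        if references.get(gid):
            raise RemoveDataTypeException("Datatype %s is still in use" % gid)
        datatypes.pop(gid, None)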


    def _remove_entity(self, data_class, before_number):
        """
        Remove all entities of the given class and check that they were deleted.
        """
        dts, count = self.flow_service.get_available_datatypes(self.test_project.id, data_class)
        self.assertEqual(count, before_number)
        for dt in dts:
            data_gid = dt[2]
            self.project_service.remove_datatype(self.test_project.id, data_gid)
            res = dao.get_datatype_by_gid(data_gid)
            self.assertEqual(None, res, "The entity was not deleted")


    def test_happyflow_removedatatypes(self):
        """
        Tests the happy flow for the deletion of multiple entities.
        They are tested together because they depend on each other and they
        have to be removed in a certain order.
        """
        self._remove_entity(LocalConnectivity, 1)
        self._remove_entity(RegionMapping, 1)
        ### Remove Surfaces
        # SQLAlchemy has no uniform way to retrieve Surface as a base class (no wildcard for polymorphic_identity)
        self._remove_entity(SurfaceData, 4)
        ### Remove a Connectivity
        self._remove_entity(Connectivity, 1)
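
    # Sketch (illustrative; the helper and its mapping are hypothetical): the
    # fixed order above is just a topological sort of the dependency chain,
    # e.g. LocalConnectivity -> Surface and RegionMapping -> Surface/Connectivity.
    def _sketch_removal_order(self, depends_on):
        """Return classes ordered so dependents are removed before dependencies."""
        order, seen = [], set()

        def visit(cls):
            if cls in seen:
                return
            seen.add(cls)
            for dep in depends_on.get(cls, []):
                visit(dep)
            order.append(cls)

        for cls in depends_on:
            visit(cls)
        # Post-order lists dependencies first; reverse to delete dependents first.
        return list(reversed(order))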


    def test_remove_time_series(self):
        """
        Tests the happy flow for the deletion of a time series.
        """
        count_ts = self.count_all_entities(TimeSeries)
        self.assertEqual(0, count_ts, "There should be no time series")
        self._create_timeseries()
        series = self.get_all_entities(TimeSeries)
        self.assertEqual(1, len(series), "There should be only one time series")
        self.project_service.remove_datatype(self.test_project.id, series[0].gid)
        res = dao.get_datatype_by_gid(series[0].gid)
        self.assertEqual(None, res, "The time series was not deleted.")


    def test_remove_array_wrapper(self):
        """
        Tests the happy flow for the deletion of an array wrapper.
        """
        count_array = self.count_all_entities(MappedArray)
        self.assertEqual(1, count_array)
        data = {'param_1': 'some value'}
        OperationService().initiate_prelaunch(self.operation, self.adapter_instance, {}, **data)
        array_wrappers = self.get_all_entities(MappedArray)
        self.assertEqual(2, len(array_wrappers))
        array_gid = array_wrappers[0].gid
        self.project_service.remove_datatype(self.test_project.id, array_gid)
        res = dao.get_datatype_by_gid(array_gid)
        self.assertEqual(None, res, "The array wrapper was not deleted.")


    def test_remove_value_wrapper(self):
        """
        Test the deletion of a value wrapper dataType
        """
        count_vals = self.count_all_entities(ValueWrapper)
        self.assertEqual(0, count_vals, "There should be no value wrapper")
        value_wrapper = self._create_value_wrapper()
        self.project_service.remove_datatype(self.test_project.id, value_wrapper.gid)
        res = dao.get_datatype_by_gid(value_wrapper.gid)
        self.assertEqual(None, res, "The value wrapper was not deleted.")


    def _create_timeseries(self):
        """Launch adapter to persist a TimeSeries entity"""
        storage_path = FilesHelper().get_project_folder(self.test_project, str(self.operation.id))

        time_series = TimeSeries()
        time_series.sample_period = 10.0
        time_series.start_time = 0.0
        time_series.storage_path = storage_path
        time_series.write_data_slice(numpy.array([1.0, 2.0, 3.0]))
        time_series.close_file()
        time_series.sample_period_unit = 'ms'

        self._store_entity(time_series, "TimeSeries", "tvb.datatypes.time_series")
        count_ts = self.count_all_entities(TimeSeries)
        self.assertEqual(1, count_ts, "Should be only one TimeSeries")
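
    # Sketch of the slice-wise storage pattern used above (illustrative only;
    # it assumes the h5py package as a stand-in for TVB's actual HDF5 layer):
    def _sketch_write_slices(self, path, slices):
        import h5py  # assumption: h5py available; local import keeps the sketch self-contained
        with h5py.File(path, "w") as hdf_file:
            dataset = hdf_file.create_dataset("data", shape=(0,), maxshape=(None,), dtype="f8")
            for data_slice in slices:
                data_slice = numpy.asarray(data_slice, dtype="f8")
                start = dataset.shape[0]
                # Grow the dataset, then append the new slice at the end.
                dataset.resize((start + data_slice.shape[0],))
                dataset[start:] = data_slice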


    def _create_value_wrapper(self):
        """Persist ValueWrapper"""
        value_ = ValueWrapper(data_value=5.0, data_name="my_value")
        self._store_entity(value_, "ValueWrapper", "tvb.datatypes.mapped_values")
        valuew = self.get_all_entities(ValueWrapper)
        self.assertEqual(1, len(valuew), "Should be one value wrapper")
        return ABCAdapter.load_entity_by_gid(valuew[0].gid)


    def _store_entity(self, entity, type_, module):
        """Launch adapter to store a create a persistent DataType."""
        entity.type = type_
        entity.module = module
        entity.subject = "John Doe"
        entity.state = "RAW_STATE"
        entity.set_operation_id(self.operation.id)
        adapter_instance = StoreAdapter([entity])
        OperationService().initiate_prelaunch(self.operation, adapter_instance, {})