class ProjectServiceTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.services.project_service module.
    """    
    
    def setUp(self):
        """
        Reset the database before each test.
        """
        config.EVENTS_FOLDER = ''
        self.project_service = ProjectService()
        self.structure_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
    
    
    def tearDown(self):
        """
        Remove project folders and clean up database.
        """
        created_projects = dao.get_projects_for_user(self.test_user.id)
        for project in created_projects:
            self.structure_helper.remove_project_structure(project.name)
        self.delete_project_folders()
    
    
    def test_create_project_happy_flow(self):
        """
        Standard flow for creating a new project.
        """
        user1 = TestFactory.create_user('test_user1')
        user2 = TestFactory.create_user('test_user2')
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        TestFactory.create_project(self.test_user, 'test_project', users=[user1.id, user2.id])
        resulting_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(resulting_projects), 1, "Project with valid data not inserted!")  
        project = resulting_projects[0]
        if project.name == "test_project":
            self.assertEqual(project.description, "description", "Description do no match")
            users_for_project = dao.get_members_of_project(project.id)
            for user in users_for_project:
                self.assertTrue(user.id in [user1.id, user2.id], "Users not stored properly.")
        self.assertTrue(os.path.exists(os.path.join(TvbProfile.current.TVB_STORAGE, FilesHelper.PROJECTS_FOLDER,
                                                    "test_project")), "Folder for project was not created")
   
   
    def test_create_project_empty_name(self):
        """
        Creating a project with an empty name.
        """
        data = dict(name="", description="test_description", users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        self.assertRaises(ProjectServiceException, self.project_service.store_project, 
                          self.test_user, True, None, **data)
   
   
    def test_edit_project_happy_flow(self):
        """
        Standard flow for editing an existing project.
        """
        selected_project = TestFactory.create_project(self.test_user, 'test_proj')
        proj_root = self.structure_helper.get_project_folder(selected_project)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 1, "Database initialization probably failed!")
        
        edited_data = dict(name="test_project", description="test_description", users=[])
        edited_project = self.project_service.store_project(self.test_user, False, selected_project.id, **edited_data)
        self.assertFalse(os.path.exists(proj_root), "Previous folder not deleted")
        proj_root = self.structure_helper.get_project_folder(edited_project)
        self.assertTrue(os.path.exists(proj_root), "New folder not created!")
        self.assertNotEqual(selected_project.name, edited_project.name, "Project name was not changed!")
        
             
    def test_edit_project_unexisting(self):
        """
        Trying to edit a non-existing project.
        """
        selected_project = TestFactory.create_project(self.test_user, 'test_proj')
        self.structure_helper.get_project_folder(selected_project)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 1, "Database initialization probably failed!")
        data = dict(name="test_project", description="test_description", users=[])
        self.assertRaises(ProjectServiceException, self.project_service.store_project,
                          self.test_user, False, 99, **data)

    
    def test_find_project_happy_flow(self):
        """
        Standard flow for finding a project by its id.
        """
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        inserted_project = TestFactory.create_project(self.test_user, 'test_project')
        self.assertTrue(self.project_service.find_project(inserted_project.id) is not None, "Project not found!")
        dao_returned_project = dao.get_project_by_id(inserted_project.id)
        service_returned_project = self.project_service.find_project(inserted_project.id)
        self.assertEqual(dao_returned_project.id, service_returned_project.id,
                         "Data returned from service is different from data returned by DAO.")
        self.assertEqual(dao_returned_project.name, service_returned_project.name,
                         "Data returned from service is different from data returned by DAO.")
        self.assertEqual(dao_returned_project.description, service_returned_project.description,
                         "Data returned from service is different from data returned by DAO.")        
        self.assertEqual(dao_returned_project.members, service_returned_project.members,
                         "Data returned from service is different from data returned by DAO.")
                      
        
    def test_find_project_unexisting(self):
        """
        Searching for a non-existing project.
        """
        data = dict(name="test_project", description="test_description", users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        self.project_service.store_project(self.test_user, True, None, **data)
        self.assertRaises(ProjectServiceException, self.project_service.find_project, 99)  
        
        
    def test_retrieve_projects_for_user(self):
        """
        Test for retrieving the projects for a given user. One page only.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0, "Database was not reset properly!")
        TestFactory.create_project(self.test_user, 'test_proj')
        TestFactory.create_project(self.test_user, 'test_proj1')
        TestFactory.create_project(self.test_user, 'test_proj2')
        user1 = TestFactory.create_user('another_user')
        TestFactory.create_project(user1, 'test_proj3')
        projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        for project in projects:
            self.assertNotEqual(project.name, "test_proj3", "This project should not have been retrieved")
            
            
    def test_retrieve_1project_3usr(self):
        """
        One user as admin and two users as members; retrieving projects for the admin
        or for any member should return exactly one project.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        TestFactory.create_project(self.test_user, 'Testproject', users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(self.test_user.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member1.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member2.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")
        
        
    def test_retrieve_3projects_3usr(self):
        """
        Three users, 3 projects. Structure of db:
        proj1: {admin: user1, members: [user2, user3]}
        proj2: {admin: user2, members: [user1]}
        proj3: {admin: user3, members: [user1, user2]}
        Check valid project returns for all the users.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        member3 = TestFactory.create_user("member3")
        TestFactory.create_project(member1, 'TestProject1', users=[member2.id, member3.id])
        TestFactory.create_project(member2, 'TestProject2', users=[member1.id])
        TestFactory.create_project(member3, 'TestProject3', users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(member1.id, 1)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member2.id, 1)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member3.id, 1)[0]
        self.assertEqual(len(projects), 2, "Projects not retrieved properly!")
        
        
    def test_retrieve_projects_random(self):
        """
        Generate a large number of users/projects, and validate the results.
        """
        ExtremeTestFactory.generate_users(NR_USERS, MAX_PROJ_PER_USER)
        for i in range(NR_USERS):
            current_user = dao.get_user_by_name("gen" + str(i))
            expected_projects = ExtremeTestFactory.VALIDATION_DICT[current_user.id]
            if expected_projects == 0:
                expected_pages = 0
                exp_proj_per_page = 0
            elif expected_projects % PROJECTS_PAGE_SIZE == 0:
                expected_pages = expected_projects // PROJECTS_PAGE_SIZE
                exp_proj_per_page = PROJECTS_PAGE_SIZE
            else:
                expected_pages = expected_projects // PROJECTS_PAGE_SIZE + 1
                exp_proj_per_page = expected_projects % PROJECTS_PAGE_SIZE
            projects, pages = self.project_service.retrieve_projects_for_user(current_user.id, expected_pages)
            self.assertEqual(len(projects), exp_proj_per_page, "Projects not retrieved properly! Expected: " +
                             str(exp_proj_per_page) + " but got: " + str(len(projects)))
            self.assertEqual(pages, expected_pages, "Pages not retrieved properly!")

        for folder in os.listdir(TvbProfile.current.TVB_STORAGE):
            full_path = os.path.join(TvbProfile.current.TVB_STORAGE, folder)
            if os.path.isdir(full_path) and folder.startswith('Generated'): 
                shutil.rmtree(full_path)
        
            
    def test_retrieve_projects_page2(self):
        """
        Test for retrieving the second page projects for a given user.
        """
        for i in range(PROJECTS_PAGE_SIZE + 3):
            TestFactory.create_project(self.test_user, 'test_proj' + str(i))
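        # PROJECTS_PAGE_SIZE + 3 projects exist, so page 2 should hold exactly the 3 overflow entries.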
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
        self.assertEqual(len(projects), (PROJECTS_PAGE_SIZE + 3) % PROJECTS_PAGE_SIZE, "Pagination improper.")
        self.assertEqual(pages, 2, 'Wrong number of pages retrieved.')
        
        
    def test_retrieve_projects_and_del(self):
        """
        Test that pagination info is updated after deleting a project.
        """
        created_projects = []
        for i in range(PROJECTS_PAGE_SIZE + 1):
            created_projects.append(TestFactory.create_project(self.test_user, 'test_proj' + str(i)))
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
        self.assertEqual(len(projects), (PROJECTS_PAGE_SIZE + 1) % PROJECTS_PAGE_SIZE, "Pagination improper.")
        self.assertEqual(pages, (PROJECTS_PAGE_SIZE + 1) // PROJECTS_PAGE_SIZE + 1, 'Wrong number of pages')
        self.project_service.remove_project(created_projects[1].id)
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
        self.assertEqual(len(projects), 0, "Pagination improper.")
        self.assertEqual(pages, 1, 'Wrong number of pages retrieved.')
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 1)
        self.assertEqual(len(projects), PROJECTS_PAGE_SIZE, "Pagination improper.")
        self.assertEqual(pages, 1, 'Wrong number of pages retrieved.')


    def test_empty_project_has_zero_disk_size(self):
        TestFactory.create_project(self.test_user, 'test_proj')
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id)
        self.assertEqual(0, projects[0].disk_size)
        self.assertEqual('0.0 KiB', projects[0].disk_size_human)


    def test_project_disk_size(self):
        project1 = TestFactory.create_project(self.test_user, 'test_proj1')
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(self.test_user, project1, 'testSubject', zip_path)

        project2 = TestFactory.create_project(self.test_user, 'test_proj2')
        TestFactory.import_cff(test_user=self.test_user, test_project=project2)

        projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertNotEqual(projects[0].disk_size, projects[1].disk_size, "Projects should have different sizes.")

        for project in projects:
            self.assertNotEqual(0, project.disk_size)
            self.assertNotEqual('0.0 KiB', project.disk_size_human)

            prj_folder = self.structure_helper.get_project_folder(project)
            actual_disk_size = self.compute_recursive_h5_disk_usage(prj_folder)[0]

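            # The size recorded in the DB may differ from the real on-disk HDF5 usage;
            # tolerate up to 40% drift before failing.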
            ratio = float(actual_disk_size) / project.disk_size
            msg = "Real disk usage: %s The one recorded in the db : %s" % (actual_disk_size, project.disk_size)
            self.assertTrue(ratio < 1.4, msg)


    def test_get_linkable_projects(self):
        """
        Test retrieving the link-able projects for a given user.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0, "Database was not reset!")
        test_proj = []
        user1 = TestFactory.create_user("another_user")
        for i in range(4):
            test_proj.append(TestFactory.create_project(self.test_user if i < 3 else user1, 'test_proj' + str(i)))

        project_storage = self.structure_helper.get_project_folder(test_proj[0])

        operation = TestFactory.create_operation(test_user=self.test_user, test_project=test_proj[0])

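        # Manually create the operation's storage folder, as a real launch would have done.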
        project_storage = os.path.join(project_storage, str(operation.id))
        os.makedirs(project_storage)
        datatype = dao.store_entity(model.DataType(module="test_data", subject="subj1", 
                                                   state="test_state", operation_id=operation.id))
        linkable = self.project_service.get_linkable_projects_for_user(self.test_user.id, str(datatype.id))[0]
        self.assertEqual(len(linkable), 2, "Wrong count of link-able projects!")
        proj_names = [project.name for project in linkable]
        self.assertTrue(test_proj[1].name in proj_names)
        self.assertTrue(test_proj[2].name in proj_names)
        self.assertFalse(test_proj[3].name in proj_names)    
    
    
    def test_remove_project_happy_flow(self):
        """
        Standard flow for deleting a project.
        """
        inserted_project = TestFactory.create_project(self.test_user, 'test_proj')
        project_root = self.structure_helper.get_project_folder(inserted_project)
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 1, "Initializations failed!") 
        self.assertTrue(os.path.exists(project_root), "Something failed at insert time!")
        self.project_service.remove_project(inserted_project.id)
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 0, "Project was not deleted!")  
        self.assertFalse(os.path.exists(project_root), "Root folder not deleted!")  
        
        
    def test_remove_project_wrong_id(self):
        """
        Flow for deleting a project giving a non-existing id.
        """
        TestFactory.create_project(self.test_user, 'test_proj')
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 1, "Initializations failed!") 
        self.assertRaises(ProjectServiceException, self.project_service.remove_project, 99)   
    

    @staticmethod
    def _create_value_wrapper(test_user, test_project=None):
        """
        Creates a ValueWrapper dataType, and the associated parent Operation.
        This is also used in ProjectStructureTest.
        """
        if test_project is None:
            test_project = TestFactory.create_project(test_user, 'test_proj')
        operation = TestFactory.create_operation(test_user=test_user, test_project=test_project)
        value_wrapper = ValueWrapper(data_value=5.0, data_name="my_value")
        value_wrapper.type = "ValueWrapper"
        value_wrapper.module = "tvb.datatypes.mapped_values"
        value_wrapper.subject = "John Doe"
        value_wrapper.state = "RAW_STATE"
        value_wrapper.set_operation_id(operation.id)
        adapter_instance = StoreAdapter([value_wrapper])
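        # Launch synchronously through the standard operation path, so the ValueWrapper
        # is stored exactly like any other adapter result.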
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        all_value_wrappers = FlowService().get_available_datatypes(test_project.id,
                                                                   "tvb.datatypes.mapped_values.ValueWrapper")[0]
        if len(all_value_wrappers) != 1:
            raise Exception("Should be only one value wrapper.")
        result_vw = ABCAdapter.load_entity_by_gid(all_value_wrappers[0][2])
        return test_project, result_vw.gid, operation.gid

     
    def __check_meta_data(self, expected_meta_data, new_datatype):
        """Validate Meta-Data"""
        mapp_keys = {DataTypeMetaData.KEY_SUBJECT: "subject", DataTypeMetaData.KEY_STATE: "state"}
        for key, value in expected_meta_data.items():
            if key in mapp_keys:
                self.assertEqual(value, getattr(new_datatype, mapp_keys[key]))
            elif key == DataTypeMetaData.KEY_OPERATION_TAG:
                if DataTypeMetaData.KEY_OP_GROUP_ID in expected_meta_data:
                    ## We have a Group to check
                    op_group = new_datatype.parent_operation.fk_operation_group
                    op_group = dao.get_generic_entity(model.OperationGroup, op_group)[0]
                    self.assertEqual(value, op_group.name)
                else:
                    self.assertEqual(value, new_datatype.parent_operation.user_group) 
    
    
    def test_remove_project_node(self):
        """
        Test removing of a node from a project.
        """
        inserted_project, gid, gid_op = self._create_value_wrapper(self.test_user) 
        project_to_link = model.Project("Link", self.test_user.id, "descript")
        project_to_link = dao.store_entity(project_to_link)
        exact_data = dao.get_datatype_by_gid(gid)
        dao.store_entity(model.Links(exact_data.id, project_to_link.id))
        self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Initialization problem!")
        
        operation_id = dao.get_generic_entity(model.Operation, gid_op, 'gid')[0].id
        op_folder = self.structure_helper.get_project_folder("test_proj", str(operation_id))
        self.assertTrue(os.path.exists(op_folder))
        sub_files = os.listdir(op_folder)
        self.assertEqual(2, len(sub_files))
        ### Validate that no more files are created than needed.
        
        self.project_service._remove_project_node_files(inserted_project.id, gid)
        sub_files = os.listdir(op_folder)
        self.assertEqual(1, len(sub_files))
        ### operation.xml file should still be there
        
        op_folder = self.structure_helper.get_project_folder("Link", str(operation_id + 1)) 
        sub_files = os.listdir(op_folder)
        self.assertEqual(2, len(sub_files))
        self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Data should still be in DB, because of links")
        self.project_service._remove_project_node_files(project_to_link.id, gid)
        self.assertTrue(dao.get_datatype_by_gid(gid) is None)  
        sub_files = os.listdir(op_folder)
        self.assertEqual(1, len(sub_files))
        ### operation.xml file should still be there
        
        
    def test_update_meta_data_simple(self):
        """
        Test the update of meta-data for a simple DataType that is not part of a group.
        """
        inserted_project, gid, _ = self._create_value_wrapper(self.test_user)
        new_meta_data = {DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
                         DataTypeOverlayDetails.DATA_STATE: "second_state",
                         DataTypeOverlayDetails.CODE_GID: gid,
                         DataTypeOverlayDetails.CODE_OPERATION_TAG: 'new user group'}
        self.project_service.update_metadata(new_meta_data)
        
        new_datatype = dao.get_datatype_by_gid(gid)
        self.__check_meta_data(new_meta_data, new_datatype)
        
        op_path = FilesHelper().get_operation_meta_file_path(inserted_project.name, new_datatype.parent_operation.id)
        op_meta = XMLReader(op_path).read_metadata()
        self.assertEqual(op_meta['user_group'], 'new user group', 'UserGroup not updated!')


    def test_update_meta_data_group(self):
        """
        Test the update of meta-data for a group of DataTypes.
        """
        datatypes, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")

        new_meta_data = {DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
                         DataTypeOverlayDetails.DATA_STATE: "updated_state",
                         DataTypeOverlayDetails.CODE_OPERATION_GROUP_ID: group_id,
                         DataTypeOverlayDetails.CODE_OPERATION_TAG: 'newGroupName'}
        self.project_service.update_metadata(new_meta_data)  
          
        for datatype in datatypes:
            new_datatype = dao.get_datatype_by_id(datatype.id)
            self.assertEqual(group_id, new_datatype.parent_operation.fk_operation_group)
            new_group = dao.get_generic_entity(model.OperationGroup, group_id)[0]
            self.assertEqual(new_group.name, "newGroupName") 
            self.__check_meta_data(new_meta_data, new_datatype)
            
    
    def _create_datatypes(self, dt_factory, nr_of_dts):
        for idx in range(nr_of_dts):
            dt = Datatype1()
            dt.row1 = "value%i" % (idx,)
            dt.row2 = "value%i" % (idx + 1,)
            dt_factory._store_datatype(dt)
            
            
    def test_retrieve_project_full(self):
        """
        Tests full project information is retrieved by method `ProjectService.retrieve_project_full(...)`
        """
        dt_factory = datatypes_factory.DatatypesFactory()
        self._create_datatypes(dt_factory, 3)
        _, ops_nr, operations, pages_no = self.project_service.retrieve_project_full(dt_factory.project.id)
        self.assertEqual(ops_nr, 1, "DataType Factory should only use one operation to store all its datatypes.")
        self.assertEqual(pages_no, 1, "A single operation should fit on one page.")
        resulted_dts = operations[0]['results']
        self.assertEqual(len(resulted_dts), 3, "3 datatypes should be created.")
        
        
    def test_get_project_structure(self):
        """
        Tests project structure is as expected and contains all datatypes
        """
        SELF_DTS_NUMBER = 3
        dt_factory_1 = datatypes_factory.DatatypesFactory()
        self._create_datatypes(dt_factory_1, SELF_DTS_NUMBER)
        dt_group = dt_factory_1.create_datatype_group()

        link_ids, expected_links = [], []
        # Prepare link towards a simple DT
        dt_factory_2 = datatypes_factory.DatatypesFactory()
        dt_to_link = dt_factory_2.create_simple_datatype()
        link_ids.append(dt_to_link.id)
        expected_links.append(dt_to_link.gid)

        # Prepare links towards a full DT Group, but expecting only the DT_Group in the final tree
        link_gr = dt_factory_2.create_datatype_group()
        dts = dao.get_datatype_in_group(datatype_group_id=link_gr.id)
        link_ids.extend([dt_to_link.id for dt_to_link in dts])
        link_ids.append(link_gr.id)
        expected_links.append(link_gr.gid)

        # Prepare link towards a single DT inside a group, and expecting to find the DT in the final tree
        link_gr = dt_factory_2.create_datatype_group()
        dt_to_link = dao.get_datatype_in_group(datatype_group_id=link_gr.id)[0]
        link_ids.append(dt_to_link.id)
        expected_links.append(dt_to_link.gid)

        # Actually create the links from Prj2 into Prj1
        FlowService().create_link(link_ids, dt_factory_1.project.id)

        # Retrieve the raw data used to compose the tree (for easy parsing)
        dts_in_tree = dao.get_data_in_project(dt_factory_1.project.id)
        dts_in_tree = [dt.gid for dt in dts_in_tree]
        # Retrieve the tree json (for trivial validations only, as we can not decode)
        node_json = self.project_service.get_project_structure(dt_factory_1.project, None, DataTypeMetaData.KEY_STATE,
                                                               DataTypeMetaData.KEY_SUBJECT, None)

        self.assertEqual(len(expected_links) + SELF_DTS_NUMBER + 2, len(dts_in_tree), "invalid number of nodes in tree")
        self.assertFalse(link_gr.gid in dts_in_tree, "DT_group where a single DT is linked is not expected.")
        self.assertTrue(dt_group.gid in dts_in_tree, "DT_Group should be in the Project Tree!")
        self.assertTrue(dt_group.gid in node_json, "DT_Group should be in the Project Tree JSON!")

        project_dts = dao.get_datatypes_in_project(dt_factory_1.project.id)
        for dt in project_dts:
            if dt.fk_datatype_group is not None:
                self.assertFalse(dt.gid in node_json, "DTs part of a group should not be individual tree nodes")
                self.assertFalse(dt.gid in dts_in_tree, "DTs part of a group should not be individual tree nodes")
            else:
                self.assertTrue(dt.gid in node_json, "Simple DTs and DT_Groups should be tree nodes")
                self.assertTrue(dt.gid in dts_in_tree, "Simple DTs and DT_Groups should be tree nodes")

        for link_gid in expected_links:
            self.assertTrue(link_gid in node_json, "Expected Link not present")
            self.assertTrue(link_gid in dts_in_tree, "Expected Link not present")


class ProjectService:
    """
    Services layer for Project entities.
    """

    def __init__(self):
        self.logger = get_logger(__name__)
        self.structure_helper = FilesHelper()

    def store_project(self, current_user, is_create, selected_id, **data):
        """
        We want to create/update a project entity.
        """
        # Validate Unique Name
        new_name = data["name"]
        if len(new_name) < 1:
            raise ProjectServiceException("Invalid project name!")
        projects_no = dao.count_projects_for_name(new_name, selected_id)
        if projects_no > 0:
            err = {'name': 'Please choose another name, this one is used!'}
            raise formencode.Invalid("Duplicate Name Error", {}, None, error_dict=err)
        started_operations = dao.get_operation_numbers(selected_id)[1]
        if started_operations > 0:
            raise ProjectServiceException("A project can not be renamed while operations are still running!")
        if is_create:
            current_proj = Project(new_name, current_user.id, data["description"])
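            # get_project_folder also creates the folder on disk when it does not exist yet.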
            self.structure_helper.get_project_folder(current_proj)
        else:
            try:
                current_proj = dao.get_project_by_id(selected_id)
            except Exception as excep:
                self.logger.exception("An error has occurred!")
                raise ProjectServiceException(str(excep))
            if current_proj.name != new_name:
                self.structure_helper.rename_project_structure(current_proj.name, new_name)
            current_proj.name = new_name
            current_proj.description = data["description"]
        # Commit to make sure we have a valid ID
        current_proj.refresh_update_date()
        self.structure_helper.write_project_metadata(current_proj)
        current_proj = dao.store_entity(current_proj)

        # Retrieve, to initialize lazy attributes
        current_proj = dao.get_project_by_id(current_proj.id)
        # Update share settings on current Project entity
        visited_pages = []
        prj_admin = current_proj.administrator.username
        if 'visited_pages' in data and data['visited_pages']:
            visited_pages = data['visited_pages'].split(',')
        for page in visited_pages:
            members = UserService.retrieve_users_except(prj_admin, int(page), MEMBERS_PAGE_SIZE)[0]
            members = [m.id for m in members]
            dao.delete_members_for_project(current_proj.id, members)

        selected_user_ids = data["users"]
        if is_create and current_user.id not in selected_user_ids:
            # Make the project admin also member of the current project
            selected_user_ids.append(current_user.id)
        dao.add_members_to_project(current_proj.id, selected_user_ids)
        # Finish operation
        self.logger.debug("Edit/Save OK for project:" + str(current_proj.id) + ' by user:'******'-'
                result["count"] = one_op[2]
                result["gid"] = one_op[13]
                if one_op[3] is not None and one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                        result["datatype_group_gid"] = datatype_group.gid
                        result["gid"] = operation_group.gid
                        ## Filter only viewers for current DataTypeGroup entity:
                        result["view_groups"] = FlowService().get_visualizers_for_group(datatype_group.gid)
                    except Exception:
                        self.logger.exception("We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["user"] = dao.get_user_by_id(one_op[5])
                if type(one_op[6]) is str:
                    result["create"] = string2date(str(one_op[6]))
                else:
                    result["create"] = one_op[6]
                if type(one_op[7]) is str:
                    result["start"] = string2date(str(one_op[7]))
                else:
                    result["start"] = one_op[7]
                if type(one_op[8]) is str:
                    result["complete"] = string2date(str(one_op[8]))
                else:
                    result["complete"] = one_op[8]

                if result["complete"] is not None and result["start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] - result["start"])
                result["status"] = one_op[9]
                result["additional"] = one_op[10]
                result["visible"] = True if one_op[11] > 0 else False
                result['operation_tag'] = one_op[12]
                result['figures'] = None
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(result['id'])
                    result['results'] = []
                    for dt in datatype_results:
                        dt_loaded = ABCAdapter.load_entity_by_gid(dt.gid)
                        if dt_loaded:
                            result['results'].append(dt_loaded)
                        else:
                            self.logger.warning("Could not retrieve datatype %s" % str(dt))

                    operation_figures = dao.get_figures_for_operation(result['id'])

                    # Compute the full path to the figure / image on disk
                    for figure in operation_figures:
                        figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                        figure_full_path = os.path.join(figures_folder, figure.file_path)
                        # Compute the path available from browser
                        figure.figure_path = utils.path2url_part(figure_full_path)

                    result['figures'] = operation_figures
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception("Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no

    def retrieve_projects_for_user(self, user_id, current_page=1):
        """
        Return a list with all Projects visible for current user.
        """
        start_idx = PROJECTS_PAGE_SIZE * (current_page - 1)
        total = dao.get_projects_for_user(user_id, is_count=True)
        available_projects = dao.get_projects_for_user(user_id, start_idx, PROJECTS_PAGE_SIZE)
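        # Ceiling division: a final partial page still counts as one page.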
        pages_no = total // PROJECTS_PAGE_SIZE + (1 if total % PROJECTS_PAGE_SIZE else 0)
        for prj in available_projects:
            fns, sta, err, canceled, pending = dao.get_operation_numbers(prj.id)
            prj.operations_finished = fns
            prj.operations_started = sta
            prj.operations_error = err
            prj.operations_canceled = canceled
            prj.operations_pending = pending
            prj.disk_size = dao.get_project_disk_size(prj.id)
            prj.disk_size_human = format_bytes_human(prj.disk_size)
        self.logger.debug("Displaying " + str(len(available_projects)) + " projects in UI for user " + str(user_id))
        return available_projects, pages_no

    @staticmethod
    def retrieve_all_user_projects(user_id, page_start=0, page_size=PROJECTS_PAGE_SIZE):
        """
        Return a list with all projects visible for the current user, without pagination metadata.
        """
        return dao.get_projects_for_user(user_id, page_start=page_start, page_size=page_size)

    @staticmethod
    def get_linkable_projects_for_user(user_id, data_id):
        """
        Find projects which are visible for the current user and into which the given datatype has not been linked yet.
        """
        return dao.get_linkable_projects_for_user(user_id, data_id)

    @transactional
    def remove_project(self, project_id):
        """
        Remove Project from DB and File Storage.
        """
        try:
            project2delete = dao.get_project_by_id(project_id)

            self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name)
            project_bursts = dao.get_bursts_for_project(project_id)
            for burst in project_bursts:
                dao.remove_entity(burst.__class__, burst.id)

            project_datatypes = dao.get_datatypes_in_project(project_id)
            for one_data in project_datatypes:
                self.remove_datatype(project_id, one_data.gid, True)

            links = dao.get_links_for_project(project_id)
            for one_link in links:
                dao.remove_entity(Links, one_link.id)

            self.structure_helper.remove_project_structure(project2delete.name)
            dao.delete_project(project_id)
            self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name)

        except RemoveDataTypeException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            raise ProjectServiceException(str(excep))
        except FileStructureException as excep:
            self.logger.exception("Could not delete because of rights!")
            raise ProjectServiceException(str(excep))
        except Exception as excep:
            self.logger.exception(str(excep))
            raise ProjectServiceException(str(excep))

    # ----------------- Methods for populating Data-Structure Page ---------------

    @staticmethod
    def get_datatype_in_group(group):
        """
        Return all dataTypes that are the result of the same DTgroup.
        """
        return dao.get_datatype_in_group(datatype_group_id=group)

    @staticmethod
    def get_datatypes_from_datatype_group(datatype_group_id):
        """
        Retrieve all dataTypes which are part of the given dataType group.
        """
        return dao.get_datatypes_from_datatype_group(datatype_group_id)

    @staticmethod
    def load_operation_by_gid(operation_gid):
        """ Retrieve loaded Operation from DB"""
        return dao.get_operation_by_gid(operation_gid)

    @staticmethod
    def load_operation_lazy_by_gid(operation_gid):
        """ Retrieve lazy Operation from DB"""
        return dao.get_operation_lazy_by_gid(operation_gid)

    @staticmethod
    def get_operation_group_by_id(operation_group_id):
        """ Loads OperationGroup from DB"""
        return dao.get_operationgroup_by_id(operation_group_id)

    @staticmethod
    def get_operation_group_by_gid(operation_group_gid):
        """ Loads OperationGroup from DB"""
        return dao.get_operationgroup_by_gid(operation_group_gid)

    @staticmethod
    def get_operations_in_group(operation_group):
        """ Return all the operations from an operation group. """
        return dao.get_operations_in_group(operation_group.id)

    @staticmethod
    def is_upload_operation(operation_gid):
        """ Returns True only if the operation with the given GID is an upload operation. """
        return dao.is_upload_operation(operation_gid)

    @staticmethod
    def get_all_operations_for_uploaders(project_id):
        """ Returns all finished upload operations. """
        return dao.get_all_operations_for_uploaders(project_id)

    def set_operation_and_group_visibility(self, entity_gid, is_visible, is_operation_group=False):
        """
        Sets the operation visibility.

        If 'is_operation_group' is True, then this method will change the visibility for all
        the operations from the OperationGroup with the GID field equal to 'entity_gid'.
        """

        def set_visibility(op):
            # workaround:
            # 'reload' the operation so that it has the project property set.
            # get_operations_in_group does not eager load it and now we're out of a sqlalchemy session
            # write_operation_metadata requires that property
            op = dao.get_operation_by_id(op.id)
            # end hack
            op.visible = is_visible
            self.structure_helper.write_operation_metadata(op)
            dao.store_entity(op)

        def set_group_descendants_visibility(operation_group_id):
            ops_in_group = dao.get_operations_in_group(operation_group_id)
            for group_op in ops_in_group:
                set_visibility(group_op)

        if is_operation_group:
            op_group_id = dao.get_operationgroup_by_gid(entity_gid).id
            set_group_descendants_visibility(op_group_id)
        else:
            operation = dao.get_operation_by_gid(entity_gid)
            # ensure that if the operation belongs to a group, the visibility is changed for the entire group
            if operation.fk_operation_group is not None:
                set_group_descendants_visibility(operation.fk_operation_group)
            else:
                set_visibility(operation)

    def get_operation_details(self, operation_gid, is_group):
        """
        :returns: an entity OperationOverlayDetails filled with all information for current operation details.
        """

        if is_group:
            operation_group = self.get_operation_group_by_gid(operation_gid)
            operation = dao.get_operations_in_group(operation_group.id, False, True)
            # Reload, to make sure all lazy attributes are populated as well.
            operation = dao.get_operation_by_gid(operation.gid)
            no_of_op_in_group = dao.get_operations_in_group(operation_group.id, is_count=True)
            datatype_group = self.get_datatypegroup_by_op_group_id(operation_group.id)
            count_result = dao.count_datatypes_in_group(datatype_group.id)

        else:
            operation = dao.get_operation_by_gid(operation_gid)
            if operation is None:
                return None
            no_of_op_in_group = 1
            count_result = dao.count_resulted_datatypes(operation.id)

        user_display_name = dao.get_user_by_id(operation.fk_launched_by).display_name
        burst = dao.get_burst_for_operation_id(operation.id)
        datatypes_param, all_special_params = self._review_operation_inputs(operation.gid)

        op_pid = dao.get_operation_process_for_operation(operation.id)
        op_details = OperationOverlayDetails(operation, user_display_name, len(datatypes_param),
                                             count_result, burst, no_of_op_in_group, op_pid)

        # Add all parameters which were set differently by the user on this Operation.
        if all_special_params is not None:
            op_details.add_scientific_fields(all_special_params)
        return op_details

    @staticmethod
    def get_filterable_meta():
        """
        Contains all the attributes by which
        the user can structure the tree of DataTypes
        """
        return DataTypeMetaData.get_filterable_meta()

    def get_project_structure(self, project, visibility_filter, first_level, second_level, filter_value):
        """
        Find all DataTypes (including the linked ones and the groups) relevant for the current project.
        In case of a problem, will return an empty list.
        """
        metadata_list = []
        dt_list = dao.get_data_in_project(project.id, visibility_filter, filter_value)

        for dt in dt_list:
            # Prepare the DT results from DB, for usage in controller, by converting into DataTypeMetaData objects
            data = {}
            is_group = False
            group_op = None
            dt_entity = dao.get_datatype_by_gid(dt.gid)
            if dt_entity is None:
                self.logger.warning("Ignored entity (possibly removed DT class)" + str(dt))
                continue
            #  Filter by dt.type, otherwise Links to individual DT inside a group will be mistaken
            if dt.type == "DataTypeGroup" and dt.parent_operation.operation_group is not None:
                is_group = True
                group_op = dt.parent_operation.operation_group

            # All these fields are necessary here for dynamic Tree levels.
            data[DataTypeMetaData.KEY_DATATYPE_ID] = dt.id
            data[DataTypeMetaData.KEY_GID] = dt.gid
            data[DataTypeMetaData.KEY_NODE_TYPE] = dt.display_type
            data[DataTypeMetaData.KEY_STATE] = dt.state
            data[DataTypeMetaData.KEY_SUBJECT] = str(dt.subject)
            data[DataTypeMetaData.KEY_TITLE] = dt_entity.display_name
            data[DataTypeMetaData.KEY_RELEVANCY] = dt.visible
            data[DataTypeMetaData.KEY_LINK] = dt.parent_operation.fk_launched_in != project.id

            data[DataTypeMetaData.KEY_TAG_1] = dt.user_tag_1 if dt.user_tag_1 else ''
            data[DataTypeMetaData.KEY_TAG_2] = dt.user_tag_2 if dt.user_tag_2 else ''
            data[DataTypeMetaData.KEY_TAG_3] = dt.user_tag_3 if dt.user_tag_3 else ''
            data[DataTypeMetaData.KEY_TAG_4] = dt.user_tag_4 if dt.user_tag_4 else ''
            data[DataTypeMetaData.KEY_TAG_5] = dt.user_tag_5 if dt.user_tag_5 else ''

            # Operation related fields:
            operation_name = CommonDetails.compute_operation_name(
                dt.parent_operation.algorithm.algorithm_category.displayname,
                dt.parent_operation.algorithm.displayname)
            data[DataTypeMetaData.KEY_OPERATION_TYPE] = operation_name
            data[DataTypeMetaData.KEY_OPERATION_ALGORITHM] = dt.parent_operation.algorithm.displayname
            data[DataTypeMetaData.KEY_AUTHOR] = dt.parent_operation.user.username
            data[DataTypeMetaData.KEY_OPERATION_TAG] = group_op.name if is_group else dt.parent_operation.user_group
            data[DataTypeMetaData.KEY_OP_GROUP_ID] = group_op.id if is_group else None

            completion_date = dt.parent_operation.completion_date
            string_year = completion_date.strftime(MONTH_YEAR_FORMAT) if completion_date is not None else ""
            string_month = completion_date.strftime(DAY_MONTH_YEAR_FORMAT) if completion_date is not None else ""
            data[DataTypeMetaData.KEY_DATE] = date2string(completion_date) if (completion_date is not None) else ''
            data[DataTypeMetaData.KEY_CREATE_DATA_MONTH] = string_year
            data[DataTypeMetaData.KEY_CREATE_DATA_DAY] = string_month

            data[DataTypeMetaData.KEY_BURST] = dt._parent_burst.name if dt._parent_burst is not None else '-None-'

            metadata_list.append(DataTypeMetaData(data, dt.invalid))

        return StructureNode.metadata2tree(metadata_list, first_level, second_level, project.id, project.name)

    @staticmethod
    def get_datatype_details(datatype_gid):
        """
        :returns: a tuple. The first entry is an instance of DataTypeOverlayDetails,
            the second contains all possible states for the specified dataType,
            and the third is the DataType entity itself (or None when it can not be loaded).

        """
        meta_atts = DataTypeOverlayDetails()
        states = DataTypeMetaData.STATES
        try:
            datatype_result = dao.get_datatype_details(datatype_gid)
            meta_atts.fill_from_datatype(datatype_result, datatype_result._parent_burst)
            return meta_atts, states, datatype_result
        except Exception:
            ## We ignore the exception here and simply return no details.
            return meta_atts, states, None

    def _remove_project_node_files(self, project_id, gid, skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem, raises StructureException.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)
            links = dao.get_links_for_datatype(datatype.id)
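            # Three cases: the datatype is merely linked into this project (drop the link);
            # it lives here but is linked elsewhere (move it, with a cloned Operation, into the
            # first linked project); or it has no links at all (remove it and its file for good).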
            if links:
                was_link = False
                for link in links:
                    # This means it's only a link and we need to remove it
                    if link.fk_from_datatype == datatype.id and link.fk_to_project == project.id:
                        dao.remove_entity(Links, link.id)
                        was_link = True
                if not was_link:
                    # Create a clone of the operation
                    new_op = Operation(dao.get_system_user().id,
                                       links[0].fk_to_project,
                                       datatype.parent_operation.fk_from_algo,
                                       datatype.parent_operation.parameters,
                                       datatype.parent_operation.meta_data,
                                       datatype.parent_operation.status,
                                       datatype.parent_operation.start_date,
                                       datatype.parent_operation.completion_date,
                                       datatype.parent_operation.fk_operation_group,
                                       datatype.parent_operation.additional_info,
                                       datatype.parent_operation.user_group,
                                       datatype.parent_operation.range_values)
                    new_op = dao.store_entity(new_op)
                    to_project = self.find_project(links[0].fk_to_project).name
                    new_op_loaded = dao.get_operation_by_id(new_op.id)
                    self.structure_helper.write_operation_metadata(new_op_loaded)
                    full_path = h5.path_for_stored_index(datatype)
                    self.structure_helper.move_datatype(datatype, to_project, str(new_op.id), full_path)
                    datatype.fk_from_operation = new_op.id
                    datatype.parent_operation = new_op
                    dao.store_entity(datatype)
                    dao.remove_entity(Links, links[0].id)
            else:
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)
                h5_path = h5.path_for_stored_index(datatype)
                self.structure_helper.remove_datatype_file(h5_path)

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException("Remove operation failed for unknown reasons.Please contact system administrator.")

    def remove_operation(self, operation_id):
        """
        Remove a given operation
        """
        operation = dao.try_get_operation_by_id(operation_id)
        if operation is not None:
            self.logger.debug("Deleting operation %s " % operation)
            datatypes_for_op = dao.get_results_for_operation(operation_id)
            for dt in reversed(datatypes_for_op):
                self.remove_datatype(operation.project.id, dt.gid, False)
            dao.remove_entity(Operation, operation.id)
            self.structure_helper.remove_operation_data(operation.project.name, operation_id)
            self.logger.debug("Finished deleting operation %s " % operation)
        else:
            self.logger.warning("Attempt to delete operation with id=%s which no longer exists." % operation_id)

    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DatatypeGroup
        or a dataType from a DataTypeGroup, then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
            return

        is_datatype_group = False
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

        operations_set = [datatype.fk_from_operation]

        correct = True

        if is_datatype_group:
            self.logger.debug("Removing datatype group %s" % datatype)
            data_list = dao.get_datatypes_from_datatype_group(datatype.id)
            for adata in data_list:
                self._remove_project_node_files(project_id, adata.gid, skip_validation)
                if adata.fk_from_operation not in operations_set:
                    operations_set.append(adata.fk_from_operation)

            datatype_group = dao.get_datatype_group_by_gid(datatype.gid)
            dao.remove_datatype(datatype_gid)
            correct = correct and dao.remove_entity(OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid, skip_validation)

        ## Remove Operation entities in case no other DataType needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(DataType, operation_id, "fk_from_operation")
            if len(dependent_dt) > 0:
                ### Do not remove the Operation while DataTypes still exist referring to it.
                continue
            correct = correct and dao.remove_entity(Operation, operation_id)
            ## Make sure the Operation folder is removed
            self.structure_helper.remove_operation_data(project.name, operation_id)

        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))

    def update_metadata(self, submit_data):
        """
        Update DataType / DataTypeGroup meta-data.
        Raises StructureException when the input data is invalid.
        """
        new_data = dict()
        for key in DataTypeOverlayDetails().meta_attributes_list:
            if key in submit_data:
                new_data[key] = submit_data[key]

        if new_data[CommonDetails.CODE_OPERATION_TAG] == '':
            new_data[CommonDetails.CODE_OPERATION_TAG] = None
        try:
            if (CommonDetails.CODE_OPERATION_GROUP_ID in new_data
                    and new_data[CommonDetails.CODE_OPERATION_GROUP_ID]):
                # We need to edit a group
                group_id = new_data[CommonDetails.CODE_OPERATION_GROUP_ID]
                all_data_in_group = dao.get_datatype_in_group(operation_group_id=group_id)
                if len(all_data_in_group) < 1:
                    raise StructureException("Inconsistent group, can not be updated!")
                datatype_group = dao.get_generic_entity(DataTypeGroup, all_data_in_group[0].fk_datatype_group)[0]
                all_data_in_group.append(datatype_group)
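                # Apply the same meta-data to every member and to the DataTypeGroup entity itself.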
                for datatype in all_data_in_group:
                    new_data[CommonDetails.CODE_GID] = datatype.gid
                    self._edit_data(datatype, new_data, True)
            else:
                # Get the required DataType and operation from DB to store changes that will be done in XML.
                gid = new_data[CommonDetails.CODE_GID]
                datatype = dao.get_datatype_by_gid(gid)
                self._edit_data(datatype, new_data)
        except Exception as excep:
            self.logger.exception(excep)
            raise StructureException(str(excep))

    def _edit_data(self, datatype, new_data, from_group=False):
        """
        Private method, used for editing a meta-data XML file and a DataType row
        for a given custom DataType entity with new dictionary of data from UI.
        """
        from tvb.basic.traits.types_mapped import MappedType

        if isinstance(datatype, MappedType) and not os.path.exists(datatype.get_storage_file_path()):
            if not datatype.invalid:
                datatype.invalid = True
                dao.store_entity(datatype)
            return
        # 1. First update Operation fields:
        #    Update group field if possible
        new_group_name = new_data[CommonDetails.CODE_OPERATION_TAG]
        empty_group_value = (new_group_name is None or new_group_name == "")
        if from_group:
            if empty_group_value:
                raise StructureException("Empty group is not allowed!")

            group = dao.get_generic_entity(OperationGroup, new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
            if group and len(group) > 0 and new_group_name != group[0].name:
                group = group[0]
                exists_group = dao.get_generic_entity(OperationGroup, new_group_name, 'name')
                if exists_group:
                    raise StructureException("Group '" + new_group_name + "' already exists.")
                group.name = new_group_name
                dao.store_entity(group)
        else:
            operation = dao.get_operation_by_id(datatype.fk_from_operation)
            operation.user_group = new_group_name
            dao.store_entity(operation)

        # 2. Update DataType fields:
        datatype.subject = new_data[DataTypeOverlayDetails.DATA_SUBJECT]
        datatype.state = new_data[DataTypeOverlayDetails.DATA_STATE]
        if DataTypeOverlayDetails.DATA_TAG_1 in new_data:
            datatype.user_tag_1 = new_data[DataTypeOverlayDetails.DATA_TAG_1]
        if DataTypeOverlayDetails.DATA_TAG_2 in new_data:
            datatype.user_tag_2 = new_data[DataTypeOverlayDetails.DATA_TAG_2]
        if DataTypeOverlayDetails.DATA_TAG_3 in new_data:
            datatype.user_tag_3 = new_data[DataTypeOverlayDetails.DATA_TAG_3]
        if DataTypeOverlayDetails.DATA_TAG_4 in new_data:
            datatype.user_tag_4 = new_data[DataTypeOverlayDetails.DATA_TAG_4]
        if DataTypeOverlayDetails.DATA_TAG_5 in new_data:
            datatype.user_tag_5 = new_data[DataTypeOverlayDetails.DATA_TAG_5]

        datatype = dao.store_entity(datatype)
        # 3. Update MetaData in H5 as well.
        datatype.persist_full_metadata()
        # 4. Update the group_name/user_group into the operation meta-data file
        operation = dao.get_operation_by_id(datatype.fk_from_operation)
        self.structure_helper.update_operation_metadata(operation.project.name, new_group_name,
                                                        str(datatype.fk_from_operation), from_group)

    def get_datatype_and_datatypegroup_inputs_for_operation(self, operation_gid, selected_filter):
        """
        Returns the dataTypes that are used as input parameters for the given operation.
        'selected_filter' - is expected to be a visibility filter.

        If any dataType is part of a dataType group then the dataType group will
        be returned instead of that dataType.
        """
        all_datatypes = self._review_operation_inputs(operation_gid)[0]
        datatype_inputs = []
        for datatype in all_datatypes:
            if selected_filter.display_name == StaticFiltersFactory.RELEVANT_VIEW:
                if datatype.visible:
                    datatype_inputs.append(datatype)
            else:
                datatype_inputs.append(datatype)
        datatypes = []
        datatype_groups = dict()
        for data_type in datatype_inputs:
            if data_type.fk_datatype_group is None:
                datatypes.append(data_type)
            elif data_type.fk_datatype_group not in datatype_groups:
                dt_group = dao.get_datatype_by_id(data_type.fk_datatype_group)
                datatype_groups[data_type.fk_datatype_group] = dt_group

        datatypes.extend([v for _, v in six.iteritems(datatype_groups)])
        return datatypes
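
    # Illustration of the collapsing rule above: if the operation inputs are
    # [dtA (no group), dtB (group G), dtC (group G)], the returned list is
    # [dtA, G] -- the grouped datatypes are replaced by their DataTypeGroup.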

    def _review_operation_inputs(self, operation_gid):
        """
        :returns: A list of DataTypes that are used as input parameters for the specified operation,
                 and a dictionary with all operation parameters that differ from the default ones.
        """
        operation = dao.get_operation_by_gid(operation_gid)
        parameters = json.loads(operation.parameters)
        try:
            adapter = ABCAdapter.build_adapter(operation.algorithm)
            return adapter.review_operation_inputs(parameters)

        except Exception:
            self.logger.exception("Could not load details for operation %s" % operation_gid)
            inputs_datatypes = []
            changed_parameters = dict(Warning="Algorithm changed dramatically. We cannot offer more details")
            for submit_param in parameters.values():
                self.logger.debug("Searching DT by GID %s" % submit_param)
                datatype = ABCAdapter.load_entity_by_gid(str(submit_param))
                if datatype is not None:
                    inputs_datatypes.append(datatype)
            return inputs_datatypes, changed_parameters

    def get_datatypes_inputs_for_operation_group(self, group_id, selected_filter):
        """
        Returns the dataType inputs for an operation group. If more dataTypes
        are part of the same dataType group then only the dataType group will
        be returned instead of them.
        """
        operations_gids = dao.get_operations_in_group(group_id, only_gids=True)
        op_group_inputs = dict()
        for gid in operations_gids:
            op_inputs = self.get_datatype_and_datatypegroup_inputs_for_operation(gid[0], selected_filter)
            for datatype in op_inputs:
                op_group_inputs[datatype.id] = datatype
        return list(op_group_inputs.values())

    @staticmethod
    def get_results_for_operation(operation_id, selected_filter=None):
        """
        Retrieve the DataTypes entities resulted after the execution of the given operation.
        """
        return dao.get_results_for_operation(operation_id, selected_filter)

    @staticmethod
    def get_operations_for_datatype_group(datatype_group_id, visibility_filter, only_in_groups=False):
        """
        Returns all the operations which use as an input parameter a dataType from the given DataTypeGroup.
        visibility_filter - is a filter used for retrieving all the operations or only the relevant ones.

        If only_in_groups is True then this method will return only the operations that are
        part of an operation group, otherwise it will return only the operations that
        are NOT part of an operation group.
        """
        if visibility_filter.display_name != StaticFiltersFactory.RELEVANT_VIEW:
            return dao.get_operations_for_datatype_group(datatype_group_id, only_relevant=False,
                                                         only_in_groups=only_in_groups)
        return dao.get_operations_for_datatype_group(datatype_group_id, only_in_groups=only_in_groups)

    @staticmethod
    def get_operations_for_datatype(datatype_gid, visibility_filter, only_in_groups=False):
        """
        Returns all the operations which use as an input parameter the dataType with the specified GID.

        If only_in_groups is True then this method will return only the operations that are part
        of an operation group, otherwise it will return only the operations that are NOT part of an operation group.
        """
        if visibility_filter.display_name != StaticFiltersFactory.RELEVANT_VIEW:
            return dao.get_operations_for_datatype(datatype_gid, only_relevant=False, only_in_groups=only_in_groups)
        return dao.get_operations_for_datatype(datatype_gid, only_in_groups=only_in_groups)

    @staticmethod
    def get_datatype_by_id(datatype_id):
        """Retrieve a DataType DB reference by its id."""
        return dao.get_datatype_by_id(datatype_id)

    @staticmethod
    def get_datatypegroup_by_gid(datatypegroup_gid):
        """ Returns the DataTypeGroup with the specified gid. """
        return dao.get_datatype_group_by_gid(datatypegroup_gid)

    @staticmethod
    def count_datatypes_generated_from(datatype_gid):
        """
        Count all the datatypes resulting from operations that had as
        input the datatype given by 'datatype_gid'.
        """
        return dao.count_datatypes_generated_from(datatype_gid)

    @staticmethod
    def get_datatypegroup_by_op_group_id(operation_group_id):
        """ Returns the DataTypeGroup with the specified id. """
        return dao.get_datatypegroup_by_op_group_id(operation_group_id)

    @staticmethod
    def get_datatypes_in_project(project_id, only_visible=False):
        return dao.get_data_in_project(project_id, only_visible)


    @staticmethod
    def set_datatype_visibility(datatype_gid, is_visible):
        """
        Sets the dataType visibility. If the given dataType is a dataType group, or is part of a
        dataType group, then this method will set the visibility for each dataType of that group.
        """

        def set_visibility(dt):
            """ set visibility flag, persist in db and h5"""
            dt.visible = is_visible
            dt = dao.store_entity(dt)
            dt.persist_full_metadata()

        def set_group_descendants_visibility(datatype_group_id):
            datatypes_in_group = dao.get_datatypes_from_datatype_group(datatype_group_id)
            for group_dt in datatypes_in_group:
                set_visibility(group_dt)

        datatype = dao.get_datatype_by_gid(datatype_gid)

        if isinstance(datatype, DataTypeGroup):  # datatype is a group
            set_group_descendants_visibility(datatype.id)
        elif datatype.fk_datatype_group is not None:  # datatype is member of a group
            set_group_descendants_visibility(datatype.fk_datatype_group)
            # the datatype to be updated is the parent datatype group
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

        # update the datatype or datatype group.
        set_visibility(datatype)

    @staticmethod
    def is_datatype_group(datatype_gid):
        """ Used to check if the dataType with the specified GID is a DataTypeGroup. """
        return dao.is_datatype_group(datatype_gid)
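

# A minimal, illustrative sketch (not part of the original service): driving
# ProjectService.update_metadata() for a single, non-grouped DataType. The
# overlay keys mirror the ones read above; `datatype_gid` is a placeholder and
# `_example_update_datatype_metadata` is a hypothetical helper name.
def _example_update_datatype_metadata(project_service, datatype_gid):
    submit_data = {
        DataTypeOverlayDetails.DATA_SUBJECT: "subject_01",
        DataTypeOverlayDetails.DATA_STATE: "INTERMEDIATE",
        CommonDetails.CODE_GID: datatype_gid,
        CommonDetails.CODE_OPERATION_TAG: "",  # an empty tag is normalized to None above
    }
    project_service.update_metadata(submit_data)
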
class FilesHelperTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.entities.file.files_helper module.
    """ 
    PROJECT_NAME = "test_proj"
           
           
    def setUp(self):
        """
        Set up the context needed by the tests.
        """
        self.files_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, self.PROJECT_NAME)
    
    
    def tearDown(self):
        """ Remove generated project during tests. """
        self.delete_project_folders()
    
    
    def test_check_created(self):
        """ Test standard flows for check created. """
        self.files_helper.check_created()
        self.assertTrue(os.path.exists(root_storage), "Storage not created!")
        
        self.files_helper.check_created(os.path.join(root_storage, "test"))
        self.assertTrue(os.path.exists(root_storage), "Storage not created!")
        self.assertTrue(os.path.exists(os.path.join(root_storage, "test")), "Test directory not created!")
            
    
    def test_get_project_folder(self):
        """
        Test the get_project_folder method which should create a folder in case
        it doesn't already exist.
        """
        project_path = self.files_helper.get_project_folder(self.test_project)
        self.assertTrue(os.path.exists(project_path), "Folder doesn't exist")
        
        folder_path = self.files_helper.get_project_folder(self.test_project, "43")
        self.assertTrue(os.path.exists(project_path), "Folder doesn't exist")
        self.assertTrue(os.path.exists(folder_path), "Folder doesn't exist")
        
   
    def test_rename_project_structure(self):
        """ Try to rename the folder structure of a project. Standard flow. """
        self.files_helper.get_project_folder(self.test_project)
        path, name = self.files_helper.rename_project_structure(self.test_project.name, "new_name")
        self.assertNotEqual(path, name, "Rename didn't take effect.")


    def test_rename_structure_same_name(self):
        """ Try to rename the folder structure of a project. Same name. """
        self.files_helper.get_project_folder(self.test_project)
        
        self.assertRaises(FileStructureException, self.files_helper.rename_project_structure, 
                          self.test_project.name, self.PROJECT_NAME)


    def test_remove_project_structure(self):
        """ Check that remove project structure deletes the corresponding folder. Standard flow. """
        full_path = self.files_helper.get_project_folder(self.test_project)
        self.assertTrue(os.path.exists(full_path), "Folder was not created.")
        
        self.files_helper.remove_project_structure(self.test_project.name)
        self.assertFalse(os.path.exists(full_path), "Project folder not deleted.")
        
    
    def test_write_project_metadata(self):
        """  Write XML for test-project. """
        self.files_helper.write_project_metadata(self.test_project)
        expected_file = self.files_helper.get_project_meta_file_path(self.PROJECT_NAME)
        self.assertTrue(os.path.exists(expected_file))
        project_meta = XMLReader(expected_file).read_metadata()
        loaded_project = model.Project(None, None)
        loaded_project.from_dict(project_meta, self.test_user.id)
        self.assertEqual(self.test_project.name, loaded_project.name)
        self.assertEqual(self.test_project.description, loaded_project.description)
        self.assertEqual(self.test_project.gid, loaded_project.gid)
        expected_dict = self.test_project.to_dict()[1]
        del expected_dict['last_updated']
        found_dict = loaded_project.to_dict()[1]
        del found_dict['last_updated']
        self.assertDictContainsSubset(expected_dict, found_dict)
        self.assertDictContainsSubset(found_dict, expected_dict)
    
    
    def test_write_operation_metadata(self):
        """
        Test that a correct XML is created for an operation.
        """
        operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        expected_file = self.files_helper.get_operation_meta_file_path(self.PROJECT_NAME, operation.id)
        self.assertFalse(os.path.exists(expected_file))
        self.files_helper.write_operation_metadata(operation)
        self.assertTrue(os.path.exists(expected_file))
        operation_meta = XMLReader(expected_file).read_metadata()
        loaded_operation = model.Operation(None, None, None, None)
        loaded_operation.from_dict(operation_meta, dao)
        expected_dict = operation.to_dict()[1]
        found_dict = loaded_operation.to_dict()[1]
        for key, value in expected_dict.items():
            self.assertEqual(str(value), str(found_dict[key]))
        # Now validate that operation metaData can be also updated
        self.assertNotEqual("new_group_name", found_dict['user_group'])
        self.files_helper.update_operation_metadata(self.PROJECT_NAME, "new_group_name", operation.id) 
        found_dict = XMLReader(expected_file).read_metadata()  
        self.assertEqual("new_group_name", found_dict['user_group'])
        
    
    def test_remove_dt_happy_flow(self):
        """
        Happy flow for removing a file related to a DataType.
        """
        folder_path = self.files_helper.get_project_folder(self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        open(datatype.get_storage_file_path(), 'w').close()
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()), "Test file was not created!")
        self.files_helper.remove_datatype(datatype) 
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()), "Test file was not deleted!")      
        
        
    def test_remove_dt_non_existent(self):
        """
        Try to call remove on a dataType with no H5 file.
        Should work.
        """
        folder_path = self.files_helper.get_project_folder(self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()))
        self.files_helper.remove_datatype(datatype)
        

    def test_move_datatype(self):
        """
        Make sure associated H5 file is moved to a correct new location.
        """
        folder_path = self.files_helper.get_project_folder(self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        open(datatype.get_storage_file_path(), 'w').close()
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()), "Test file was not created!")
        self.files_helper.move_datatype(datatype, self.PROJECT_NAME + '11', "43") 
        
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()), "Test file was not moved!")
        datatype.storage_path = self.files_helper.get_project_folder(self.PROJECT_NAME + '11', "43")
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()), "Test file was not created!")
        
        
    def test_find_relative_path(self):
        """
        Tests that relative path is computed properly.
        """
        rel_path = self.files_helper.find_relative_path("/root/up/to/here/test/it/now", "/root/up/to/here")
        self.assertEqual(rel_path, os.sep.join(["test", "it", "now"]), "Did not extract relative path as expected.")
        
        
    def test_remove_files_valid(self):
        """
        Pass a valid list of files and check they are all removed.
        """
        file_list = ["test1", "test2", "test3"]
        for file_n in file_list:
            fp = open(file_n, 'w')
            fp.write('test')
            fp.close()
        for file_n in file_list:
            self.assertTrue(os.path.isfile(file_n))
        self.files_helper.remove_files(file_list)
        for file_n in file_list:
            self.assertFalse(os.path.isfile(file_n))


    def test_remove_folder(self):
        """
        Create a folder and check that remove_folder deletes it.
        """
        folder_name = "test_folder"
        os.mkdir(folder_name)
        self.assertTrue(os.path.isdir(folder_name), "Folder should be created.")
        self.files_helper.remove_folder(folder_name)
        self.assertFalse(os.path.isdir(folder_name), "Folder should be deleted.")
        
    def test_remove_folder_non_existing_ignore_exc(self):
        """
        Try to remove a non-existing folder while ignoring errors. Should not raise.
        """
        folder_name = "test_folder"
        self.assertFalse(os.path.isdir(folder_name), "Folder should not exist before call.")
        self.files_helper.remove_folder(folder_name, ignore_errors=True)
        
        
    def test_remove_folder_non_existing(self):
        """
        Try to remove a non-existing folder without ignoring errors. Should raise.
        """
        folder_name = "test_folder"
        self.assertFalse(os.path.isdir(folder_name), "Folder should not exist before call.")
        self.assertRaises(FileStructureException, self.files_helper.remove_folder, folder_name, False)
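

# Hedged usage sketch, mirroring only FilesHelper calls exercised in the tests
# above: create a project's folder on demand, write its XML metadata, then
# remove the whole structure. `_example_project_folder_lifecycle` is a
# hypothetical helper name.
def _example_project_folder_lifecycle(project):
    helper = FilesHelper()
    folder = helper.get_project_folder(project)  # created if missing
    helper.write_project_metadata(project)
    helper.remove_project_structure(project.name)
    return folder
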
class TestProjectService(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.services.project_service module.
    """
    def transactional_setup_method(self):
        """
        Reset the database before each test.
        """
        self.project_service = ProjectService()
        self.structure_helper = FilesHelper()
        self.test_user = TestFactory.create_user()

    def transactional_teardown_method(self):
        """
        Remove project folders and clean up database.
        """
        created_projects = dao.get_projects_for_user(self.test_user.id)
        for project in created_projects:
            self.structure_helper.remove_project_structure(project.name)
        self.delete_project_folders()

    def test_create_project_happy_flow(self):
        """
        Happy flow for creating a new project with two extra members.
        """
        user1 = TestFactory.create_user('test_user1')
        user2 = TestFactory.create_user('test_user2')
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(initial_projects) == 0, "Database reset probably failed!"

        TestFactory.create_project(self.test_user,
                                   'test_project',
                                   "description",
                                   users=[user1.id, user2.id])

        resulting_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(
            resulting_projects) == 1, "Project with valid data not inserted!"
        project = resulting_projects[0]
        assert project.name == "test_project", "Invalid retrieved project name"
        assert project.description == "description", "Description does not match"

        users_for_project = dao.get_members_of_project(project.id)
        for user in users_for_project:
            assert user.id in [user1.id, user2.id,
                               self.test_user.id], "Users not stored properly."
        assert os.path.exists(
            os.path.join(TvbProfile.current.TVB_STORAGE,
                         FilesHelper.PROJECTS_FOLDER,
                         "test_project")), "Folder for project was not created"

    def test_create_project_empty_name(self):
        """
        Creating a project with an empty name.
        """
        data = dict(name="", description="test_description", users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(initial_projects) == 0, "Database reset probably failed!"
        with pytest.raises(ProjectServiceException):
            self.project_service.store_project(self.test_user, True, None,
                                               **data)

    def test_edit_project_happy_flow(self):
        """
        Standard flow for editing an existing project.
        """
        selected_project = TestFactory.create_project(self.test_user,
                                                      'test_proj')
        proj_root = self.structure_helper.get_project_folder(selected_project)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(
            initial_projects) == 1, "Database initialization probably failed!"

        edited_data = dict(name="test_project",
                           description="test_description",
                           users=[])
        edited_project = self.project_service.store_project(
            self.test_user, False, selected_project.id, **edited_data)
        assert not os.path.exists(proj_root), "Previous folder not deleted"
        proj_root = self.structure_helper.get_project_folder(edited_project)
        assert os.path.exists(proj_root), "New folder not created!"
        assert selected_project.name != edited_project.name, "Project was not changed!"

    def test_edit_project_unexisting(self):
        """
        Trying to edit a non-existing project.
        """
        selected_project = TestFactory.create_project(self.test_user,
                                                      'test_proj')
        self.structure_helper.get_project_folder(selected_project)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(
            initial_projects) == 1, "Database initialization probably failed!"
        data = dict(name="test_project",
                    description="test_description",
                    users=[])
        with pytest.raises(ProjectServiceException):
            self.project_service.store_project(self.test_user, False, 99,
                                               **data)

    def test_find_project_happy_flow(self):
        """
        Standard flow for finding a project by its id.
        """
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(initial_projects) == 0, "Database reset probably failed!"
        inserted_project = TestFactory.create_project(self.test_user,
                                                      'test_project')
        assert self.project_service.find_project(
            inserted_project.id) is not None, "Project not found !"
        dao_returned_project = dao.get_project_by_id(inserted_project.id)
        service_returned_project = self.project_service.find_project(
            inserted_project.id)
        assert dao_returned_project.id == service_returned_project.id, \
            "Data returned from service is different from data returned by DAO."
        assert dao_returned_project.name == service_returned_project.name, \
            "Data returned from service is different from data returned by DAO."
        assert dao_returned_project.description == service_returned_project.description, \
            "Data returned from service is different from data returned by DAO."
        assert dao_returned_project.members == service_returned_project.members, \
            "Data returned from service is different from data returned by DAO."

    def test_find_project_unexisting(self):
        """
        Searching for a non-existing project.
        """
        data = dict(name="test_project",
                    description="test_description",
                    users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(initial_projects) == 0, "Database reset probably failed!"
        project = self.project_service.store_project(self.test_user, True,
                                                     None, **data)
        # fetch a likely non-existing project. Previous project id plus a 'big' offset
        with pytest.raises(ProjectServiceException):
            self.project_service.find_project(project.id + 1033)

    def test_retrieve_projects_for_user(self):
        """
        Test for retrieving the projects for a given user. One page only.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        assert len(initial_projects) == 0, "Database was not reset properly!"
        TestFactory.create_project(self.test_user, 'test_proj')
        TestFactory.create_project(self.test_user, 'test_proj1')
        TestFactory.create_project(self.test_user, 'test_proj2')
        user1 = TestFactory.create_user('another_user')
        TestFactory.create_project(user1, 'test_proj3')
        projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        assert len(projects) == 3, "Projects not retrieved properly!"
        for project in projects:
            assert project.name != "test_proj3", "This project should not have been retrieved"

    def test_retrieve_1project_3usr(self):
        """
        One user as admin, two users as members; retrieving the projects for the
        admin and for each of the members should return exactly one project.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        TestFactory.create_project(self.test_user,
                                   'Testproject',
                                   users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 1)[0]
        assert len(projects) == 1, "Projects not retrieved properly!"
        projects = self.project_service.retrieve_projects_for_user(
            member1.id, 1)[0]
        assert len(projects) == 1, "Projects not retrieved properly!"
        projects = self.project_service.retrieve_projects_for_user(
            member2.id, 1)[0]
        assert len(projects) == 1, "Projects not retrieved properly!"

    def test_retrieve_3projects_3usr(self):
        """
        Three users, 3 projects. Structure of db:
        proj1: {admin: user1, members: [user2, user3]}
        proj2: {admin: user2, members: [user1]}
        proj3: {admin: user3, members: [user1, user2]}
        Check valid project returns for all the users.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        member3 = TestFactory.create_user("member3")
        TestFactory.create_project(member1,
                                   'TestProject1',
                                   users=[member2.id, member3.id])
        TestFactory.create_project(member2, 'TestProject2', users=[member1.id])
        TestFactory.create_project(member3,
                                   'TestProject3',
                                   users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(
            member1.id, 1)[0]
        assert len(projects) == 3, "Projects not retrieved properly!"
        projects = self.project_service.retrieve_projects_for_user(
            member2.id, 1)[0]
        assert len(projects) == 3, "Projects not retrieved properly!"
        projects = self.project_service.retrieve_projects_for_user(
            member3.id, 1)[0]
        assert len(projects) == 2, "Projects not retrieved properly!"

    def test_retrieve_projects_random(self):
        """
        Generate a large number of users/projects, and validate the results.
        """
        ExtremeTestFactory.generate_users(NR_USERS, MAX_PROJ_PER_USER)
        for i in range(NR_USERS):
            current_user = dao.get_user_by_name("gen" + str(i))
            expected_projects = ExtremeTestFactory.VALIDATION_DICT[
                current_user.id]
            if expected_projects % PROJECTS_PAGE_SIZE == 0:
                expected_pages = expected_projects // PROJECTS_PAGE_SIZE
                exp_proj_per_page = PROJECTS_PAGE_SIZE
            else:
                expected_pages = expected_projects // PROJECTS_PAGE_SIZE + 1
                exp_proj_per_page = expected_projects % PROJECTS_PAGE_SIZE
            if expected_projects == 0:
                expected_pages = 0
                exp_proj_per_page = 0
            projects, pages = self.project_service.retrieve_projects_for_user(
                current_user.id, expected_pages)
            assert len(projects) == exp_proj_per_page, "Projects not retrieved properly! Expected: " + \
                                                       str(exp_proj_per_page) + " but got: " + str(len(projects))
            assert pages == expected_pages, "Pages not retrieved properly!"

        for folder in os.listdir(TvbProfile.current.TVB_STORAGE):
            full_path = os.path.join(TvbProfile.current.TVB_STORAGE, folder)
            if os.path.isdir(full_path) and folder.startswith('Generated'):
                shutil.rmtree(full_path)

    def test_retrieve_projects_page2(self):
        """
        Test for retrieving the second page projects for a given user.
        """
        for i in range(PROJECTS_PAGE_SIZE + 3):
            TestFactory.create_project(self.test_user, 'test_proj' + str(i))
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 2)
        assert len(projects) == (PROJECTS_PAGE_SIZE + 3
                                 ) % PROJECTS_PAGE_SIZE, "Pagination improper."
        assert pages == 2, 'Wrong number of pages retrieved.'
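
    # Worked example of the arithmetic above (assuming, hypothetically, that
    # PROJECTS_PAGE_SIZE == 20): creating PROJECTS_PAGE_SIZE + 3 == 23 projects
    # leaves 23 % 20 == 3 entries on page 2, out of 2 pages in total.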

    def test_retrieve_projects_and_del(self):
        """
        Test that pagination stays correct after deleting a project.
        """
        created_projects = []
        for i in range(PROJECTS_PAGE_SIZE + 1):
            created_projects.append(
                TestFactory.create_project(self.test_user,
                                           'test_proj' + str(i)))
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 2)
        assert len(projects) == (PROJECTS_PAGE_SIZE + 1
                                 ) % PROJECTS_PAGE_SIZE, "Pagination improper."
        assert pages == (PROJECTS_PAGE_SIZE +
                         1) // PROJECTS_PAGE_SIZE + 1, 'Wrong number of pages'
        self.project_service.remove_project(created_projects[1].id)
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 2)
        assert len(projects) == 0, "Pagination improper."
        assert pages == 1, 'Wrong number of pages retrieved.'
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 1)
        assert len(projects) == PROJECTS_PAGE_SIZE, "Pagination improper."
        assert pages == 1, 'Wrong number of pages retrieved.'

    def test_empty_project_has_zero_disk_size(self):
        TestFactory.create_project(self.test_user, 'test_proj')
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id)
        assert 0 == projects[0].disk_size
        assert '0.0 KiB' == projects[0].disk_size_human

    def test_project_disk_size(self):
        project1 = TestFactory.create_project(self.test_user, 'test_proj1')
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(self.test_user, project1, zip_path,
                                            'testSubject')

        project2 = TestFactory.create_project(self.test_user, 'test_proj2')
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_76.zip')
        TestFactory.import_zip_connectivity(self.test_user, project2, zip_path,
                                            'testSubject')

        projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        assert projects[0].disk_size != projects[
            1].disk_size, "projects should have different size"

        for project in projects:
            assert 0 != project.disk_size
            assert '0.0 KiB' != project.disk_size_human

            prj_folder = self.structure_helper.get_project_folder(project)
            actual_disk_size = FilesHelper.compute_recursive_h5_disk_usage(
                prj_folder)

            ratio = float(actual_disk_size) / project.disk_size
            msg = "Real disk usage: %s The one recorded in the db : %s" % (
                actual_disk_size, project.disk_size)
            assert ratio < 1.1, msg

    def test_get_linkable_projects(self):
        """
        Test retrieving the projects a given datatype can be linked into.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        assert len(initial_projects) == 0, "Database was not reset!"
        test_proj = []
        user1 = TestFactory.create_user("another_user")
        for i in range(4):
            test_proj.append(
                TestFactory.create_project(self.test_user if i < 3 else user1,
                                           'test_proj' + str(i)))
        operation = TestFactory.create_operation(test_user=self.test_user,
                                                 test_project=test_proj[0])
        datatype = dao.store_entity(
            model_datatype.DataType(module="test_data",
                                    subject="subj1",
                                    state="test_state",
                                    operation_id=operation.id))

        linkable = self.project_service.get_linkable_projects_for_user(
            self.test_user.id, str(datatype.id))[0]

        assert len(linkable) == 2, "Wrong count of link-able projects!"
        proj_names = [project.name for project in linkable]
        assert test_proj[1].name in proj_names
        assert test_proj[2].name in proj_names
        assert test_proj[3].name not in proj_names

    def test_remove_project_happy_flow(self):
        """
        Standard flow for deleting a project.
        """
        inserted_project = TestFactory.create_project(self.test_user,
                                                      'test_proj')
        project_root = self.structure_helper.get_project_folder(
            inserted_project)
        projects = dao.get_projects_for_user(self.test_user.id)
        assert len(projects) == 1, "Initializations failed!"
        assert os.path.exists(project_root), "Something failed at insert time!"
        self.project_service.remove_project(inserted_project.id)
        projects = dao.get_projects_for_user(self.test_user.id)
        assert len(projects) == 0, "Project was not deleted!"
        assert not os.path.exists(project_root), "Root folder not deleted!"

    def test_remove_project_wrong_id(self):
        """
        Flow for deleting a project by giving a non-existing id.
        """
        TestFactory.create_project(self.test_user, 'test_proj')
        projects = dao.get_projects_for_user(self.test_user.id)
        assert len(projects) == 1, "Initializations failed!"
        with pytest.raises(ProjectServiceException):
            self.project_service.remove_project(99)

    def __check_meta_data(self, expected_meta_data, new_datatype):
        """Validate Meta-Data"""
        mapp_keys = {
            DataTypeOverlayDetails.DATA_SUBJECT: "subject",
            DataTypeOverlayDetails.DATA_STATE: "state"
        }
        for key, value in expected_meta_data.items():
            if key in mapp_keys:
                assert value == getattr(new_datatype, mapp_keys[key])
            elif key == DataTypeMetaData.KEY_OPERATION_TAG:
                if DataTypeMetaData.KEY_OP_GROUP_ID in expected_meta_data:
                    # We have a Group to check
                    op_group = new_datatype.parent_operation.fk_operation_group
                    op_group = dao.get_generic_entity(
                        model_operation.OperationGroup, op_group)[0]
                    assert value == op_group.name
                else:
                    assert value == new_datatype.parent_operation.user_group

    def test_remove_project_node(self):
        """
        Test removing of a node from a project.
        """
        inserted_project, gid, op = TestFactory.create_value_wrapper(
            self.test_user)
        project_to_link = model_project.Project("Link", self.test_user.id,
                                                "descript")
        project_to_link = dao.store_entity(project_to_link)
        exact_data = dao.get_datatype_by_gid(gid)
        assert exact_data is not None, "Initialization problem!"
        dao.store_entity(
            model_datatype.Links(exact_data.id, project_to_link.id))

        vw_h5_path = h5.path_for_stored_index(exact_data)
        assert os.path.exists(vw_h5_path)

        if dao.get_system_user() is None:
            dao.store_entity(
                model_operation.User(
                    TvbProfile.current.web.admin.SYSTEM_USER_NAME,
                    TvbProfile.current.web.admin.SYSTEM_USER_NAME, None, None,
                    True, None))

        self.project_service._remove_project_node_files(
            inserted_project.id, gid)

        assert not os.path.exists(vw_h5_path)
        exact_data = dao.get_datatype_by_gid(gid)
        assert exact_data is not None, "Data should still be in DB, because of links"
        vw_h5_path_new = h5.path_for_stored_index(exact_data)
        assert os.path.exists(vw_h5_path_new)
        assert vw_h5_path_new != vw_h5_path

        self.project_service._remove_project_node_files(
            project_to_link.id, gid)
        assert dao.get_datatype_by_gid(gid) is None

    def test_update_meta_data_simple(self):
        """
        Test updating the metadata of a simple DataType that is not part of a group.
        """
        inserted_project, gid, _ = TestFactory.create_value_wrapper(
            self.test_user)
        new_meta_data = {
            DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
            DataTypeOverlayDetails.DATA_STATE: "second_state",
            DataTypeOverlayDetails.CODE_GID: gid,
            DataTypeOverlayDetails.CODE_OPERATION_TAG: 'new user group'
        }
        self.project_service.update_metadata(new_meta_data)

        new_datatype = dao.get_datatype_by_gid(gid)
        self.__check_meta_data(new_meta_data, new_datatype)

        new_datatype_h5 = h5.h5_file_for_index(new_datatype)
        assert new_datatype_h5.subject.load() == 'new subject', 'Subject not updated!'

    def test_update_meta_data_group(self, test_adapter_factory):
        """
        Test updating the metadata of a group of dataTypes.
        """
        test_adapter_factory(adapter_class=TestAdapter3)
        op_group_id = TestFactory.create_group(test_user=self.test_user)[1]

        new_meta_data = {
            DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
            DataTypeOverlayDetails.DATA_STATE: "updated_state",
            DataTypeOverlayDetails.CODE_OPERATION_GROUP_ID: op_group_id,
            DataTypeOverlayDetails.CODE_OPERATION_TAG: 'newGroupName'
        }
        self.project_service.update_metadata(new_meta_data)
        datatypes = dao.get_datatype_in_group(op_group_id)
        for datatype in datatypes:
            new_datatype = dao.get_datatype_by_id(datatype.id)
            assert op_group_id == new_datatype.parent_operation.fk_operation_group
            new_group = dao.get_generic_entity(model_operation.OperationGroup,
                                               op_group_id)[0]
            assert new_group.name == "newGroupName"
            self.__check_meta_data(new_meta_data, new_datatype)

    def test_retrieve_project_full(self, dummy_datatype_index_factory):
        """
        Tests full project information is retrieved by method `ProjectService.retrieve_project_full(...)`
        """

        project = TestFactory.create_project(self.test_user)
        operation = TestFactory.create_operation(test_user=self.test_user,
                                                 test_project=project)

        dummy_datatype_index_factory(project=project, operation=operation)
        dummy_datatype_index_factory(project=project, operation=operation)
        dummy_datatype_index_factory(project=project, operation=operation)

        _, ops_nr, operations, pages_no = self.project_service.retrieve_project_full(
            project.id)
        assert ops_nr == 1, "DataType Factory should only use one operation to store all its datatypes."
        assert pages_no == 1, "A single page of operations is expected."
        resulted_dts = operations[0]['results']
        assert len(resulted_dts) == 3, "3 datatypes should be created."

    def test_get_project_structure(self, datatype_group_factory,
                                   dummy_datatype_index_factory,
                                   project_factory, user_factory):
        """
        Tests project structure is as expected and contains all datatypes and created links
        """
        user = user_factory()
        project1 = project_factory(user, name="TestPS1")
        project2 = project_factory(user, name="TestPS2")

        dt_group = datatype_group_factory(project=project1)
        dt_simple = dummy_datatype_index_factory(state="RAW_DATA",
                                                 project=project1)
        # Create 3 DTs directly in Project 2
        dummy_datatype_index_factory(state="RAW_DATA", project=project2)
        dummy_datatype_index_factory(state="RAW_DATA", project=project2)
        dummy_datatype_index_factory(state="RAW_DATA", project=project2)

        # Create Links from Project 1 into Project 2
        link_ids, expected_links = [], []
        link_ids.append(dt_simple.id)
        expected_links.append(dt_simple.gid)

        # Prepare links towards a full DT Group, but expecting only the DT_Group in the final tree
        dts = dao.get_datatype_in_group(datatype_group_id=dt_group.id)
        link_ids.extend([dt_to_link.id for dt_to_link in dts])
        link_ids.append(dt_group.id)
        expected_links.append(dt_group.gid)

        # Actually create the links from Prj1 into Prj2
        AlgorithmService().create_link(link_ids, project2.id)

        # Retrieve the raw data used to compose the tree (for easy parsing)
        dts_in_tree = dao.get_data_in_project(project2.id)
        dts_in_tree = [dt.gid for dt in dts_in_tree]
        # Retrieve the tree json (for trivial validations only, as we can not decode)
        node_json = self.project_service.get_project_structure(
            project2, None, DataTypeMetaData.KEY_STATE,
            DataTypeMetaData.KEY_SUBJECT, None)

        assert len(expected_links) + 3 == len(
            dts_in_tree), "invalid number of nodes in tree"
        assert dt_group.gid in dts_in_tree, "DT_Group should be in the Project Tree!"
        assert dt_group.gid in node_json, "DT_Group should be in the Project Tree JSON!"

        project_dts = dao.get_datatypes_in_project(project2.id)
        for dt in project_dts:
            if dt.fk_datatype_group is not None:
                assert dt.gid not in node_json, "DTs part of a group should not be in the tree JSON"
                assert dt.gid not in dts_in_tree, "DTs part of a group should not be in the tree"
            else:
                assert dt.gid in node_json, "Simple DTs and DT_Groups should be in the tree JSON"
                assert dt.gid in dts_in_tree, "Simple DTs and DT_Groups should be in the tree"

        for link_gid in expected_links:
            assert link_gid in node_json, "Expected Link not present"
            assert link_gid in dts_in_tree, "Expected Link not present"
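

# Hedged sketch of the linking flow validated above: create links from a list
# of datatype ids into a target project, then re-read that project's tree.
# Arguments are placeholders; the two service calls are used exactly as in
# test_get_project_structure.
def _example_link_and_read_tree(datatype_ids, target_project):
    AlgorithmService().create_link(datatype_ids, target_project.id)
    return ProjectService().get_project_structure(
        target_project, None, DataTypeMetaData.KEY_STATE,
        DataTypeMetaData.KEY_SUBJECT, None)
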
class TestSimulationResource(TransactionalTestCase):
    def transactional_setup_method(self):
        self.test_user = TestFactory.create_user('Rest_User')
        self.test_project = TestFactory.create_project(self.test_user,
                                                       'Rest_Project')
        self.simulation_resource = FireSimulationResource()
        self.files_helper = FilesHelper()

    def test_server_fire_simulation_inexistent_gid(self, mocker):
        project_gid = "inexistent-gid"
        dummy_file = FileStorage(BytesIO(b"test"), 'test.zip')
        # Mock flask.request.files to return a dictionary
        request_mock = mocker.patch.object(flask, 'request')
        request_mock.files = {'file': dummy_file}

        with pytest.raises(InvalidIdentifierException):
            self.simulation_resource.post(project_gid)

    def test_server_fire_simulation_no_file(self, mocker):
        # Mock flask.request.files to return a dictionary
        request_mock = mocker.patch.object(flask, 'request')
        request_mock.files = {}

        with pytest.raises(BadRequestException):
            self.simulation_resource.post('')

    def test_server_fire_simulation_bad_extension(self, mocker):
        dummy_file = FileStorage(BytesIO(b"test"), 'test.txt')
        # Mock flask.request.files to return a dictionary
        request_mock = mocker.patch.object(flask, 'request')
        request_mock.files = {'file': dummy_file}

        with pytest.raises(BadRequestException):
            self.simulation_resource.post('')

    def test_server_fire_simulation(self, mocker, connectivity_factory):
        input_folder = self.files_helper.get_project_folder(self.test_project)
        sim_dir = os.path.join(input_folder, 'test_sim')
        if not os.path.isdir(sim_dir):
            os.makedirs(sim_dir)

        simulator = Simulator()
        simulator.connectivity = connectivity_factory()
        sim_serializer = SimulatorSerializer()
        sim_serializer.serialize_simulator(simulator, simulator.gid.hex, None,
                                           sim_dir)

        zip_filename = shutil.make_archive(sim_dir, 'zip', input_folder)

        # Mock flask.request.files to return a dictionary
        request_mock = mocker.patch.object(flask, 'request')
        fp = open(zip_filename, 'rb')
        request_mock.files = {
            'file': FileStorage(fp, os.path.basename(zip_filename))
        }

        def launch_sim(self, user_id, project, algorithm, zip_folder_path,
                       simulator_file):
            return Operation('', '', '', {})

        # Mock simulation launch
        mocker.patch.object(SimulatorService, 'prepare_simulation_on_server',
                            launch_sim)

        operation_gid, status = self.simulation_resource.post(
            self.test_project.gid)
        fp.close()

        assert type(operation_gid) is str
        assert status == 201

    def transactional_teardown_method(self):
        self.files_helper.remove_project_structure(self.test_project.name)
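

# Hedged sketch of the request mocking used throughout the tests above: the
# REST resource reads flask.request.files['file'], so a FileStorage built from
# any byte stream stands in for a real upload. `_example_make_upload` is a
# hypothetical helper name.
def _example_make_upload(payload=b"test", filename='test.zip'):
    return {'file': FileStorage(BytesIO(payload), filename)}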


class TestFilesHelper(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.entities.file.files_helper module.
    """ 
    PROJECT_NAME = "test_proj"

    def transactional_setup_method(self):
        """
        Set up the context needed by the tests.
        """
        self.files_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, self.PROJECT_NAME)

    def transactional_teardown_method(self):
        """ Remove generated project during tests. """
        self.delete_project_folders()

    def test_check_created(self):
        """ Test standard flows for check created. """
        self.files_helper.check_created()
        assert os.path.exists(root_storage), "Storage not created!"
        
        self.files_helper.check_created(os.path.join(root_storage, "test"))
        assert os.path.exists(root_storage), "Storage not created!"
        assert os.path.exists(os.path.join(root_storage, "test")), "Test directory not created!"

    def test_get_project_folder(self):
        """
        Test the get_project_folder method which should create a folder in case
        it doesn't already exist.
        """
        project_path = self.files_helper.get_project_folder(self.test_project)
        assert os.path.exists(project_path), "Folder doesn't exist"
        
        folder_path = self.files_helper.get_project_folder(self.test_project, "43")
        assert os.path.exists(project_path), "Folder doesn't exist"
        assert os.path.exists(folder_path), "Folder doesn't exist"

    def test_rename_project_structure(self):
        """ Try to rename the folder structure of a project. Standard flow. """
        self.files_helper.get_project_folder(self.test_project)
        path, name = self.files_helper.rename_project_structure(self.test_project.name, "new_name")
        assert path != name, "Rename didn't take effect."

    def test_rename_structure_same_name(self):
        """ Try to rename the folder structure of a project. Same name. """
        self.files_helper.get_project_folder(self.test_project)
        
        with pytest.raises(FileStructureException):
            self.files_helper.rename_project_structure(self.test_project.name, self.PROJECT_NAME)

    def test_remove_project_structure(self):
        """ Check that remove project structure deletes the corresponding folder. Standard flow. """
        full_path = self.files_helper.get_project_folder(self.test_project)
        assert os.path.exists(full_path), "Folder was not created."
        
        self.files_helper.remove_project_structure(self.test_project.name)
        assert not os.path.exists(full_path), "Project folder not deleted."

    def test_write_project_metadata(self):
        """  Write XML for test-project. """
        self.files_helper.write_project_metadata(self.test_project)
        expected_file = self.files_helper.get_project_meta_file_path(self.PROJECT_NAME)
        assert os.path.exists(expected_file)
        project_meta = XMLReader(expected_file).read_metadata()
        loaded_project = model_project.Project(None, None)
        loaded_project.from_dict(project_meta, self.test_user.id)
        assert self.test_project.name == loaded_project.name
        assert self.test_project.description == loaded_project.description
        assert self.test_project.gid == loaded_project.gid
        expected_dict = self.test_project.to_dict()[1]
        del expected_dict['last_updated']
        found_dict = loaded_project.to_dict()[1]
        del found_dict['last_updated']
        self._dictContainsSubset(expected_dict, found_dict)
        self._dictContainsSubset(found_dict, expected_dict)

    def test_write_operation_metadata(self):
        """
        Test that a correct XML is created for an operation.
        """
        operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        expected_file = self.files_helper.get_operation_meta_file_path(self.PROJECT_NAME, operation.id)
        assert not os.path.exists(expected_file)
        self.files_helper.write_operation_metadata(operation)
        assert os.path.exists(expected_file)
        operation_meta = XMLReader(expected_file).read_metadata()
        loaded_operation = model_operation.Operation(None, None, None, None)
        loaded_operation.from_dict(operation_meta, dao, user_id=self.test_user.id)
        expected_dict = operation.to_dict()[1]
        found_dict = loaded_operation.to_dict()[1]
        for key, value in expected_dict.items():
            assert str(value) == str(found_dict[key])
        # Now validate that operation metaData can be also updated
        assert "new_group_name" != found_dict['user_group']
        self.files_helper.update_operation_metadata(self.PROJECT_NAME, "new_group_name", operation.id) 
        found_dict = XMLReader(expected_file).read_metadata()  
        assert "new_group_name" == found_dict['user_group']

    def test_remove_dt_happy_flow(self, dummy_datatype_index_factory):
        """
        Happy flow for removing a file related to a DataType.
        """
        datatype = dummy_datatype_index_factory()
        h5_path = h5.path_for_stored_index(datatype)
        assert os.path.exists(h5_path), "Test file was not created!"
        self.files_helper.remove_datatype_file(h5_path)
        assert not os.path.exists(h5_path), "Test file was not deleted!"

    def test_remove_dt_non_existent(self, dummy_datatype_index_factory):
        """
        Try to call remove on a dataType with no H5 file.
        Should work.
        """
        datatype = dummy_datatype_index_factory()
        h5_path = h5.path_for_stored_index(datatype)
        wrong_path = os.path.join(h5_path, "WRONG_PATH")
        assert not os.path.exists(wrong_path)
        self.files_helper.remove_datatype_file(wrong_path)

    def test_move_datatype(self, dummy_datatype_index_factory):
        """
        Make sure associated H5 file is moved to a correct new location.
        """
        datatype = dummy_datatype_index_factory(project=self.test_project)
        old_file_path = h5.path_for_stored_index(datatype)
        assert os.path.exists(old_file_path), "Test file was not created!"
        full_path = h5.path_for_stored_index(datatype)
        self.files_helper.move_datatype(datatype, self.PROJECT_NAME + '2', "1", full_path)
        
        assert not os.path.exists(old_file_path), "Test file was not moved!"
        datatype.fk_from_operation = 43
        new_file_path = os.path.join(self.files_helper.get_project_folder(self.PROJECT_NAME + '2', "1"),
                                     os.path.basename(old_file_path))
        assert os.path.exists(new_file_path), "Test file was not created!"

    def test_find_relative_path(self):
        """
        Tests that relative path is computed properly.
        """
        rel_path = self.files_helper.find_relative_path("/root/up/to/here/test/it/now", "/root/up/to/here")
        assert rel_path == os.sep.join(["test", "it", "now"]), "Did not extract relative path as expected."

    def test_remove_files_valid(self):
        """
        Pass a valid list of files and check they are all removed.
        """
        file_list = ["test1", "test2", "test3"]
        for file_n in file_list:
            fp = open(file_n, 'w')
            fp.write('test')
            fp.close()
        for file_n in file_list:
            assert os.path.isfile(file_n)
        self.files_helper.remove_files(file_list)
        for file_n in file_list:
            assert not os.path.isfile(file_n)

    def test_remove_folder(self):
        """
        Create a folder and check that remove_folder deletes it.
        """
        folder_name = "test_folder"
        os.mkdir(folder_name)
        assert os.path.isdir(folder_name), "Folder should be created."
        self.files_helper.remove_folder(folder_name)
        assert not os.path.isdir(folder_name), "Folder should be deleted."
        
    def test_remove_folder_non_existing_ignore_exc(self):
        """
        Try to remove a non-existing folder while ignoring errors. Should not raise.
        """
        folder_name = "test_folder"
        assert not os.path.isdir(folder_name), "Folder should not exist before call."
        self.files_helper.remove_folder(folder_name, ignore_errors=True)

    def test_remove_folder_non_existing(self):
        """
        Try to remove a non-existing folder without ignoring errors. Should raise.
        """
        folder_name = "test_folder"
        assert not os.path.isdir(folder_name), "Folder should not exist before call."
        with pytest.raises(FileStructureException):
            self.files_helper.remove_folder(folder_name, False)

    def _dictContainsSubset(self, expected, actual, msg=None):
        """Asserts that `actual` is a superset of `expected`."""
        for key, value in expected.items():
            assert key in actual, msg or "Key '%s' missing from actual dict" % key
            assert value == actual[key], msg or "Mismatched value for key '%s'" % key


class TestHPCSchedulerClient(BaseTestCase):
    def setup_method(self):
        self.files_helper = FilesHelper()
        self.encryption_handler = EncryptionHandler('123')
        self.clean_database()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)

    def _prepare_dummy_files(self, tmpdir):
        dummy_file1 = os.path.join(str(tmpdir), 'dummy1.txt')
        open(dummy_file1, 'a').close()
        dummy_file2 = os.path.join(str(tmpdir), 'dummy2.txt')
        open(dummy_file2, 'a').close()
        job_inputs = [dummy_file1, dummy_file2]
        return job_inputs

    def test_encrypt_inputs(self, tmpdir):
        job_inputs = self._prepare_dummy_files(tmpdir)
        job_encrypted_inputs = self.encryption_handler.encrypt_inputs(
            job_inputs)
        # Each plain input file should have exactly one encrypted counterpart
        assert len(job_encrypted_inputs) == len(job_inputs)

    def test_decrypt_results(self, tmpdir):
        # Prepare encrypted dir
        job_inputs = self._prepare_dummy_files(tmpdir)
        self.encryption_handler.encrypt_inputs(job_inputs)
        encrypted_dir = self.encryption_handler.get_encrypted_dir()

        # Decrypt data
        out_dir = os.path.join(str(tmpdir), 'output')
        os.mkdir(out_dir)
        self.encryption_handler.decrypt_results_to_dir(out_dir)
        list_plain_dir = os.listdir(out_dir)
        assert len(list_plain_dir) == len(os.listdir(encrypted_dir))
        assert 'dummy1.txt' in list_plain_dir
        assert 'dummy2.txt' in list_plain_dir

    def test_decrypt_files(self, tmpdir):
        # Prepare encrypted dir
        job_inputs = self._prepare_dummy_files(tmpdir)
        enc_files = self.encryption_handler.encrypt_inputs(job_inputs)

        # Decrypt data
        out_dir = os.path.join(str(tmpdir), 'output')
        os.mkdir(out_dir)
        self.encryption_handler.decrypt_files_to_dir([enc_files[1]], out_dir)
        list_plain_dir = os.listdir(out_dir)
        assert len(list_plain_dir) == 1
        assert os.path.basename(enc_files[0]).replace('.aes', '') not in list_plain_dir
        assert os.path.basename(enc_files[1]).replace('.aes', '') in list_plain_dir

    def test_do_operation_launch(self, simulator_factory, operation_factory,
                                 mocker):
        # Prepare a simulation operation
        op = operation_factory(test_user=self.test_user,
                               test_project=self.test_project)
        sim_folder, sim_gid = simulator_factory(op=op)

        self._do_operation_launch(op, sim_gid, mocker)

    def _do_operation_launch(self, op, sim_gid, mocker, is_pse=False):
        # Prepare encrypted dir
        self.encryption_handler = EncryptionHandler(sim_gid)
        job_encrypted_inputs = HPCSchedulerClient()._prepare_input(op, sim_gid)
        self.encryption_handler.encrypt_inputs(job_encrypted_inputs)
        encrypted_dir = self.encryption_handler.get_encrypted_dir()

        mocker.patch('tvb.core.operation_hpc_launcher._request_passfile',
                     _request_passfile_dummy)
        mocker.patch(
            'tvb.core.operation_hpc_launcher._update_operation_status',
            _update_operation_status)

        # Call do_operation_launch similarly to CSCS env
        plain_dir = self.files_helper.get_project_folder(
            self.test_project, 'plain')
        do_operation_launch(sim_gid.hex, 1000, is_pse, '', op.id, plain_dir)
        assert len(os.listdir(encrypted_dir)) == 7
        output_path = os.path.join(encrypted_dir,
                                   HPCSimulatorAdapter.OUTPUT_FOLDER)
        assert os.path.exists(output_path)
        expected_files = 2
        if is_pse:
            expected_files = 3
        assert len(os.listdir(output_path)) == expected_files
        return output_path

    def test_do_operation_launch_pse(self, simulator_factory,
                                     operation_factory, mocker):
        op = operation_factory(test_user=self.test_user,
                               test_project=self.test_project)
        sim_folder, sim_gid = simulator_factory(op=op)
        self._do_operation_launch(op, sim_gid, mocker, is_pse=True)

    def test_prepare_inputs(self, operation_factory, simulator_factory):
        op = operation_factory(test_user=self.test_user,
                               test_project=self.test_project)
        sim_folder, sim_gid = simulator_factory(op=op)
        hpc_client = HPCSchedulerClient()
        input_files = hpc_client._prepare_input(op, sim_gid)
        assert len(input_files) == 6

    def test_prepare_inputs_with_surface(self, operation_factory,
                                         simulator_factory):
        op = operation_factory(test_user=self.test_user,
                               test_project=self.test_project)
        sim_folder, sim_gid = simulator_factory(op=op, with_surface=True)
        hpc_client = HPCSchedulerClient()
        input_files = hpc_client._prepare_input(op, sim_gid)
        assert len(input_files) == 9

    def test_prepare_inputs_with_eeg_monitor(self, operation_factory,
                                             simulator_factory,
                                             surface_index_factory,
                                             sensors_index_factory,
                                             region_mapping_index_factory,
                                             connectivity_index_factory):
        surface_idx, surface = surface_index_factory(cortical=True)
        sensors_idx, sensors = sensors_index_factory()
        proj = ProjectionSurfaceEEG(sensors=sensors,
                                    sources=surface,
                                    projection_data=numpy.ones(3))

        op = operation_factory()
        storage_path = FilesHelper().get_project_folder(op.project, str(op.id))
        prj_db_db = h5.store_complete(proj, storage_path)
        prj_db_db.fk_from_operation = op.id
        dao.store_entity(prj_db_db)

        connectivity = connectivity_index_factory(76, op)
        rm_index = region_mapping_index_factory(conn_gid=connectivity.gid,
                                                surface_gid=surface_idx.gid)

        eeg_monitor = EEGViewModel(projection=proj.gid, sensors=sensors.gid)
        eeg_monitor.region_mapping = rm_index.gid

        sim_folder, sim_gid = simulator_factory(op=op,
                                                monitor=eeg_monitor,
                                                conn_gid=connectivity.gid)
        hpc_client = HPCSchedulerClient()
        input_files = hpc_client._prepare_input(op, sim_gid)
        assert len(input_files) == 11

    def test_stage_out_to_operation_folder(self, mocker, operation_factory,
                                           simulator_factory,
                                           pse_burst_configuration_factory):
        burst = pse_burst_configuration_factory(self.test_project)
        op = operation_factory(test_user=self.test_user,
                               test_project=self.test_project)
        op.fk_operation_group = burst.fk_operation_group
        dao.store_entity(op)

        sim_folder, sim_gid = simulator_factory(op=op)
        burst.simulator_gid = sim_gid.hex
        dao.store_entity(burst)

        output_path = self._do_operation_launch(op,
                                                sim_gid,
                                                mocker,
                                                is_pse=True)

        def _stage_out_dummy(dir, sim_gid):
            return [
                os.path.join(output_path, enc_file)
                for enc_file in os.listdir(output_path)
            ]

        mocker.patch.object(HPCSchedulerClient, '_stage_out_results',
                            _stage_out_dummy)
        sim_results_files, metric_op, metric_file = HPCSchedulerClient.stage_out_to_operation_folder(
            None, op, sim_gid)
        assert op.id != metric_op.id
        assert os.path.exists(metric_file)
        assert len(sim_results_files) == 1
        assert os.path.exists(sim_results_files[0])

    def teardown_method(self):
        encrypted_dir = self.encryption_handler.get_encrypted_dir()
        if os.path.isdir(encrypted_dir):
            shutil.rmtree(encrypted_dir)
        passfile = self.encryption_handler.get_password_file()
        if os.path.exists(passfile):
            os.remove(passfile)
        self.files_helper.remove_project_structure(self.test_project.name)
        self.clean_database()


class ProjectService:
    """
    Services layer for Project entities.
    """


    def __init__(self):
        self.logger = get_logger(__name__)
        self.structure_helper = FilesHelper()


    def store_project(self, current_user, is_create, selected_id, **data):
        """
        We want to create/update a project entity.
        """
        # Validate Unique Name
        new_name = data["name"]
        if len(new_name) < 1:
            raise ProjectServiceException("Invalid project name!")
        projects_no = dao.count_projects_for_name(new_name, selected_id)
        if projects_no > 0:
            err = {'name': 'Please choose another name, this one is used!'}
            raise formencode.Invalid("Duplicate Name Error", {}, None, error_dict=err)
        started_operations = dao.get_operation_numbers(selected_id)[1]
        if started_operations > 0:
            raise ProjectServiceException("A project can not be renamed while operations are still running!")
        if is_create:
            current_proj = model.Project(new_name, current_user.id, data["description"])
            self.structure_helper.get_project_folder(current_proj)
        else:
            try:
                current_proj = dao.get_project_by_id(selected_id)
            except Exception as excep:
                self.logger.exception("An error has occurred!")
                raise ProjectServiceException(str(excep))
            if current_proj.name != new_name:
                self.structure_helper.rename_project_structure(current_proj.name, new_name)
            current_proj.name = new_name
            current_proj.description = data["description"]
        # Commit to make sure we have a valid ID
        current_proj.refresh_update_date()
        self.structure_helper.write_project_metadata(current_proj)
        current_proj = dao.store_entity(current_proj)

        # Retrieve, to initialize lazy attributes
        current_proj = dao.get_project_by_id(current_proj.id)
        # Update share settings on current Project entity
        visited_pages = []
        prj_admin = current_proj.administrator.username
        if 'visited_pages' in data and data['visited_pages']:
            visited_pages = data['visited_pages'].split(',')
        for page in visited_pages:
            members = UserService.retrieve_all_users(prj_admin, int(page))[0]
            members = [m.id for m in members]
            dao.delete_members_for_project(current_proj.id, members)
        selected_user_ids = data["users"]
        dao.add_members_to_project(current_proj.id, selected_user_ids)
        # Finish operation
        self.logger.debug("Edit/Save OK for project:" + str(current_proj.id) + ' by user:'******'-'
                result["count"] = one_op[2]
                result["gid"] = one_op[13]
                if one_op[3] is not None and one_op[3]:
                    try:
                        operation_group = dao.get_generic_entity(model.OperationGroup, one_op[3])[0]
                        result["group"] = operation_group.name
                        result["group"] = result["group"].replace("_", " ")
                        result["operation_group_id"] = operation_group.id
                        datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                        result["datatype_group_gid"] = datatype_group.gid
                        result["gid"] = operation_group.gid
                        ## Filter only viewers for current DataTypeGroup entity:
                        result["view_groups"] = FlowService().get_visualizers_for_group(datatype_group.gid)
                    except Exception:
                        self.logger.exception("We will ignore group on entity:" + str(one_op))
                        result["datatype_group_gid"] = None
                else:
                    result['group'] = None
                    result['datatype_group_gid'] = None
                result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
                result["user"] = dao.get_user_by_id(one_op[5])
                if isinstance(one_op[6], str):
                    result["create"] = string2date(str(one_op[6]))
                else:
                    result["create"] = one_op[6]
                if isinstance(one_op[7], str):
                    result["start"] = string2date(str(one_op[7]))
                else:
                    result["start"] = one_op[7]
                if isinstance(one_op[8], str):
                    result["complete"] = string2date(str(one_op[8]))
                else:
                    result["complete"] = one_op[8]

                if result["complete"] is not None and result["start"] is not None:
                    result["duration"] = format_timedelta(result["complete"] - result["start"])
                result["status"] = one_op[9]
                result["additional"] = one_op[10]
                result["visible"] = True if one_op[11] > 0 else False
                result['operation_tag'] = one_op[12]
                result['figures'] = None
                if not result['group']:
                    datatype_results = dao.get_results_for_operation(result['id'])
                    result['results'] = []
                    for dt in datatype_results:
                        dt_loaded = ABCAdapter.load_entity_by_gid(dt.gid)
                        if dt_loaded:
                            result['results'].append(dt_loaded)
                        else:
                            self.logger.warning("Could not retrieve datatype %s" % str(dt))

                    operation_figures = dao.get_figures_for_operation(result['id'])

                    # Compute the full path to the figure / image on disk
                    for figure in operation_figures:
                        figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                        figure_full_path = os.path.join(figures_folder, figure.file_path)
                        # Compute the path available from browser
                        figure.figure_path = utils.path2url_part(figure_full_path)

                    result['figures'] = operation_figures
                else:
                    result['results'] = None
                operations.append(result)
            except Exception:
                ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
                self.logger.exception("Could not prepare operation for display:" + str(one_op))
        return selected_project, total_ops_nr, operations, pages_no


    def retrieve_projects_for_user(self, user_id, current_page=1):
        """
        Return a list with all Projects visible for current user.
        """
        start_idx = PROJECTS_PAGE_SIZE * (current_page - 1)
        total = dao.get_projects_for_user(user_id, is_count=True)
        available_projects = dao.get_projects_for_user(user_id, start_idx, PROJECTS_PAGE_SIZE)
        pages_no = total // PROJECTS_PAGE_SIZE + (1 if total % PROJECTS_PAGE_SIZE else 0)
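        # i.e. ceil(total / PROJECTS_PAGE_SIZE), computed without importing math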
        for prj in available_projects:
            fns, sta, err, canceled, pending = dao.get_operation_numbers(prj.id)
            prj.operations_finished = fns
            prj.operations_started = sta
            prj.operations_error = err
            prj.operations_canceled = canceled
            prj.operations_pending = pending
            prj.disk_size = dao.get_project_disk_size(prj.id)
            prj.disk_size_human = format_bytes_human(prj.disk_size)
        self.logger.debug("Displaying " + str(len(available_projects)) + " projects in UI for user " + str(user_id))
        return available_projects, pages_no
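
    # Sketch of how the paging above might be consumed (illustrative only,
    # assuming `user` is an already-persisted model.User):
    #   projects, pages_no = ProjectService().retrieve_projects_for_user(user.id, current_page=1)
    #   for prj in projects:
    #       print(prj.name, prj.operations_finished, prj.disk_size_human)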


    @staticmethod
    def get_linkable_projects_for_user(user_id, data_id):
        """
        Find projects which are visible for the current user, and in which the current datatype hasn't been linked yet.
        """
        return dao.get_linkable_projects_for_user(user_id, data_id)


    @transactional
    def remove_project(self, project_id):
        """
        Remove Project from DB and File Storage.
        """
        try:
            project2delete = dao.get_project_by_id(project_id)

            self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name)
            project_bursts = dao.get_bursts_for_project(project_id)
            for burst in project_bursts:
                dao.remove_entity(burst.__class__, burst.id)

            project_datatypes = dao.get_datatypes_in_project(project_id)
            for one_data in project_datatypes:
                self.remove_datatype(project_id, one_data.gid, True)

            links = dao.get_links_for_project(project_id)
            for one_link in links:
                dao.remove_entity(model.Links, one_link.id)

            self.structure_helper.remove_project_structure(project2delete.name)
            dao.delete_project(project_id)
            self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name)

        except RemoveDataTypeException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            raise ProjectServiceException(str(excep))
        except FileStructureException as excep:
            self.logger.exception("Could not delete because of rights!")
            raise ProjectServiceException(str(excep))
        except Exception as excep:
            self.logger.exception(str(excep))
            raise ProjectServiceException(str(excep))


    # ----------------- Methods for populating Data-Structure Page ---------------

    @staticmethod
    def get_datatype_in_group(group):
        """
        Return all dataTypes that are the result of the same DTgroup.
        """
        return dao.get_datatype_in_group(datatype_group_id=group)


    @staticmethod
    def get_datatypes_from_datatype_group(datatype_group_id):
        """
        Retrieve all dataTypes which are part of the given dataType group.
        """
        return dao.get_datatypes_from_datatype_group(datatype_group_id)


    @staticmethod
    def load_operation_by_gid(operation_gid):
        """ Retrieve loaded Operation from DB"""
        return dao.get_operation_by_gid(operation_gid)


    @staticmethod
    def get_operation_group_by_id(operation_group_id):
        """ Loads OperationGroup from DB"""
        return dao.get_operationgroup_by_id(operation_group_id)


    @staticmethod
    def get_operation_group_by_gid(operation_group_gid):
        """ Loads OperationGroup from DB"""
        return dao.get_operationgroup_by_gid(operation_group_gid)


    @staticmethod
    def get_operations_in_group(operation_group):
        """ Return all the operations from an operation group. """
        return dao.get_operations_in_group(operation_group.id)


    @staticmethod
    def is_upload_operation(operation_gid):
        """ Returns True only if the operation with the given GID is an upload operation. """
        return dao.is_upload_operation(operation_gid)


    @staticmethod
    def get_all_operations_for_uploaders(project_id):
        """ Returns all finished upload operations. """
        return dao.get_all_operations_for_uploaders(project_id)

    def set_operation_and_group_visibility(self, entity_gid, is_visible, is_operation_group=False):
        """
        Sets the operation visibility.

        If 'is_operation_group' is True, then this method will change the visibility for all
        the operations from the OperationGroup with the GID field equal to 'entity_gid'.
        """

        def set_visibility(op):
            # workaround:
            # 'reload' the operation so that it has the project property set.
            # get_operations_in_group does not eager load it and now we're out of a sqlalchemy session
            # write_operation_metadata requires that property
            op = dao.get_operation_by_id(op.id)
            # end hack
            op.visible = is_visible
            self.structure_helper.write_operation_metadata(op)
            dao.store_entity(op)

        def set_group_descendants_visibility(operation_group_id):
            ops_in_group = dao.get_operations_in_group(operation_group_id)
            for group_op in ops_in_group:
                set_visibility(group_op)

        if is_operation_group:
            op_group_id = dao.get_operationgroup_by_gid(entity_gid).id
            set_group_descendants_visibility(op_group_id)
        else:
            operation = dao.get_operation_by_gid(entity_gid)
            # we ensure that if the operation belongs to a group, then the visibility is changed for the entire group
            if operation.fk_operation_group is not None:
                set_group_descendants_visibility(operation.fk_operation_group)
            else:
                set_visibility(operation)
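
    # Illustrative call (hypothetical `group_gid`), hiding an entire operation group:
    #   ProjectService().set_operation_and_group_visibility(group_gid, False, is_operation_group=True)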


    def get_operation_details(self, operation_gid, is_group):
        """
        :returns: an entity OperationOverlayDetails filled with all information for current operation details.
        """

        if is_group:
            operation_group = self.get_operation_group_by_gid(operation_gid)
            operation = dao.get_operations_in_group(operation_group.id, False, True)
            ## Reload, to make sure all attributes lazy are populated as well.
            operation = dao.get_operation_by_gid(operation.gid)
            no_of_op_in_group = dao.get_operations_in_group(operation_group.id, is_count=True)
            datatype_group = self.get_datatypegroup_by_op_group_id(operation_group.id)
            count_result = dao.count_datatypes_in_group(datatype_group.id)

        else:
            operation = dao.get_operation_by_gid(operation_gid)
            if operation is None:
                return None
            no_of_op_in_group = 1
            count_result = dao.count_resulted_datatypes(operation.id)

        username = dao.get_user_by_id(operation.fk_launched_by).username
        burst = dao.get_burst_for_operation_id(operation.id)
        datatypes_param, all_special_params = self._review_operation_inputs(operation.gid)

        op_pid = dao.get_operation_process_for_operation(operation.id)
        op_details = OperationOverlayDetails(operation, username, len(datatypes_param),
                                             count_result, burst, no_of_op_in_group, op_pid)

        ## Add all parameters which are set differently by the user on this Operation.
        if all_special_params is not None:
            op_details.add_scientific_fields(all_special_params)
        return op_details


    @staticmethod
    def get_filterable_meta():
        """
        Contains all the attributes by which
        the user can structure the tree of DataTypes
        """
        return DataTypeMetaData.get_filterable_meta()


    def get_project_structure(self, project, visibility_filter, first_level, second_level, filter_value):
        """
        Find all DataTypes (including the linked ones and the groups) relevant for the current project.
        In case of a problem, will return an empty list.
        """
        metadata_list = []
        dt_list = dao.get_data_in_project(project.id, visibility_filter, filter_value)

        for dt in dt_list:
            # Prepare the DT results from DB, for usage in controller, by converting into DataTypeMetaData objects
            data = {}
            is_group = False
            group_op = None
            dt_entity = dao.get_datatype_by_gid(dt.gid)
            if dt_entity is None:
                self.logger.warning("Ignored entity (possibly removed DT class)" + str(dt))
                continue
            ## Filter by dt.type, otherwise Links to an individual DT inside a group would be mistaken for groups
            if dt.type == "DataTypeGroup" and dt.parent_operation.operation_group is not None:
                is_group = True
                group_op = dt.parent_operation.operation_group

            # All these fields are necessary here for dynamic Tree levels.
            data[DataTypeMetaData.KEY_DATATYPE_ID] = dt.id
            data[DataTypeMetaData.KEY_GID] = dt.gid
            data[DataTypeMetaData.KEY_NODE_TYPE] = dt.type
            data[DataTypeMetaData.KEY_STATE] = dt.state
            data[DataTypeMetaData.KEY_SUBJECT] = str(dt.subject)
            data[DataTypeMetaData.KEY_TITLE] = dt_entity.display_name
            data[DataTypeMetaData.KEY_RELEVANCY] = dt.visible
            data[DataTypeMetaData.KEY_LINK] = dt.parent_operation.fk_launched_in != project.id

            data[DataTypeMetaData.KEY_TAG_1] = dt.user_tag_1 if dt.user_tag_1 else ''
            data[DataTypeMetaData.KEY_TAG_2] = dt.user_tag_2 if dt.user_tag_2 else ''
            data[DataTypeMetaData.KEY_TAG_3] = dt.user_tag_3 if dt.user_tag_3 else ''
            data[DataTypeMetaData.KEY_TAG_4] = dt.user_tag_4 if dt.user_tag_4 else ''
            data[DataTypeMetaData.KEY_TAG_5] = dt.user_tag_5 if dt.user_tag_5 else ''

            # Operation related fields:
            operation_name = CommonDetails.compute_operation_name(
                dt.parent_operation.algorithm.algorithm_category.displayname,
                dt.parent_operation.algorithm.displayname)
            data[DataTypeMetaData.KEY_OPERATION_TYPE] = operation_name
            data[DataTypeMetaData.KEY_OPERATION_ALGORITHM] = dt.parent_operation.algorithm.displayname
            data[DataTypeMetaData.KEY_AUTHOR] = dt.parent_operation.user.username
            data[DataTypeMetaData.KEY_OPERATION_TAG] = group_op.name if is_group else dt.parent_operation.user_group
            data[DataTypeMetaData.KEY_OP_GROUP_ID] = group_op.id if is_group else None

            completion_date = dt.parent_operation.completion_date
            string_year = completion_date.strftime(MONTH_YEAR_FORMAT) if completion_date is not None else ""
            string_month = completion_date.strftime(DAY_MONTH_YEAR_FORMAT) if completion_date is not None else ""
            data[DataTypeMetaData.KEY_DATE] = date2string(completion_date) if (completion_date is not None) else ''
            data[DataTypeMetaData.KEY_CREATE_DATA_MONTH] = string_year
            data[DataTypeMetaData.KEY_CREATE_DATA_DAY] = string_month

            data[DataTypeMetaData.KEY_BURST] = dt._parent_burst.name if dt._parent_burst is not None else '-None-'

            metadata_list.append(DataTypeMetaData(data, dt.invalid))

        return StructureNode.metadata2tree(metadata_list, first_level, second_level, project.id, project.name)
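
    # Illustrative call (hypothetical arguments): grouping the tree first by state,
    # then by subject, with no extra text filter:
    #   tree = service.get_project_structure(project, visibility_filter,
    #                                        DataTypeMetaData.KEY_STATE,
    #                                        DataTypeMetaData.KEY_SUBJECT, None)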


    @staticmethod
    def get_datatype_details(datatype_gid):
        """
        :returns: a tuple. The first entry is an instance of DataTypeOverlayDetails; \
            the second contains all the possible states for the specified dataType; \
            the third is the DataType entity itself (or None when details could not be loaded).

        """
        meta_atts = DataTypeOverlayDetails()
        states = DataTypeMetaData.STATES
        try:
            datatype_result = dao.get_datatype_details(datatype_gid)
            meta_atts.fill_from_datatype(datatype_result, datatype_result._parent_burst)
            return meta_atts, states, datatype_result
        except Exception:
            ## We ignore the exception here and simply return no details.
            return meta_atts, states, None


    def _remove_project_node_files(self, project_id, gid, skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem will THROW StructureException.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)
            links = dao.get_links_for_datatype(datatype.id)
            if links:
                was_link = False
                for link in links:
                    # This means it's only a link and we need to remove it
                    if link.fk_from_datatype == datatype.id and link.fk_to_project == project.id:
                        dao.remove_entity(model.Links, link.id)
                        was_link = True
                if not was_link:
                    # Create a clone of the operation
                    new_op = model.Operation(dao.get_system_user().id,
                                             links[0].fk_to_project,
                                             datatype.parent_operation.fk_from_algo,
                                             datatype.parent_operation.parameters,
                                             datatype.parent_operation.meta_data,
                                             datatype.parent_operation.status,
                                             datatype.parent_operation.start_date,
                                             datatype.parent_operation.completion_date,
                                             datatype.parent_operation.fk_operation_group,
                                             datatype.parent_operation.additional_info,
                                             datatype.parent_operation.user_group,
                                             datatype.parent_operation.range_values)
                    new_op = dao.store_entity(new_op)
                    to_project = self.find_project(links[0].fk_to_project).name
                    new_op_loaded = dao.get_operation_by_id(new_op.id)
                    self.structure_helper.write_operation_metadata(new_op_loaded)
                    self.structure_helper.move_datatype(datatype, to_project, str(new_op.id))
                    datatype.set_operation_id(new_op.id)
                    datatype.parent_operation = new_op
                    dao.store_entity(datatype)
                    dao.remove_entity(model.Links, links[0].id)
            else:
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)
                self.structure_helper.remove_datatype(datatype)

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException("Remove operation failed for unknown reasons. Please contact the system administrator.")


    def remove_operation(self, operation_id):
        """
        Remove a given operation
        """
        operation = dao.try_get_operation_by_id(operation_id)
        if operation is not None:
            self.logger.debug("Deleting operation %s " % operation)
            datatypes_for_op = dao.get_results_for_operation(operation_id)
            for dt in reversed(datatypes_for_op):
                self.remove_datatype(operation.project.id, dt.gid, False)
            dao.remove_entity(model.Operation, operation.id)
            self.logger.debug("Finished deleting operation %s " % operation)
        else:
            self.logger.warning("Attempt to delete operation with id=%s which no longer exists." % operation_id)


    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DatatypeGroup
        or a dataType from a DataTypeGroup, then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
            return

        is_datatype_group = False
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

        operations_set = [datatype.fk_from_operation]

        correct = True

        if is_datatype_group:
            self.logger.debug("Removing datatype group %s" % datatype)
            data_list = dao.get_datatypes_from_datatype_group(datatype.id)
            for adata in data_list:
                self._remove_project_node_files(project_id, adata.gid, skip_validation)
                if adata.fk_from_operation not in operations_set:
                    operations_set.append(adata.fk_from_operation)

            datatype_group = dao.get_datatype_group_by_gid(datatype.gid)
            dao.remove_datatype(datatype_gid)
            correct = correct and dao.remove_entity(model.OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid, skip_validation)

        ## Remove Operation entity in case no other DataType needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(model.DataType, operation_id, "fk_from_operation")
            if len(dependent_dt) > 0:
                ### Do not remove Operation in case DataType still exist referring it.
                continue
            correct = correct and dao.remove_entity(model.Operation, operation_id)
            ## Make sure Operation folder is removed
            self.structure_helper.remove_operation_data(project.name, datatype.fk_from_operation)

        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))
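
    # Illustrative call (hypothetical ids): removing one member of a DataTypeGroup
    # cascades to the entire group and to any operations left without results:
    #   ProjectService().remove_datatype(project.id, datatype_gid, skip_validation=True)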


    def update_metadata(self, submit_data):
        """
        Update DataType/ DataTypeGroup metadata
        THROW StructureException when input data is invalid.
        """
        new_data = dict()
        for key in DataTypeOverlayDetails().meta_attributes_list:
            if key in submit_data:
                new_data[key] = submit_data[key]

        if new_data[CommonDetails.CODE_OPERATION_TAG] == '':
            new_data[CommonDetails.CODE_OPERATION_TAG] = None
        try:
            if (CommonDetails.CODE_OPERATION_GROUP_ID in new_data
                    and new_data[CommonDetails.CODE_OPERATION_GROUP_ID]
                    and new_data[CommonDetails.CODE_OPERATION_GROUP_ID] != ''):
                # We need to edit a group
                all_data_in_group = dao.get_datatype_in_group(operation_group_id=
                                                              new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
                if len(all_data_in_group) < 1:
                    raise StructureException("Inconsistent group, can not be updated!")
                datatype_group = dao.get_generic_entity(model.DataTypeGroup, all_data_in_group[0].fk_datatype_group)[0]
                all_data_in_group.append(datatype_group)
                for datatype in all_data_in_group:
                    new_data[CommonDetails.CODE_GID] = datatype.gid
                    self._edit_data(datatype, new_data, True)
            else:
                # Get the required DataType and operation from DB to store changes that will be done in XML.
                gid = new_data[CommonDetails.CODE_GID]
                datatype = dao.get_datatype_by_gid(gid)
                self._edit_data(datatype, new_data)
        except Exception as excep:
            self.logger.exception(excep)
            raise StructureException(str(excep))


    def _edit_data(self, datatype, new_data, from_group=False):
        """
        Private method, used for editing a meta-data XML file and a DataType row
        for a given custom DataType entity with new dictionary of data from UI.
        """
        if isinstance(datatype, MappedType) and not os.path.exists(datatype.get_storage_file_path()):
            if not datatype.invalid:
                datatype.invalid = True
                dao.store_entity(datatype)
            return
        # 1. First update Operation fields:
        #    Update group field if possible
        new_group_name = new_data[CommonDetails.CODE_OPERATION_TAG]
        empty_group_value = (new_group_name is None or new_group_name == "")
        if from_group:
            if empty_group_value:
                raise StructureException("Empty group is not allowed!")

            group = dao.get_generic_entity(model.OperationGroup, new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
            if group and len(group) > 0 and new_group_name != group[0].name:
                group = group[0]
                exists_group = dao.get_generic_entity(model.OperationGroup, new_group_name, 'name')
                if exists_group:
                    raise StructureException("Group '" + new_group_name + "' already exists.")
                group.name = new_group_name
                dao.store_entity(group)
        else:
            operation = dao.get_operation_by_id(datatype.fk_from_operation)
            operation.user_group = new_group_name
            dao.store_entity(operation)

        # 2. Update dataType fields:
        datatype.subject = new_data[DataTypeOverlayDetails.DATA_SUBJECT]
        datatype.state = new_data[DataTypeOverlayDetails.DATA_STATE]
        if DataTypeOverlayDetails.DATA_TAG_1 in new_data:
            datatype.user_tag_1 = new_data[DataTypeOverlayDetails.DATA_TAG_1]
        if DataTypeOverlayDetails.DATA_TAG_2 in new_data:
            datatype.user_tag_2 = new_data[DataTypeOverlayDetails.DATA_TAG_2]
        if DataTypeOverlayDetails.DATA_TAG_3 in new_data:
            datatype.user_tag_3 = new_data[DataTypeOverlayDetails.DATA_TAG_3]
        if DataTypeOverlayDetails.DATA_TAG_4 in new_data:
            datatype.user_tag_4 = new_data[DataTypeOverlayDetails.DATA_TAG_4]
        if DataTypeOverlayDetails.DATA_TAG_5 in new_data:
            datatype.user_tag_5 = new_data[DataTypeOverlayDetails.DATA_TAG_5]

        datatype = dao.store_entity(datatype)
        # 3. Update MetaData in H5 as well.
        datatype.persist_full_metadata()
        # 4. Update the group_name/user_group into the operation meta-data file
        operation = dao.get_operation_by_id(datatype.fk_from_operation)
        self.structure_helper.update_operation_metadata(operation.project.name, new_group_name,
                                                        str(datatype.fk_from_operation), from_group)


    def get_datatype_and_datatypegroup_inputs_for_operation(self, operation_gid, selected_filter):
        """
        Returns the dataTypes that are used as input parameters for the given operation.
        'selected_filter' - is expected to be a visibility filter.

        If any dataType is part of a dataType group then the dataType group will
        be returned instead of that dataType.
        """
        all_datatypes = self._review_operation_inputs(operation_gid)[0]
        datatype_inputs = []
        for datatype in all_datatypes:
            if selected_filter.display_name == StaticFiltersFactory.RELEVANT_VIEW:
                if datatype.visible:
                    datatype_inputs.append(datatype)
            else:
                datatype_inputs.append(datatype)
        datatypes = []
        datatype_groups = dict()
        for data_type in datatype_inputs:
            if data_type.fk_datatype_group is None:
                datatypes.append(data_type)
            elif data_type.fk_datatype_group not in datatype_groups:
                dt_group = dao.get_datatype_by_id(data_type.fk_datatype_group)
                datatype_groups[data_type.fk_datatype_group] = dt_group

        datatypes.extend(datatype_groups.values())
        return datatypes
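
    # Sketch (illustrative `op_gid` and filter): members of a group collapse to their DataTypeGroup:
    #   inputs = service.get_datatype_and_datatypegroup_inputs_for_operation(op_gid, relevant_filter)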


    def _review_operation_inputs(self, operation_gid):
        """
        :returns: A list of DataTypes that are used as input parameters for the specified operation,
                 and a dictionary with all operation parameters that differ from the default ones.
        """
        operation = dao.get_operation_by_gid(operation_gid)
        parameters = json.loads(operation.parameters)
        try:
            adapter = ABCAdapter.build_adapter(operation.algorithm)
            return adapter.review_operation_inputs(parameters)

        except IntrospectionException:
            self.logger.warning("Could not find adapter class for operation %s" % operation_gid)
            inputs_datatypes = []
            changed_parameters = dict(Warning="Algorithm was Removed. We can not offer more details")
            for submit_param in parameters.values():
                self.logger.debug("Searching DT by GID %s" % submit_param)
                datatype = ABCAdapter.load_entity_by_gid(str(submit_param))
                if datatype is not None:
                    inputs_datatypes.append(datatype)
            return inputs_datatypes, changed_parameters


    def get_datatypes_inputs_for_operation_group(self, group_id, selected_filter):
        """
        Returns the dataType inputs for an operation group. If more dataTypes
        are part of the same dataType group then only the dataType group will
        be returned instead of them.
        """
        operations_gids = dao.get_operations_in_group(group_id, only_gids=True)
        op_group_inputs = dict()
        for gid in operations_gids:
            op_inputs = self.get_datatype_and_datatypegroup_inputs_for_operation(gid[0], selected_filter)
            for datatype in op_inputs:
                op_group_inputs[datatype.id] = datatype
        return op_group_inputs.values()


    @staticmethod
    def get_results_for_operation(operation_id, selected_filter=None):
        """
        Retrieve the DataTypes entities resulted after the execution of the given operation.
        """
        return dao.get_results_for_operation(operation_id, selected_filter)


    @staticmethod
    def get_operations_for_datatype_group(datatype_group_id, visibility_filter, only_in_groups=False):
        """
        Returns all the operations which use as an input parameter a dataType from the given DataTypeGroup.
        visibility_filter - is a filter used for retrieving all the operations or only the relevant ones.

        If only_in_groups is True, then this method will return only the operations that are
        part of an operation group, otherwise it will return only the operations that
        are NOT part of an operation group.
        """
        if visibility_filter.display_name != StaticFiltersFactory.RELEVANT_VIEW:
            return dao.get_operations_for_datatype_group(datatype_group_id, only_relevant=False,
                                                         only_in_groups=only_in_groups)
        return dao.get_operations_for_datatype_group(datatype_group_id, only_in_groups=only_in_groups)


    @staticmethod
    def get_operations_for_datatype(datatype_gid, visibility_filter, only_in_groups=False):
        """
        Returns all the operations which use as an input parameter the dataType with the specified GID.

        If only_in_groups is True, then this method will return only the operations that are part
        of an operation group, otherwise it will return only the operations that are NOT part of an operation group.
        """
        if visibility_filter.display_name != StaticFiltersFactory.RELEVANT_VIEW:
            return dao.get_operations_for_datatype(datatype_gid, only_relevant=False, only_in_groups=only_in_groups)
        return dao.get_operations_for_datatype(datatype_gid, only_in_groups=only_in_groups)


    @staticmethod
    def get_datatype_by_id(datatype_id):
        """Retrieve a DataType DB reference by its id."""
        return dao.get_datatype_by_id(datatype_id)


    @staticmethod
    def get_datatypegroup_by_gid(datatypegroup_gid):
        """ Returns the DataTypeGroup with the specified gid. """
        return dao.get_datatype_group_by_gid(datatypegroup_gid)

    @staticmethod
    def count_datatypes_generated_from(datatype_gid):
        """
        Count all the datatypes resulted from operations that had as
        input the datatype given by 'datatype_gid'.
        """
        return dao.count_datatypes_generated_from(datatype_gid)


    @staticmethod
    def get_datatypegroup_by_op_group_id(operation_group_id):
        """ Returns the DataTypeGroup with the specified id. """
        return dao.get_datatypegroup_by_op_group_id(operation_group_id)


    @staticmethod
    def set_datatype_visibility(datatype_gid, is_visible):
        """
        Sets the dataType visibility. If the given dataType is a dataType group or is part of a
        dataType group, then this method will set the visibility for each dataType in that group.
        """

        def set_visibility(dt):
            """ set visibility flag, persist in db and h5"""
            dt.visible = is_visible
            dt = dao.store_entity(dt)
            dt.persist_full_metadata()

        def set_group_descendants_visibility(datatype_group_id):
            datatypes_in_group = dao.get_datatypes_from_datatype_group(datatype_group_id)
            for group_dt in datatypes_in_group:
                set_visibility(group_dt)

        datatype = dao.get_datatype_by_gid(datatype_gid)

        if isinstance(datatype, DataTypeGroup):  # datatype is a group
            set_group_descendants_visibility(datatype.id)
        elif datatype.fk_datatype_group is not None:  # datatype is member of a group
            set_group_descendants_visibility(datatype.fk_datatype_group)
            # the datatype to be updated is the parent datatype group
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

        # update the datatype or datatype group.
        set_visibility(datatype)


    @staticmethod
    def is_datatype_group(datatype_gid):
        """ Used to check if the dataType with the specified GID is a DataTypeGroup. """
        return dao.is_datatype_group(datatype_gid)
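

# A minimal end-to-end sketch of ProjectService usage (illustrative only; it
# assumes a configured TVB database session and an already-persisted `admin_user`):
#   service = ProjectService()
#   project = service.store_project(admin_user, True, None,
#                                   name="demo_project", description="demo", users=[])
#   projects, pages = service.retrieve_projects_for_user(admin_user.id)
#   service.remove_project(project.id)
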
class TestSimulationResource(RestResourceTest):
    def transactional_setup_method(self):
        self.test_user = TestFactory.create_user('Rest_User')
        self.test_project = TestFactory.create_project(
            self.test_user, 'Rest_Project', users=[self.test_user.id])
        self.simulation_resource = FireSimulationResource()
        self.files_helper = FilesHelper()

    def test_server_fire_simulation_inexistent_gid(self, mocker):
        self._mock_user(mocker)
        project_gid = "inexistent-gid"
        dummy_file = FileStorage(BytesIO(b"test"), 'test.zip')
        # Mock flask.request.files to return a dictionary
        request_mock = mocker.patch.object(flask, 'request')
        request_mock.files = {
            RequestFileKey.SIMULATION_FILE_KEY.value: dummy_file
        }

        with pytest.raises(InvalidIdentifierException):
            self.simulation_resource.post(project_gid=project_gid)

    def test_server_fire_simulation_no_file(self, mocker):
        self._mock_user(mocker)
        # Mock flask.request.files to return a dictionary
        request_mock = mocker.patch.object(flask, 'request')
        request_mock.files = {}

        with pytest.raises(InvalidIdentifierException):
            self.simulation_resource.post(project_gid='')

    def test_server_fire_simulation_bad_extension(self, mocker):
        self._mock_user(mocker)
        dummy_file = FileStorage(BytesIO(b"test"), 'test.txt')
        # Mock flask.request.files to return a dictionary
        request_mock = mocker.patch.object(flask, 'request')
        request_mock.files = {
            RequestFileKey.SIMULATION_FILE_KEY.value: dummy_file
        }

        with pytest.raises(InvalidIdentifierException):
            self.simulation_resource.post(project_gid='')

    def test_server_fire_simulation(self, mocker, connectivity_factory):
        self._mock_user(mocker)
        input_folder = self.files_helper.get_project_folder(self.test_project)
        sim_dir = os.path.join(input_folder, 'test_sim')
        if not os.path.isdir(sim_dir):
            os.makedirs(sim_dir)

        simulator = SimulatorAdapterModel()
        simulator.connectivity = connectivity_factory().gid
        h5.store_view_model(simulator, sim_dir)

        zip_filename = os.path.join(input_folder,
                                    RequestFileKey.SIMULATION_FILE_NAME.value)
        FilesHelper().zip_folder(zip_filename, sim_dir)

        # Mock flask.request.files to return a dictionary
        request_mock = mocker.patch.object(flask, 'request')
        fp = open(zip_filename, 'rb')
        request_mock.files = {
            RequestFileKey.SIMULATION_FILE_KEY.value:
            FileStorage(fp, os.path.basename(zip_filename))
        }

        def launch_sim(self, user_id, project, algorithm, zip_folder_path,
                       simulator_file):
            return Operation('', '', '', {})

        # Mock simulation launch and current user
        mocker.patch.object(SimulatorService, 'prepare_simulation_on_server',
                            launch_sim)

        operation_gid, status = self.simulation_resource.post(
            project_gid=self.test_project.gid)
        fp.close()

        assert type(operation_gid) is str
        assert status == 201

    def transactional_teardown_method(self):
        self.files_helper.remove_project_structure(self.test_project.name)


class ProjectServiceTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.services.project_service module.
    """
    def setUp(self):
        """
        Reset the database before each test.
        """
        config.EVENTS_FOLDER = ''
        self.project_service = ProjectService()
        self.structure_helper = FilesHelper()
        self.test_user = TestFactory.create_user()

    def tearDown(self):
        """
        Remove project folders and clean up database.
        """
        created_projects = dao.get_projects_for_user(self.test_user.id)
        for project in created_projects:
            self.structure_helper.remove_project_structure(project.name)
        self.delete_project_folders()

    def test_edit_project_unexisting(self):
        """
        Trying to edit a non-existing project.
        """
        selected_project = TestFactory.create_project(self.test_user,
                                                      'test_proj')
        self.structure_helper.get_project_folder(selected_project)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 1,
                         "Database initialization probably failed!")
        data = dict(name="test_project",
                    description="test_description",
                    users=[])
        self.assertRaises(ProjectServiceException,
                          self.project_service.store_project, self.test_user,
                          False, 99, **data)

    def test_find_project_happy_flow(self):
        """
        Standard flow for finding a project by its id.
        """
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0,
                         "Database reset probably failed!")
        inserted_project = TestFactory.create_project(self.test_user,
                                                      'test_project')
        self.assertTrue(
            self.project_service.find_project(inserted_project.id) is not None,
            "Project not found !")
        dao_returned_project = dao.get_project_by_id(inserted_project.id)
        service_returned_project = self.project_service.find_project(
            inserted_project.id)
        self.assertEqual(
            dao_returned_project.id, service_returned_project.id,
            "Data returned from service is different from data returned by DAO."
        )
        self.assertEqual(
            dao_returned_project.name, service_returned_project.name,
            "Data returned from service is different than  data returned by DAO."
        )
        self.assertEqual(
            dao_returned_project.description,
            service_returned_project.description,
            "Data returned from service is different from data returned by DAO."
        )
        self.assertEqual(
            dao_returned_project.members, service_returned_project.members,
            "Data returned from service is different from data returned by DAO."
        )

    def test_find_project_unexisting(self):
        """
        Searching for a non-existing project.
        """
        data = dict(name="test_project",
                    description="test_description",
                    users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0,
                         "Database reset probably failed!")
        project = self.project_service.store_project(self.test_user, True,
                                                     None, **data)
        # fetch a likely non-existing project. Previous project id plus a 'big' offset
        self.assertRaises(ProjectServiceException,
                          self.project_service.find_project, project.id + 1033)

    def test_retrieve_projects_for_user(self):
        """
        Test for retrieving the projects for a given user. One page only.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0,
                         "Database was not reset properly!")
        TestFactory.create_project(self.test_user, 'test_proj')
        TestFactory.create_project(self.test_user, 'test_proj1')
        TestFactory.create_project(self.test_user, 'test_proj2')
        user1 = TestFactory.create_user('another_user')
        TestFactory.create_project(user1, 'test_proj3')
        projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        for project in projects:
            self.assertNotEqual(
                project.name, "test_proj3",
                "This project should not have been retrieved")

    def test_retrieve_1project_3usr(self):
        """
        One user as admin, two users as members; retrieving the projects for
        the admin or for any member should return exactly one project.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        TestFactory.create_project(self.test_user,
                                   'Testproject',
                                   users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(
            member1.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(
            member2.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")

    def test_retrieve_3projects_3usr(self):
        """
        Three users, 3 projects. Structure of db:
        proj1: {admin: user1, members: [user2, user3]}
        proj2: {admin: user2, members: [user1]}
        proj3: {admin: user3, members: [user1, user2]}
        Check valid project returns for all the users.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        member3 = TestFactory.create_user("member3")
        TestFactory.create_project(member1,
                                   'TestProject1',
                                   users=[member2.id, member3.id])
        TestFactory.create_project(member2, 'TestProject2', users=[member1.id])
        TestFactory.create_project(member3,
                                   'TestProject3',
                                   users=[member1.id, member2.id])
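        # Membership recap (derived from the docstring above): member1 and
        # member2 each appear in all three projects, while member3 appears only
        # in proj1 and proj3; hence the expected counts of 3, 3 and 2 below.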
        projects = self.project_service.retrieve_projects_for_user(
            member1.id, 1)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(
            member2.id, 1)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(
            member3.id, 1)[0]
        self.assertEqual(len(projects), 2, "Projects not retrieved properly!")

    def test_retrieve_projects_random(self):
        """
        Generate a large number of users/projects, and validate the results.
        """
        ExtremeTestFactory.generate_users(NR_USERS, MAX_PROJ_PER_USER)
        for i in range(NR_USERS):
            current_user = dao.get_user_by_name("gen" + str(i))
            expected_projects = ExtremeTestFactory.VALIDATION_DICT[
                current_user.id]
            if expected_projects % PROJECTS_PAGE_SIZE == 0:
                expected_pages = expected_projects / PROJECTS_PAGE_SIZE
                exp_proj_per_page = PROJECTS_PAGE_SIZE
            else:
                expected_pages = expected_projects / PROJECTS_PAGE_SIZE + 1
                exp_proj_per_page = expected_projects % PROJECTS_PAGE_SIZE
            if expected_projects == 0:
                expected_pages = 0
                exp_proj_per_page = 0
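            # Worked example of the page arithmetic above (Python 2 integer
            # division; the concrete PROJECTS_PAGE_SIZE value is an assumption,
            # e.g. 20): 23 projects -> 23 / 20 + 1 = 2 pages, with 23 % 20 = 3
            # projects on the last page; exactly 20 projects -> 1 full page.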
            projects, pages = self.project_service.retrieve_projects_for_user(
                current_user.id, expected_pages)
            self.assertEqual(
                len(projects), exp_proj_per_page,
                "Projects not retrieved properly! Expected:" +
                str(exp_proj_per_page) + "but got:" + str(len(projects)))
            self.assertEqual(pages, expected_pages,
                             "Pages not retrieved properly!")

        for folder in os.listdir(TvbProfile.current.TVB_STORAGE):
            full_path = os.path.join(TvbProfile.current.TVB_STORAGE, folder)
            if os.path.isdir(full_path) and folder.startswith('Generated'):
                shutil.rmtree(full_path)

    def test_retrieve_projects_page2(self):
        """
        Test for retrieving the second page projects for a given user.
        """
        for i in range(PROJECTS_PAGE_SIZE + 3):
            TestFactory.create_project(self.test_user, 'test_proj' + str(i))
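        # With PROJECTS_PAGE_SIZE + 3 projects in total, page 2 should hold the
        # 3 overflow projects, i.e. (PROJECTS_PAGE_SIZE + 3) % PROJECTS_PAGE_SIZE,
        # assuming PROJECTS_PAGE_SIZE > 3.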
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 2)
        self.assertEqual(len(projects),
                         (PROJECTS_PAGE_SIZE + 3) % PROJECTS_PAGE_SIZE,
                         "Pagination inproper.")
        self.assertEqual(pages, 2, 'Wrong number of pages retrieved.')

    def test_retrieve_projects_and_del(self):
        """
        Test that pagination is updated after a project is removed.
        """
        created_projects = []
        for i in range(PROJECTS_PAGE_SIZE + 1):
            created_projects.append(
                TestFactory.create_project(self.test_user,
                                           'test_proj' + str(i)))
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 2)
        self.assertEqual(len(projects),
                         (PROJECTS_PAGE_SIZE + 1) % PROJECTS_PAGE_SIZE,
                         "Pagination improper.")
        self.assertEqual(pages,
                         (PROJECTS_PAGE_SIZE + 1) / PROJECTS_PAGE_SIZE + 1,
                         'Wrong number of pages')
        self.project_service.remove_project(created_projects[1].id)
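        # After the deletion only PROJECTS_PAGE_SIZE projects remain, so they
        # all fit on page 1 and page 2 is expected to come back empty.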
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 2)
        self.assertEqual(len(projects), 0, "Pagination improper.")
        self.assertEqual(pages, 1, 'Wrong number of pages retrieved.')
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 1)
        self.assertEqual(len(projects), PROJECTS_PAGE_SIZE,
                         "Pagination improper.")
        self.assertEqual(pages, 1, 'Wrong number of pages retrieved.')

    def test_empty_project_has_zero_disk_size(self):
        TestFactory.create_project(self.test_user, 'test_proj')
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id)
        self.assertEqual(0, projects[0].disk_size)
        self.assertEqual('0.0 KiB', projects[0].disk_size_human)

    def test_project_disk_size(self):
        project1 = TestFactory.create_project(self.test_user, 'test_proj1')
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(self.test_user, project1,
                                            'testSubject', zip_path)

        project2 = TestFactory.create_project(self.test_user, 'test_proj2')
        TestFactory.import_cff(test_user=self.test_user, test_project=project2)

        projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        self.assertNotEqual(projects[0].disk_size, projects[1].disk_size,
                            "projects should have different size")

        for project in projects:
            self.assertNotEqual(0, project.disk_size)
            self.assertNotEqual('0.0 KiB', project.disk_size_human)

            prj_folder = self.structure_helper.get_project_folder(project)
            actual_disk_size = self.compute_recursive_h5_disk_usage(
                prj_folder)[0]
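            # The DB presumably records only the H5 payload sizes, while the
            # project folder also holds metadata (e.g. operation XMLs); the 1.4
            # ratio checked below is this test's heuristic tolerance, not a
            # hard specification.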

            ratio = float(actual_disk_size) / project.disk_size
            msg = "Real disk usage: %s The one recorded in the db : %s" % (
                actual_disk_size, project.disk_size)
            self.assertTrue(ratio < 1.4, msg)

    def test_get_linkable_projects(self):
        """
        Test for retrieving the projects for a given user.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0, "Database was not reset!")
        test_proj = []
        user1 = TestFactory.create_user("another_user")
        for i in range(4):
            test_proj.append(
                TestFactory.create_project(self.test_user if i < 3 else user1,
                                           'test_proj' + str(i)))

        project_storage = self.structure_helper.get_project_folder(
            test_proj[0])

        operation = TestFactory.create_operation(test_user=self.test_user,
                                                 test_project=test_proj[0])

        project_storage = os.path.join(project_storage, str(operation.id))
        os.makedirs(project_storage)
        datatype = dao.store_entity(
            model.DataType(module="test_data",
                           subject="subj1",
                           state="test_state",
                           operation_id=operation.id))
        linkable = self.project_service.get_linkable_projects_for_user(
            self.test_user.id, str(datatype.id))[0]
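        # The datatype lives in test_proj[0], so the link-able targets are the
        # current user's other projects (test_proj[1] and test_proj[2]);
        # test_proj[3] belongs to another user and must not be offered.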
        self.assertEqual(len(linkable), 2,
                         "Wrong count of link-able projects!")
        proj_names = [project.name for project in linkable]
        self.assertTrue(test_proj[1].name in proj_names)
        self.assertTrue(test_proj[2].name in proj_names)
        self.assertFalse(test_proj[3].name in proj_names)

    def test_remove_project_happy_flow(self):
        """
        Standard flow for deleting a project.
        """
        inserted_project = TestFactory.create_project(self.test_user,
                                                      'test_proj')
        project_root = self.structure_helper.get_project_folder(
            inserted_project)
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 1, "Initializations failed!")
        self.assertTrue(os.path.exists(project_root),
                        "Something failed at insert time!")
        self.project_service.remove_project(inserted_project.id)
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 0, "Project was not deleted!")
        self.assertFalse(os.path.exists(project_root),
                         "Root folder not deleted!")

    def test_remove_project_wrong_id(self):
        """
        Flow for deleting a project, giving a non-existing id.
        """
        TestFactory.create_project(self.test_user, 'test_proj')
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 1, "Initializations failed!")
        self.assertRaises(ProjectServiceException,
                          self.project_service.remove_project, 99)

    @staticmethod
    def _create_value_wrapper(test_user, test_project=None):
        """
        Creates a ValueWrapper dataType, and the associated parent Operation.
        This is also used in ProjectStructureTest.
        """
        if test_project is None:
            test_project = TestFactory.create_project(test_user, 'test_proj')
        operation = TestFactory.create_operation(test_user=test_user,
                                                 test_project=test_project)
        value_wrapper = ValueWrapper(data_value=5.0, data_name="my_value")
        value_wrapper.type = "ValueWrapper"
        value_wrapper.module = "tvb.datatypes.mapped_values"
        value_wrapper.subject = "John Doe"
        value_wrapper.state = "RAW_STATE"
        value_wrapper.set_operation_id(operation.id)
        adapter_instance = StoreAdapter([value_wrapper])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        all_value_wrappers = FlowService().get_available_datatypes(
            test_project.id, "tvb.datatypes.mapped_values.ValueWrapper")[0]
        if len(all_value_wrappers) != 1:
            raise Exception("Should be only one value wrapper.")
        result_vw = ABCAdapter.load_entity_by_gid(all_value_wrappers[0][2])
        return test_project, result_vw.gid, operation.gid

    def __check_meta_data(self, expected_meta_data, new_datatype):
        """Validate Meta-Data"""
        mapp_keys = {
            DataTypeMetaData.KEY_SUBJECT: "subject",
            DataTypeMetaData.KEY_STATE: "state"
        }
        for key, value in expected_meta_data.iteritems():
            if key in mapp_keys:
                self.assertEqual(value, getattr(new_datatype, mapp_keys[key]))
            elif key == DataTypeMetaData.KEY_OPERATION_TAG:
                if DataTypeMetaData.KEY_OP_GROUP_ID in expected_meta_data:
                    ## We have a Group to check
                    op_group = new_datatype.parent_operation.fk_operation_group
                    op_group = dao.get_generic_entity(model.OperationGroup,
                                                      op_group)[0]
                    self.assertEqual(value, op_group.name)
                else:
                    self.assertEqual(value,
                                     new_datatype.parent_operation.user_group)

    def test_remove_project_node(self):
        """
        Test removing of a node from a project.
        """
        inserted_project, gid, gid_op = self._create_value_wrapper(
            self.test_user)
        project_to_link = model.Project("Link", self.test_user.id, "descript")
        project_to_link = dao.store_entity(project_to_link)
        exact_data = dao.get_datatype_by_gid(gid)
        dao.store_entity(model.Links(exact_data.id, project_to_link.id))
        self.assertTrue(
            dao.get_datatype_by_gid(gid) is not None,
            "Initialization problem!")

        operation_id = dao.get_generic_entity(model.Operation, gid_op,
                                              'gid')[0].id
        op_folder = self.structure_helper.get_project_folder(
            "test_proj", str(operation_id))
        self.assertTrue(os.path.exists(op_folder))
        sub_files = os.listdir(op_folder)
        self.assertEqual(2, len(sub_files))
        ### Validate that no more files are created than needed.

        self.project_service._remove_project_node_files(
            inserted_project.id, gid)
        sub_files = os.listdir(op_folder)
        self.assertEqual(1, len(sub_files))
        ### operation.xml file should still be there

        op_folder = self.structure_helper.get_project_folder(
            "Link", str(operation_id + 1))
        sub_files = os.listdir(op_folder)
        self.assertEqual(2, len(sub_files))
        self.assertTrue(
            dao.get_datatype_by_gid(gid) is not None,
            "Data should still be in DB, because of links")
        self.project_service._remove_project_node_files(
            project_to_link.id, gid)
        self.assertTrue(dao.get_datatype_by_gid(gid) is None)
        sub_files = os.listdir(op_folder)
        self.assertEqual(1, len(sub_files))
        ### operation.xml file should still be there

    def test_update_meta_data_simple(self):
        """
        Test the new update metaData for a simple data that is not part of a group.
        """
        inserted_project, gid, _ = self._create_value_wrapper(self.test_user)
        new_meta_data = {
            DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
            DataTypeOverlayDetails.DATA_STATE: "second_state",
            DataTypeOverlayDetails.CODE_GID: gid,
            DataTypeOverlayDetails.CODE_OPERATION_TAG: 'new user group'
        }
        self.project_service.update_metadata(new_meta_data)

        new_datatype = dao.get_datatype_by_gid(gid)
        self.__check_meta_data(new_meta_data, new_datatype)

        op_path = FilesHelper().get_operation_meta_file_path(
            inserted_project.name, new_datatype.parent_operation.id)
        op_meta = XMLReader(op_path).read_metadata()
        self.assertEqual(op_meta['user_group'], 'new user group',
                         'UserGroup not updated!')

    def test_update_meta_data_group(self):
        """
        Test the new update metaData for a group of dataTypes.
        """
        datatypes, group_id = TestFactory.create_group(
            self.test_user, subject="test-subject-1")

        new_meta_data = {
            DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
            DataTypeOverlayDetails.DATA_STATE: "updated_state",
            DataTypeOverlayDetails.CODE_OPERATION_GROUP_ID: group_id,
            DataTypeOverlayDetails.CODE_OPERATION_TAG: 'newGroupName'
        }
        self.project_service.update_metadata(new_meta_data)

        for datatype in datatypes:
            new_datatype = dao.get_datatype_by_id(datatype.id)
            self.assertEqual(group_id,
                             new_datatype.parent_operation.fk_operation_group)
            new_group = dao.get_generic_entity(model.OperationGroup,
                                               group_id)[0]
            self.assertEqual(new_group.name, "newGroupName")
            self.__check_meta_data(new_meta_data, new_datatype)

    def _create_datatypes(self, dt_factory, nr_of_dts):
        for idx in range(nr_of_dts):
            dt = Datatype1()
            dt.row1 = "value%i" % (idx, )
            dt.row2 = "value%i" % (idx + 1, )
            dt_factory._store_datatype(dt)

    def test_retrieve_project_full(self):
        """
        Tests full project information is retrieved by method `ProjectService.retrieve_project_full(...)`
        """
        dt_factory = datatypes_factory.DatatypesFactory()
        self._create_datatypes(dt_factory, 3)
        _, ops_nr, operations, pages_no = self.project_service.retrieve_project_full(
            dt_factory.project.id)
        self.assertEqual(
            ops_nr, 1,
            "DataType Factory should only use one operation to store all its datatypes."
        )
        self.assertEqual(
            pages_no, 1,
            "Only one page of operations is expected."
        )
        resulted_dts = operations[0]['results']
        self.assertEqual(len(resulted_dts), 3,
                         "3 datatypes should be created.")

    def test_get_project_structure(self):
        """
        Tests project structure is as expected and contains all datatypes
        """
        SELF_DTS_NUMBER = 3
        dt_factory_1 = datatypes_factory.DatatypesFactory()
        self._create_datatypes(dt_factory_1, SELF_DTS_NUMBER)
        dt_group = dt_factory_1.create_datatype_group()

        link_ids, expected_links = [], []
        # Prepare link towards a simple DT
        dt_factory_2 = datatypes_factory.DatatypesFactory()
        dt_to_link = dt_factory_2.create_simple_datatype()
        link_ids.append(dt_to_link.id)
        expected_links.append(dt_to_link.gid)

        # Prepare links towards a full DT Group, but expecting only the DT_Group in the final tree
        link_gr = dt_factory_2.create_datatype_group()
        dts = dao.get_datatype_in_group(datatype_group_id=link_gr.id)
        link_ids.extend([dt_to_link.id for dt_to_link in dts])
        link_ids.append(link_gr.id)
        expected_links.append(link_gr.gid)

        # Prepare link towards a single DT inside a group, and expecting to find the DT in the final tree
        link_gr = dt_factory_2.create_datatype_group()
        dt_to_link = dao.get_datatype_in_group(datatype_group_id=link_gr.id)[0]
        link_ids.append(dt_to_link.id)
        expected_links.append(dt_to_link.gid)

        # Actually create the links from Prj2 into Prj1
        FlowService().create_link(link_ids, dt_factory_1.project.id)

        # Retrieve the raw data used to compose the tree (for easy parsing)
        dts_in_tree = dao.get_data_in_project(dt_factory_1.project.id)
        dts_in_tree = [dt.gid for dt in dts_in_tree]
        # Retrieve the tree JSON (for trivial validations only, as we cannot decode it here)
        node_json = self.project_service.get_project_structure(
            dt_factory_1.project, None, DataTypeMetaData.KEY_STATE,
            DataTypeMetaData.KEY_SUBJECT, None)

        self.assertEqual(
            len(expected_links) + SELF_DTS_NUMBER + 2, len(dts_in_tree),
            "invalid number of nodes in tree")
        self.assertFalse(
            link_gr.gid in dts_in_tree,
            "A group from which only a single DT was linked should not appear.")
        self.assertTrue(dt_group.gid in dts_in_tree,
                        "DT_Group should be in the Project Tree!")
        self.assertTrue(dt_group.gid in node_json,
                        "DT_Group should be in the Project Tree JSON!")

        project_dts = dao.get_datatypes_in_project(dt_factory_1.project.id)
        for dt in project_dts:
            if dt.fk_datatype_group is not None:
                self.assertFalse(dt.gid in node_json,
                                 "DTs part of a group should not be in the tree")
                self.assertFalse(dt.gid in dts_in_tree,
                                 "DTs part of a group should not be in the tree")
            else:
                self.assertTrue(dt.gid in node_json,
                                "Simple DTs and DT_Groups should be in the tree")
                self.assertTrue(dt.gid in dts_in_tree,
                                "Simple DTs and DT_Groups should be in the tree")

        for link_gid in expected_links:
            self.assertTrue(link_gid in node_json, "Expected Link not present")
            self.assertTrue(link_gid in dts_in_tree,
                            "Expected Link not present")


class TestOperationResource(RestResourceTest):
    def transactional_setup_method(self):
        self.test_user = TestFactory.create_user('Rest_User')
        self.test_project = TestFactory.create_project(
            self.test_user, 'Rest_Project', users=[self.test_user.id])
        self.operations_resource = GetOperationsInProjectResource()
        self.status_resource = GetOperationStatusResource()
        self.results_resource = GetOperationResultsResource()
        self.launch_resource = LaunchOperationResource()
        self.files_helper = FilesHelper()

    def test_server_get_operation_status_inexistent_gid(self, mocker):
        self._mock_user(mocker)
        operation_gid = "inexistent-gid"
        with pytest.raises(InvalidIdentifierException):
            self.status_resource.get(operation_gid=operation_gid)

    def test_server_get_operation_status(self, mocker):
        self._mock_user(mocker)
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_96.zip')
        TestFactory.import_zip_connectivity(self.test_user, self.test_project,
                                            zip_path)

        request_mock = mocker.patch.object(flask, 'request')
        request_mock.args = {Strings.PAGE_NUMBER: '1'}

        operations_and_pages = self.operations_resource.get(
            project_gid=self.test_project.gid)

        result = self.status_resource.get(
            operation_gid=operations_and_pages['operations'][0].gid)
        assert type(result) is str
        assert result in OperationPossibleStatus

    def test_server_get_operation_results_inexistent_gid(self, mocker):
        self._mock_user(mocker)
        operation_gid = "inexistent-gid"
        with pytest.raises(InvalidIdentifierException):
            self.results_resource.get(operation_gid=operation_gid)

    def test_server_get_operation_results(self, mocker):
        self._mock_user(mocker)
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_96.zip')
        TestFactory.import_zip_connectivity(self.test_user, self.test_project,
                                            zip_path)

        request_mock = mocker.patch.object(flask, 'request')
        request_mock.args = {Strings.PAGE_NUMBER: '1'}

        operations_and_pages = self.operations_resource.get(
            project_gid=self.test_project.gid)

        result = self.results_resource.get(
            operation_gid=operations_and_pages['operations'][0].gid)
        assert type(result) is list
        assert len(result) == 1

    def test_server_get_operation_results_failed_operation(self, mocker):
        self._mock_user(mocker)
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_90.zip')
        with pytest.raises(TVBException):
            TestFactory.import_zip_connectivity(self.test_user,
                                                self.test_project, zip_path)

        request_mock = mocker.patch.object(flask, 'request')
        request_mock.args = {Strings.PAGE_NUMBER: '1'}

        operations_and_pages = self.operations_resource.get(
            project_gid=self.test_project.gid)

        result = self.results_resource.get(
            operation_gid=operations_and_pages['operations'][0].gid)
        assert type(result) is list
        assert len(result) == 0

    def test_server_launch_operation_no_file(self, mocker):
        self._mock_user(mocker)
        # Mock flask.request.files to return a dictionary
        request_mock = mocker.patch.object(flask, 'request')
        request_mock.files = {}

        with pytest.raises(InvalidIdentifierException):
            self.launch_resource.post(project_gid='',
                                      algorithm_module='',
                                      algorithm_classname='')

    def test_server_launch_operation_wrong_file_extension(self, mocker):
        self._mock_user(mocker)
        dummy_file = FileStorage(BytesIO(b"test"), 'test.txt')
        # Mock flask.request.files to return a dictionary
        request_mock = mocker.patch.object(flask, 'request')
        request_mock.files = {
            RequestFileKey.LAUNCH_ANALYZERS_MODEL_FILE.value: dummy_file
        }

        with pytest.raises(InvalidIdentifierException):
            self.launch_resource.post(project_gid='',
                                      algorithm_module='',
                                      algorithm_classname='')

    def test_server_launch_operation_inexistent_gid(self, mocker):
        self._mock_user(mocker)
        project_gid = "inexistent-gid"
        dummy_file = FileStorage(BytesIO(b"test"), 'test.h5')
        # Mock flask.request.files to return a dictionary
        request_mock = mocker.patch.object(flask, 'request')
        request_mock.files = {
            RequestFileKey.LAUNCH_ANALYZERS_MODEL_FILE.value: dummy_file
        }

        with pytest.raises(InvalidIdentifierException):
            self.launch_resource.post(project_gid=project_gid,
                                      algorithm_module='',
                                      algorithm_classname='')

    def test_server_launch_operation_inexistent_algorithm(self, mocker):
        self._mock_user(mocker)
        inexistent_algorithm = "inexistent-algorithm"

        dummy_file = FileStorage(BytesIO(b"test"), 'test.h5')
        # Mock flask.request.files to return a dictionary
        request_mock = mocker.patch.object(flask, 'request')
        request_mock.files = {
            RequestFileKey.LAUNCH_ANALYZERS_MODEL_FILE.value: dummy_file
        }

        with pytest.raises(InvalidIdentifierException):
            self.launch_resource.post(project_gid=self.test_project.gid,
                                      algorithm_module=inexistent_algorithm,
                                      algorithm_classname='')

    def test_server_launch_operation(self, mocker, time_series_index_factory):
        self._mock_user(mocker)
        algorithm_module = "tvb.adapters.analyzers.fourier_adapter"
        algorithm_class = "FourierAdapter"

        input_ts_index = time_series_index_factory()

        fft_model = FFTAdapterModel()
        fft_model.time_series = UUID(input_ts_index.gid)
        fft_model.window_function = list(SUPPORTED_WINDOWING_FUNCTIONS)[0]
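
        # The launch endpoint consumes a serialized ViewModel: the test stores
        # the FFT model into an H5 file inside the project folder and uploads
        # that file via the mocked flask request further below.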

        input_folder = self.files_helper.get_project_folder(self.test_project)
        view_model_h5_path = h5.path_for(input_folder, ViewModelH5,
                                         fft_model.gid)

        view_model_h5 = ViewModelH5(view_model_h5_path, fft_model)
        view_model_h5.store(fft_model)
        view_model_h5.close()

        # Mock flask.request.files to return a dictionary
        request_mock = mocker.patch.object(flask, 'request')
        fp = open(view_model_h5_path, 'rb')
        request_mock.files = {
            RequestFileKey.LAUNCH_ANALYZERS_MODEL_FILE.value:
            FileStorage(fp, os.path.basename(view_model_h5_path))
        }

        # Mock launch_operation() call and current_user
        mocker.patch.object(OperationService, 'launch_operation')

        operation_gid, status = self.launch_resource.post(
            project_gid=self.test_project.gid,
            algorithm_module=algorithm_module,
            algorithm_classname=algorithm_class)

        fp.close()

        assert type(operation_gid) is str
        assert len(operation_gid) > 0

    def transactional_teardown_method(self):
        self.files_helper.remove_project_structure(self.test_project.name)


# Module-level storage root, assumed for this example (it mirrors the
# TvbProfile usage in the tests above); referenced by test_check_created below.
root_storage = TvbProfile.current.TVB_STORAGE


class FilesHelperTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.entities.file.files_helper module.
    """
    PROJECT_NAME = "test_proj"

    def setUp(self):
        """
        Set up the context needed by the tests.
        """
        self.files_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user,
                                                       self.PROJECT_NAME)

    def tearDown(self):
        """ Remove generated project during tests. """
        self.delete_project_folders()

    def test_check_created(self):
        """ Test standard flows for check created. """
        self.files_helper.check_created()
        self.assertTrue(os.path.exists(root_storage), "Storage not created!")

        self.files_helper.check_created(os.path.join(root_storage, "test"))
        self.assertTrue(os.path.exists(root_storage), "Storage not created!")
        self.assertTrue(os.path.exists(os.path.join(root_storage, "test")),
                        "Test directory not created!")

    def test_get_project_folder(self):
        """
        Test the get_project_folder method which should create a folder in case
        it doesn't already exist.
        """
        project_path = self.files_helper.get_project_folder(self.test_project)
        self.assertTrue(os.path.exists(project_path), "Folder doesn't exist")

        folder_path = self.files_helper.get_project_folder(
            self.test_project, "43")
        self.assertTrue(os.path.exists(project_path), "Folder doesn't exist")
        self.assertTrue(os.path.exists(folder_path), "Folder doesn't exist")

    def test_rename_project_structure(self):
        """ Try to rename the folder structure of a project. Standard flow. """
        self.files_helper.get_project_folder(self.test_project)
        path, name = self.files_helper.rename_project_structure(
            self.test_project.name, "new_name")
        self.assertNotEqual(path, name, "Rename didn't take effect.")

    def test_rename_structure_same_name(self):
        """ Try to rename the folder structure of a project. Same name. """
        self.files_helper.get_project_folder(self.test_project)

        self.assertRaises(FileStructureException,
                          self.files_helper.rename_project_structure,
                          self.test_project.name, self.PROJECT_NAME)

    def test_remove_project_structure(self):
        """ Check that remove project structure deletes the corresponding folder. Standard flow. """
        full_path = self.files_helper.get_project_folder(self.test_project)
        self.assertTrue(os.path.exists(full_path), "Folder was not created.")

        self.files_helper.remove_project_structure(self.test_project.name)
        self.assertFalse(os.path.exists(full_path),
                         "Project folder not deleted.")

    def test_write_project_metadata(self):
        """  Write XML for test-project. """
        self.files_helper.write_project_metadata(self.test_project)
        expected_file = self.files_helper.get_project_meta_file_path(
            self.PROJECT_NAME)
        self.assertTrue(os.path.exists(expected_file))
        project_meta = XMLReader(expected_file).read_metadata()
        loaded_project = model.Project(None, None)
        loaded_project.from_dict(project_meta, self.test_user.id)
        self.assertEqual(self.test_project.name, loaded_project.name)
        self.assertEqual(self.test_project.description,
                         loaded_project.description)
        self.assertEqual(self.test_project.gid, loaded_project.gid)
        expected_dict = self.test_project.to_dict()[1]
        del expected_dict['last_updated']
        found_dict = loaded_project.to_dict()[1]
        del found_dict['last_updated']
        self.assertDictContainsSubset(expected_dict, found_dict)
        self.assertDictContainsSubset(found_dict, expected_dict)

    def test_write_operation_metadata(self):
        """
        Test that a correct XML is created for an operation.
        """
        operation = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)
        expected_file = self.files_helper.get_operation_meta_file_path(
            self.PROJECT_NAME, operation.id)
        self.assertFalse(os.path.exists(expected_file))
        self.files_helper.write_operation_metadata(operation)
        self.assertTrue(os.path.exists(expected_file))
        operation_meta = XMLReader(expected_file).read_metadata()
        loaded_operation = model.Operation(None, None, None, None)
        loaded_operation.from_dict(operation_meta, dao)
        expected_dict = operation.to_dict()[1]
        found_dict = loaded_operation.to_dict()[1]
        for key, value in expected_dict.iteritems():
            self.assertEqual(str(value), str(found_dict[key]))
        # Now validate that operation metaData can be also updated
        self.assertNotEqual("new_group_name", found_dict['user_group'])
        self.files_helper.update_operation_metadata(self.PROJECT_NAME,
                                                    "new_group_name",
                                                    operation.id)
        found_dict = XMLReader(expected_file).read_metadata()
        self.assertEqual("new_group_name", found_dict['user_group'])

    def test_remove_dt_happy_flow(self):
        """
        Happy flow for removing a file related to a DataType.
        """
        folder_path = self.files_helper.get_project_folder(
            self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        open(datatype.get_storage_file_path(), 'w')
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()),
                        "Test file was not created!")
        self.files_helper.remove_datatype(datatype)
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()),
                         "Test file was not deleted!")

    def test_remove_dt_non_existent(self):
        """
        Try to call remove on a dataType with no H5 file.
        Should work.
        """
        folder_path = self.files_helper.get_project_folder(
            self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()))
        self.files_helper.remove_datatype(datatype)

    def test_move_datatype(self):
        """
        Make sure associated H5 file is moved to a correct new location.
        """
        folder_path = self.files_helper.get_project_folder(
            self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        open(datatype.get_storage_file_path(), 'w')
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()),
                        "Test file was not created!")
        self.files_helper.move_datatype(datatype, self.PROJECT_NAME + '11',
                                        "43")

        self.assertFalse(os.path.exists(datatype.get_storage_file_path()),
                         "Test file was not moved!")
        datatype.storage_path = self.files_helper.get_project_folder(
            self.PROJECT_NAME + '11', "43")
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()),
                        "Test file was not created!")

    def test_find_relative_path(self):
        """
        Tests that relative path is computed properly.
        """
        rel_path = self.files_helper.find_relative_path(
            "/root/up/to/here/test/it/now", "/root/up/to/here")
        self.assertEqual(rel_path, os.sep.join(["test", "it", "now"]),
                         "Did not extract relative path as expected.")

    def test_remove_files_valid(self):
        """
        Pass a valid list of files and check they are all removed.
        """
        file_list = ["test1", "test2", "test3"]
        for file_n in file_list:
            fp = open(file_n, 'w')
            fp.write('test')
            fp.close()
        for file_n in file_list:
            self.assertTrue(os.path.isfile(file_n))
        self.files_helper.remove_files(file_list)
        for file_n in file_list:
            self.assertFalse(os.path.isfile(file_n))

    def test_remove_folder(self):
        """
        Pass a valid folder and check that it is removed.
        """
        folder_name = "test_folder"
        os.mkdir(folder_name)
        self.assertTrue(os.path.isdir(folder_name),
                        "Folder should be created.")
        self.files_helper.remove_folder(folder_name)
        self.assertFalse(os.path.isdir(folder_name),
                         "Folder should be deleted.")

    def test_remove_folder_non_existing_ignore_exc(self):
        """
        Try to remove a non-existing folder, with ignore_errors set; no exception expected.
        """
        folder_name = "test_folder"
        self.assertFalse(os.path.isdir(folder_name),
                         "Folder should not exist before call.")
        self.files_helper.remove_folder(folder_name, ignore_errors=True)

    def test_remove_folder_non_existing(self):
        """
        Try to remove a non-existing folder without ignoring errors; a FileStructureException is expected.
        """
        folder_name = "test_folder"
        self.assertFalse(os.path.isdir(folder_name),
                         "Folder should not exist before call.")
        self.assertRaises(FileStructureException,
                          self.files_helper.remove_folder, folder_name, False)


class ProjectService:
    """
    Services layer for Project entities.
    """


    def __init__(self):
        self.logger = get_logger(__name__)
        self.structure_helper = FilesHelper()


    def store_project(self, current_user, is_create, selected_id, **data):
        """ 
        We want to create/update a project entity. 
        """
        # Validate unique project name
        new_name = data["name"]
        if len(new_name) < 1:
            raise ProjectServiceException("Invalid project name!")
        projects_no = dao.count_projects_for_name(new_name, selected_id)
        if projects_no > 0:
            err = {'name': 'Please choose another name, this one is used!'}
            raise formencode.Invalid("Duplicate Name Error", {}, None, error_dict=err)
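        # Refuse to touch the project while operations are running: a rename
        # moves the project folder on disk, which could corrupt results still
        # being written (rationale inferred from the behaviour below).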
        started_operations = dao.get_operation_numbers(selected_id)[1]
        if started_operations > 0:
            raise ProjectServiceException("A project can not be renamed while operations are still running!")
        if is_create:
            current_proj = model.Project(new_name, current_user.id, data["description"])
            # Remove any leftover folder with the same name, then create the root folder.
            self.structure_helper.remove_project_structure(new_name)
            self.structure_helper.get_project_folder(current_proj)
        else:
            try:
                current_proj = dao.get_project_by_id(selected_id)
            except Exception, excep:
                self.logger.error("An error has occurred!")
                self.logger.exception(excep)
                raise ProjectServiceException(excep.message)
            if current_proj.name != new_name:
                self.structure_helper.rename_project_structure(current_proj.name, new_name)
            current_proj.name = new_name
            current_proj.description = data["description"]
        # Commit, to make sure we have a valid ID
        current_proj.refresh_update_date()
        self.structure_helper.write_project_metadata(current_proj)
        current_proj = dao.store_entity(current_proj)

        if is_create:
            handle_event(".".join([self.__class__.__name__, stack()[0][3]]), dao.get_system_user(), current_proj)

        # Retrieve again, to initialize lazy attributes
        current_proj = dao.get_project_by_id(current_proj.id)
        # Update share settings on the current Project entity
        visited_pages = []
        prj_admin = current_proj.administrator.username
        if 'visited_pages' in data and data['visited_pages']:
            visited_pages = data['visited_pages'].split(',')
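        # For every members-page the editor actually visited, drop the stored
        # memberships for the users listed on that page; the currently selected
        # users are then re-inserted below.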
        for page in visited_pages:
            members = UserService.retrieve_all_users(prj_admin, int(page))[0]
            members = [m.id for m in members]
            dao.delete_members_for_project(current_proj.id, members)
        selected_user_ids = data["users"]
        for u_id in selected_user_ids:
            dao.store_entity(model.User_to_Project(u_id, current_proj.id))
        # Finish operation
        self.logger.debug("Edit/Save OK for project:" + str(current_proj.id) + ' by user:' + current_user.username)
        return current_proj
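
    # Usage sketch (a minimal illustration mirroring ProjectServiceTest above,
    # not part of the service API; `admin_user` and `other_user` are assumed
    # pre-existing user entities):
    #
    #   service = ProjectService()
    #   # Create:
    #   proj = service.store_project(admin_user, True, None, name="my_proj",
    #                                description="demo", users=[])
    #   # Rename and share:
    #   proj = service.store_project(admin_user, False, proj.id,
    #                                name="my_proj_2", description="demo",
    #                                users=[other_user.id])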