Example #1
class ProjectFacade:
    def __init__(self):
        self.project_service = ProjectService()
        self.user_service = UserService()
        self.project_dao = CaseDAO()

    @staticmethod
    def retrieve_logged_user_projects(logged_user_id):
        projects = ProjectService.retrieve_all_user_projects(user_id=logged_user_id)
        return [ProjectDto(project) for project in projects]

    def create_project(self, logged_user, project_name, project_description):
        self.project_service.store_project(logged_user, True, None, name=project_name,
                                           description=project_description)

    def get_datatypes_in_project(self, project_gid):
        try:
            project = self.project_service.find_project_lazy_by_gid(project_gid)
        except ProjectServiceException:
            raise InvalidIdentifierException()

        datatypes = self.project_service.get_datatypes_in_project(project.id)
        return [DataTypeDto(datatype) for datatype in datatypes]

    def get_project_operations(self, project_gid, page_number):
        try:
            project = self.project_service.find_project_lazy_by_gid(project_gid)
        except ProjectServiceException:
            raise InvalidIdentifierException()

        _, _, operations, pages = self.project_service.retrieve_project_full(project.id, current_page=int(page_number))
        return [OperationDto(operation) for operation in operations], pages

    def add_members_to_project(self, current_user_id, project_gid, new_members_gid):
        try:
            project = self.project_service.find_project_lazy_by_gid(project_gid)
        except Exception:
            raise InvalidIdentifierException("Invalid project identifier.")

        if current_user_id != project.fk_admin:
            raise AuthorizationRequestException("Your are not allowed to edit given project")

        new_members_id = []
        for gid in new_members_gid:
            user = self.user_service.get_user_by_gid(gid)
            if user is None:
                raise InvalidInputException("Invalid user gid {}".format(gid))
            new_members_id.append(user.id)
        self.project_dao.add_members_to_project(project.id, new_members_id)
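
# A minimal usage sketch of the facade above, e.g. from a request handler.
# `logged_user` and `project_gid` are hypothetical placeholders; only the
# facade calls themselves come from the example.
facade = ProjectFacade()
facade.create_project(logged_user, "demo_project", "created through the facade")
my_projects = ProjectFacade.retrieve_logged_user_projects(logged_user.id)
try:
    datatypes = facade.get_datatypes_in_project(project_gid)
    operations, pages = facade.get_project_operations(project_gid, page_number=1)
except InvalidIdentifierException:
    datatypes, operations, pages = [], [], 0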
Example #2
    def __init__(self):
        micro_postfix = "_%d" % int(time.time() * 1000000)

        # Here create all structures needed later for data types creation
        self.files_helper = FilesHelper()

        # First create user 
        user = model.User("datatype_factory_user" + micro_postfix, "test_pass",
                          "*****@*****.**" + micro_postfix, True, "user")
        self.user = dao.store_entity(user)

        # Now create a project
        project_service = ProjectService()
        data = dict(name='DatatypesFactoryProject' + micro_postfix, description='test_desc', users=[])
        self.project = project_service.store_project(self.user, True, None, **data)

        # Create algorithm
        alg_category = model.AlgorithmCategory('one', True)
        dao.store_entity(alg_category)
        alg_group = model.AlgorithmGroup("test_module1", "classname1", alg_category.id)
        dao.store_entity(alg_group)
        algorithm = model.Algorithm(alg_group.id, 'id', name='', req_data='', param_name='', output='')
        self.algorithm = dao.store_entity(algorithm)

        # Create an operation
        self.meta = {DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
                     DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE}
        operation = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                    meta=json.dumps(self.meta), status=model.STATUS_FINISHED,
                                    method_name=ABCAdapter.LAUNCH_METHOD)
        self.operation = dao.store_entity(operation)
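
# A minimal usage sketch, assuming the __init__ above belongs to the
# DatatypesFactory consumed later in these examples; the factory already
# provides a user, a project, an algorithm and a finished operation to
# attach new datatypes to.
dt_factory = datatypes_factory.DatatypesFactory()
simple_dt = dt_factory.create_simple_datatype()
dt_group = dt_factory.create_datatype_group()
print(dt_factory.project.name)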
Example #3
    def __init__(self):
        micro_postfix = "_%d" % int(time.time() * 1000000)

        # Here create all structures needed later for data types creation
        self.files_helper = FilesHelper()

        # First create user 
        user = model.User("datatype_factory_user" + micro_postfix, "test_pass",
                          "*****@*****.**" + micro_postfix, True, "user")
        self.user = dao.store_entity(user)

        # Now create a project
        project_service = ProjectService()
        data = dict(name='DatatypesFactoryProject' + micro_postfix, description='test_desc', users=[])
        self.project = project_service.store_project(self.user, True, None, **data)

        # Create algorithm
        alg_category = model.AlgorithmCategory('one', True)
        dao.store_entity(alg_category)
        ad = model.Algorithm(SIMULATOR_MODULE, SIMULATOR_CLASS, alg_category.id)
        self.algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE, SIMULATOR_CLASS)
        if self.algorithm is None:
            self.algorithm = dao.store_entity(ad)

        # Create an operation
        self.meta = {DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
                     DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE}
        operation = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                    meta=json.dumps(self.meta), status=model.STATUS_FINISHED)
        self.operation = dao.store_entity(operation)
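
# The example above uses a get-or-create idiom so that re-running the factory
# does not duplicate the simulator algorithm. A hypothetical helper making that
# idiom explicit (only dao.get_algorithm_by_module, dao.store_entity and
# model.Algorithm from the example are assumed):
def get_or_create_algorithm(module, classname, category_id):
    existing = dao.get_algorithm_by_module(module, classname)
    if existing is not None:
        return existing
    return dao.store_entity(model.Algorithm(module, classname, category_id))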
Example #4
    def __init__(self):
        micro_postfix = "_%d" % int(time.time() * 1000000)

        # Here create all structures needed later for data types creation
        self.files_helper = FilesHelper()

        # First create user 
        user = model.User("datatype_factory_user" + micro_postfix, "test_pass",
                          "*****@*****.**" + micro_postfix, True, "user")
        self.user = dao.store_entity(user)

        # Now create a project
        project_service = ProjectService()
        data = dict(name='DatatypesFactoryProject' + micro_postfix, description='test_desc', users=[])
        self.project = project_service.store_project(self.user, True, None, **data)

        # Create algorithm
        alg_category = model.AlgorithmCategory('one', True)
        dao.store_entity(alg_category)
        ad = model.Algorithm("test_module1", "classname1", alg_category.id)
        self.algorithm = dao.store_entity(ad)

        # Create an operation
        self.meta = {DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
                     DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE}
        operation = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                    meta=json.dumps(self.meta), status=model.STATUS_FINISHED)
        self.operation = dao.store_entity(operation)
Example #5
class EventHandlerTest(BaseTestCase):
    """
    This class contains tests for the tvb.core.services.event_handler module.
    """
    def setUp(self):
        """
        Reset the database before each test.
        """
        self.project_service = ProjectService()
        self.test_user = TestFactory.create_user()

    def tearDown(self):
        """
        Cleans the environment after testing (database and executors dictionary)
        """
        self.clean_database()
        event_handlers.EXECUTORS_DICT = {}

    def test_handle_event(self):
        """
        Test a defined handler for the store project method.
        """
        path_to_events = os.path.dirname(__file__)
        event_handlers.read_events([path_to_events])
        data = dict(name="test_project",
                    description="test_description",
                    users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0,
                         "Database reset probably failed!")
        test_project = self.project_service.store_project(
            self.test_user, True, None, **data)
        # Operations will start asynchronously; Give them time.
        time.sleep(1)
        gid = dao.get_last_data_with_uid("test_uid")
        self.assertTrue(gid is not None, "Nothing was stored in database!")
        datatype = dao.get_datatype_by_gid(gid)
        self.assertEqual(datatype.type, "Datatype1", "Wrong data stored!")
        self.project_service._remove_project_node_files(test_project.id, gid)
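
# The test above sleeps for a fixed second while the event handler runs
# asynchronously. A hypothetical polling helper would make it less timing
# sensitive (only dao.get_last_data_with_uid from the test is assumed):
def wait_for_uid(uid, timeout=10.0, poll_interval=0.25):
    deadline = time.time() + timeout
    while time.time() < deadline:
        gid = dao.get_last_data_with_uid(uid)
        if gid is not None:
            return gid
        time.sleep(poll_interval)
    return None
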
class ProjectServiceTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.services.project_service module.
    """    
    
    def setUp(self):
        """
        Reset the database before each test.
        """
        config.EVENTS_FOLDER = ''
        self.project_service = ProjectService()
        self.structure_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
    
    
    def tearDown(self):
        """
        Remove project folders and clean up database.
        """
        created_projects = dao.get_projects_for_user(self.test_user.id)
        for project in created_projects:
            self.structure_helper.remove_project_structure(project.name)
        self.delete_project_folders()
    
    
    def test_create_project_happy_flow(self):
        """
        Standard flow for creating a new project.
        """
        user1 = TestFactory.create_user('test_user1')
        user2 = TestFactory.create_user('test_user2')
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        TestFactory.create_project(self.test_user, 'test_project', users=[user1.id, user2.id])
        resulting_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(resulting_projects), 1, "Project with valid data not inserted!")  
        project = resulting_projects[0]
        if project.name == "test_project":
            self.assertEqual(project.description, "description", "Descriptions do not match")
            users_for_project = dao.get_members_of_project(project.id)
            for user in users_for_project:
                self.assertTrue(user.id in [user1.id, user2.id], "Users not stored properly.")
        self.assertTrue(os.path.exists(os.path.join(TvbProfile.current.TVB_STORAGE, FilesHelper.PROJECTS_FOLDER,
                                                    "test_project")), "Folder for project was not created")
   
   
    def test_create_project_empty_name(self):
        """
        Creating a project with an empty name.
        """
        data = dict(name="", description="test_description", users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        self.assertRaises(ProjectServiceException, self.project_service.store_project, 
                          self.test_user, True, None, **data)
   
   
    def test_edit_project_happy_flow(self):
        """
        Standard flow for editing an existing project.
        """
        selected_project = TestFactory.create_project(self.test_user, 'test_proj')
        proj_root = self.structure_helper.get_project_folder(selected_project)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 1, "Database initialization probably failed!")
        
        edited_data = dict(name="test_project", description="test_description", users=[])
        edited_project = self.project_service.store_project(self.test_user, False, selected_project.id, **edited_data)
        self.assertFalse(os.path.exists(proj_root), "Previous folder not deleted")
        proj_root = self.structure_helper.get_project_folder(edited_project)
        self.assertTrue(os.path.exists(proj_root), "New folder not created!")
        self.assertNotEqual(selected_project.name, edited_project.name, "Project was not changed!")
        
             
    def test_edit_project_unexisting(self):
        """
        Trying to edit a non-existing project.
        """
        selected_project = TestFactory.create_project(self.test_user, 'test_proj')
        self.structure_helper.get_project_folder(selected_project)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 1, "Database initialization probably failed!")
        data = dict(name="test_project", description="test_description", users=[])
        self.assertRaises(ProjectServiceException, self.project_service.store_project,
                          self.test_user, False, 99, **data)

    
    def test_find_project_happy_flow(self):
        """
        Standard flow for finding a project by its id.
        """
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        inserted_project = TestFactory.create_project(self.test_user, 'test_project')
        self.assertTrue(self.project_service.find_project(inserted_project.id) is not None, "Project not found !")
        dao_returned_project = dao.get_project_by_id(inserted_project.id)
        service_returned_project = self.project_service.find_project(inserted_project.id)
        self.assertEqual(dao_returned_project.id, service_returned_project.id,
                         "Data returned from service is different from data returned by DAO.")
        self.assertEqual(dao_returned_project.name, service_returned_project.name, 
                         "Data returned from service is different than  data returned by DAO.")  
        self.assertEqual(dao_returned_project.description, service_returned_project.description,
                         "Data returned from service is different from data returned by DAO.")        
        self.assertEqual(dao_returned_project.members, service_returned_project.members,
                         "Data returned from service is different from data returned by DAO.")
                      
        
    def test_find_project_unexisting(self):
        """
        Searching for a non-existing project.
        """
        data = dict(name="test_project", description="test_description", users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        self.project_service.store_project(self.test_user, True, None, **data)
        self.assertRaises(ProjectServiceException, self.project_service.find_project, 99)  
        
        
    def test_retrieve_projects_for_user(self):
        """
        Test for retrieving the projects for a given user. One page only.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0, "Database was not reset properly!")
        TestFactory.create_project(self.test_user, 'test_proj')
        TestFactory.create_project(self.test_user, 'test_proj1')
        TestFactory.create_project(self.test_user, 'test_proj2')
        user1 = TestFactory.create_user('another_user')
        TestFactory.create_project(user1, 'test_proj3')
        projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        for project in projects:
            self.assertNotEqual(project.name, "test_proj3", "This project should not have been retrieved")
            
            
    def test_retrieve_1project_3usr(self):
        """
        One user as admin, two users as members, getting projects for admin and for any of
        the members should return one.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        TestFactory.create_project(self.test_user, 'Testproject', users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(self.test_user.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member1.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member2.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")
        
        
    def test_retrieve_3projects_3usr(self):
        """
        Three users, 3 projects. Structure of db:
        proj1: {admin: user1, members: [user2, user3]}
        proj2: {admin: user2, members: [user1]}
        proj3: {admin: user3, members: [user1, user2]}
        Check valid project returns for all the users.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        member3 = TestFactory.create_user("member3")
        TestFactory.create_project(member1, 'TestProject1', users=[member2.id, member3.id])
        TestFactory.create_project(member2, 'TestProject2', users=[member1.id])
        TestFactory.create_project(member3, 'TestProject3', users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(member1.id, 1)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member2.id, 1)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member3.id, 1)[0]
        self.assertEqual(len(projects), 2, "Projects not retrieved properly!")
        
        
    def test_retrieve_projects_random(self):
        """
        Generate a large number of users/projects, and validate the results.
        """
        ExtremeTestFactory.generate_users(NR_USERS, MAX_PROJ_PER_USER)
        for i in range(NR_USERS):
            current_user = dao.get_user_by_name("gen" + str(i))
            expected_projects = ExtremeTestFactory.VALIDATION_DICT[current_user.id]
            if expected_projects % PROJECTS_PAGE_SIZE == 0:
                expected_pages = expected_projects / PROJECTS_PAGE_SIZE
                exp_proj_per_page = PROJECTS_PAGE_SIZE
            else:
                expected_pages = expected_projects / PROJECTS_PAGE_SIZE + 1
                exp_proj_per_page = expected_projects % PROJECTS_PAGE_SIZE
            if expected_projects == 0:
                expected_pages = 0
                exp_proj_per_page = 0
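            # For example, with a hypothetical PROJECTS_PAGE_SIZE of 20: 23 projects give
            # 2 pages with 3 projects on the requested (last) page, 40 projects give 2 full
            # pages, and 0 projects give 0 pages and an empty result.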
            projects, pages = self.project_service.retrieve_projects_for_user(current_user.id, expected_pages)
            self.assertEqual(len(projects), exp_proj_per_page, "Projects not retrieved properly! Expected: " +
                             str(exp_proj_per_page) + " but got: " + str(len(projects)))
            self.assertEqual(pages, expected_pages, "Pages not retrieved properly!")

        for folder in os.listdir(TvbProfile.current.TVB_STORAGE):
            full_path = os.path.join(TvbProfile.current.TVB_STORAGE, folder)
            if os.path.isdir(full_path) and folder.startswith('Generated'): 
                shutil.rmtree(full_path)
        
            
    def test_retrieve_projects_page2(self):
        """
        Test for retrieving the second page of projects for a given user.
        """
        for i in range(PROJECTS_PAGE_SIZE + 3):
            TestFactory.create_project(self.test_user, 'test_proj' + str(i))
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
        self.assertEqual(len(projects), (PROJECTS_PAGE_SIZE + 3) % PROJECTS_PAGE_SIZE, "Pagination improper.")
        self.assertEqual(pages, 2, 'Wrong number of pages retrieved.')
        
        
    def test_retrieve_projects_and_del(self):
        """
        Test that pagination is updated after deleting a project.
        """
        created_projects = []
        for i in range(PROJECTS_PAGE_SIZE + 1):
            created_projects.append(TestFactory.create_project(self.test_user, 'test_proj' + str(i)))
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
        self.assertEqual(len(projects), (PROJECTS_PAGE_SIZE + 1) % PROJECTS_PAGE_SIZE, "Pagination improper.")
        self.assertEqual(pages, (PROJECTS_PAGE_SIZE + 1) / PROJECTS_PAGE_SIZE + 1, 'Wrong number of pages')
        self.project_service.remove_project(created_projects[1].id)
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
        self.assertEqual(len(projects), 0, "Pagination improper.")
        self.assertEqual(pages, 1, 'Wrong number of pages retrieved.')
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 1)
        self.assertEqual(len(projects), PROJECTS_PAGE_SIZE, "Pagination improper.")
        self.assertEqual(pages, 1, 'Wrong number of pages retrieved.')


    def test_empty_project_has_zero_disk_size(self):
        TestFactory.create_project(self.test_user, 'test_proj')
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id)
        self.assertEqual(0, projects[0].disk_size)
        self.assertEqual('0.0 KiB', projects[0].disk_size_human)


    def test_project_disk_size(self):
        project1 = TestFactory.create_project(self.test_user, 'test_proj1')
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(self.test_user, project1, 'testSubject', zip_path)

        project2 = TestFactory.create_project(self.test_user, 'test_proj2')
        TestFactory.import_cff(test_user=self.test_user, test_project=project2)

        projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertNotEqual(projects[0].disk_size, projects[1].disk_size, "projects should have different size")

        for project in projects:
            self.assertNotEqual(0, project.disk_size)
            self.assertNotEqual('0.0 KiB', project.disk_size_human)

            prj_folder = self.structure_helper.get_project_folder(project)
            actual_disk_size = self.compute_recursive_h5_disk_usage(prj_folder)[0]

            ratio = float(actual_disk_size) / project.disk_size
            msg = "Real disk usage: %s The one recorded in the db : %s" % (actual_disk_size, project.disk_size)
            self.assertTrue(ratio < 1.4, msg)


    def test_get_linkable_projects(self):
        """
        Test for retrieving the link-able projects for a given user.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0, "Database was not reset!")
        test_proj = []
        user1 = TestFactory.create_user("another_user")
        for i in range(4):
            test_proj.append(TestFactory.create_project(self.test_user if i < 3 else user1, 'test_proj' + str(i)))

        project_storage = self.structure_helper.get_project_folder(test_proj[0])

        operation = TestFactory.create_operation(test_user=self.test_user, test_project=test_proj[0])

        project_storage = os.path.join(project_storage, str(operation.id))
        os.makedirs(project_storage)
        datatype = dao.store_entity(model.DataType(module="test_data", subject="subj1", 
                                                   state="test_state", operation_id=operation.id))
        linkable = self.project_service.get_linkable_projects_for_user(self.test_user.id, str(datatype.id))[0]
        self.assertEqual(len(linkable), 2, "Wrong count of link-able projects!")
        proj_names = [project.name for project in linkable]
        self.assertTrue(test_proj[1].name in proj_names)
        self.assertTrue(test_proj[2].name in proj_names)
        self.assertFalse(test_proj[3].name in proj_names)    
    
    
    def test_remove_project_happy_flow(self):
        """
        Standard flow for deleting a project.
        """
        inserted_project = TestFactory.create_project(self.test_user, 'test_proj')
        project_root = self.structure_helper.get_project_folder(inserted_project)
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 1, "Initializations failed!") 
        self.assertTrue(os.path.exists(project_root), "Something failed at insert time!")
        self.project_service.remove_project(inserted_project.id)
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 0, "Project was not deleted!")  
        self.assertFalse(os.path.exists(project_root), "Root folder not deleted!")  
        
        
    def test_remove_project_wrong_id(self):
        """
        Flow for deleting a project given a non-existing id.
        """
        TestFactory.create_project(self.test_user, 'test_proj')
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 1, "Initializations failed!") 
        self.assertRaises(ProjectServiceException, self.project_service.remove_project, 99)   
    

    @staticmethod
    def _create_value_wrapper(test_user, test_project=None):
        """
        Creates a ValueWrapper dataType, and the associated parent Operation.
        This is also used in ProjectStructureTest.
        """
        if test_project is None:
            test_project = TestFactory.create_project(test_user, 'test_proj')
        operation = TestFactory.create_operation(test_user=test_user, test_project=test_project)
        value_wrapper = ValueWrapper(data_value=5.0, data_name="my_value")
        value_wrapper.type = "ValueWrapper"
        value_wrapper.module = "tvb.datatypes.mapped_values"
        value_wrapper.subject = "John Doe"
        value_wrapper.state = "RAW_STATE"
        value_wrapper.set_operation_id(operation.id)
        adapter_instance = StoreAdapter([value_wrapper])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        all_value_wrappers = FlowService().get_available_datatypes(test_project.id,
                                                                   "tvb.datatypes.mapped_values.ValueWrapper")[0]
        if len(all_value_wrappers) != 1:
            raise Exception("Should be only one value wrapper.")
        result_vw = ABCAdapter.load_entity_by_gid(all_value_wrappers[0][2])
        return test_project, result_vw.gid, operation.gid

     
    def __check_meta_data(self, expected_meta_data, new_datatype):
        """Validate Meta-Data"""
        mapp_keys = {DataTypeMetaData.KEY_SUBJECT: "subject", DataTypeMetaData.KEY_STATE: "state"}
        for key, value in expected_meta_data.iteritems():
            if key in mapp_keys:
                self.assertEqual(value, getattr(new_datatype, mapp_keys[key]))
            elif key == DataTypeMetaData.KEY_OPERATION_TAG:
                if DataTypeMetaData.KEY_OP_GROUP_ID in expected_meta_data:
                    ## We have a Group to check
                    op_group = new_datatype.parent_operation.fk_operation_group
                    op_group = dao.get_generic_entity(model.OperationGroup, op_group)[0]
                    self.assertEqual(value, op_group.name)
                else:
                    self.assertEqual(value, new_datatype.parent_operation.user_group) 
    
    
    def test_remove_project_node(self):
        """
        Test removing a node from a project.
        """
        inserted_project, gid, gid_op = self._create_value_wrapper(self.test_user) 
        project_to_link = model.Project("Link", self.test_user.id, "descript")
        project_to_link = dao.store_entity(project_to_link)
        exact_data = dao.get_datatype_by_gid(gid)
        dao.store_entity(model.Links(exact_data.id, project_to_link.id))
        self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Initialization problem!")
        
        operation_id = dao.get_generic_entity(model.Operation, gid_op, 'gid')[0].id
        op_folder = self.structure_helper.get_project_folder("test_proj", str(operation_id))
        self.assertTrue(os.path.exists(op_folder))
        sub_files = os.listdir(op_folder)
        self.assertEqual(2, len(sub_files))
        ### Validate that no more files are created than needed.
        
        self.project_service._remove_project_node_files(inserted_project.id, gid)
        sub_files = os.listdir(op_folder)
        self.assertEqual(1, len(sub_files))
        ### operation.xml file should still be there
        
        op_folder = self.structure_helper.get_project_folder("Link", str(operation_id + 1)) 
        sub_files = os.listdir(op_folder)
        self.assertEqual(2, len(sub_files))
        self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Data should still be in DB, because of links")
        self.project_service._remove_project_node_files(project_to_link.id, gid)
        self.assertTrue(dao.get_datatype_by_gid(gid) is None)  
        sub_files = os.listdir(op_folder)
        self.assertEqual(1, len(sub_files))
        ### operation.xml file should still be there
        
        
    def test_update_meta_data_simple(self):
        """
        Test the metadata update for a simple datatype that is not part of a group.
        """
        inserted_project, gid, _ = self._create_value_wrapper(self.test_user)
        new_meta_data = {DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
                         DataTypeOverlayDetails.DATA_STATE: "second_state",
                         DataTypeOverlayDetails.CODE_GID: gid,
                         DataTypeOverlayDetails.CODE_OPERATION_TAG: 'new user group'}
        self.project_service.update_metadata(new_meta_data)
        
        new_datatype = dao.get_datatype_by_gid(gid)
        self.__check_meta_data(new_meta_data, new_datatype)
        
        op_path = FilesHelper().get_operation_meta_file_path(inserted_project.name, new_datatype.parent_operation.id)
        op_meta = XMLReader(op_path).read_metadata()
        self.assertEqual(op_meta['user_group'], 'new user group', 'UserGroup not updated!')


    def test_update_meta_data_group(self):
        """
        Test the metadata update for a group of datatypes.
        """
        datatypes, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")

        new_meta_data = {DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
                         DataTypeOverlayDetails.DATA_STATE: "updated_state",
                         DataTypeOverlayDetails.CODE_OPERATION_GROUP_ID: group_id,
                         DataTypeOverlayDetails.CODE_OPERATION_TAG: 'newGroupName'}
        self.project_service.update_metadata(new_meta_data)  
          
        for datatype in datatypes:
            new_datatype = dao.get_datatype_by_id(datatype.id)
            self.assertEqual(group_id, new_datatype.parent_operation.fk_operation_group)
            new_group = dao.get_generic_entity(model.OperationGroup, group_id)[0]
            self.assertEqual(new_group.name, "newGroupName") 
            self.__check_meta_data(new_meta_data, new_datatype)
            
    
    def _create_datatypes(self, dt_factory, nr_of_dts):
        for idx in range(nr_of_dts):
            dt = Datatype1()
            dt.row1 = "value%i" % (idx,)
            dt.row2 = "value%i" % (idx + 1,)
            dt_factory._store_datatype(dt)
            
            
    def test_retrieve_project_full(self):
        """
        Tests that full project information is retrieved by the method `ProjectService.retrieve_project_full(...)`.
        """
        dt_factory = datatypes_factory.DatatypesFactory()
        self._create_datatypes(dt_factory, 3)
        _, ops_nr, operations, pages_no = self.project_service.retrieve_project_full(dt_factory.project.id)
        self.assertEqual(ops_nr, 1, "DataType Factory should only use one operation to store all its datatypes.")
        self.assertEqual(pages_no, 1, "A single page of operations is expected.")
        resulted_dts = operations[0]['results']
        self.assertEqual(len(resulted_dts), 3, "3 datatypes should be created.")
        
        
    def test_get_project_structure(self):
        """
        Tests that the project structure is as expected and contains all datatypes.
        """
        SELF_DTS_NUMBER = 3
        dt_factory_1 = datatypes_factory.DatatypesFactory()
        self._create_datatypes(dt_factory_1, SELF_DTS_NUMBER)
        dt_group = dt_factory_1.create_datatype_group()

        link_ids, expected_links = [], []
        # Prepare link towards a simple DT
        dt_factory_2 = datatypes_factory.DatatypesFactory()
        dt_to_link = dt_factory_2.create_simple_datatype()
        link_ids.append(dt_to_link.id)
        expected_links.append(dt_to_link.gid)

        # Prepare links towards a full DT Group, but expecting only the DT_Group in the final tree
        link_gr = dt_factory_2.create_datatype_group()
        dts = dao.get_datatype_in_group(datatype_group_id=link_gr.id)
        link_ids.extend([dt_to_link.id for dt_to_link in dts])
        link_ids.append(link_gr.id)
        expected_links.append(link_gr.gid)

        # Prepare link towards a single DT inside a group, and expecting to find the DT in the final tree
        link_gr = dt_factory_2.create_datatype_group()
        dt_to_link = dao.get_datatype_in_group(datatype_group_id=link_gr.id)[0]
        link_ids.append(dt_to_link.id)
        expected_links.append(dt_to_link.gid)

        # Actually create the links from Prj2 into Prj1
        FlowService().create_link(link_ids, dt_factory_1.project.id)

        # Retrieve the raw data used to compose the tree (for easy parsing)
        dts_in_tree = dao.get_data_in_project(dt_factory_1.project.id)
        dts_in_tree = [dt.gid for dt in dts_in_tree]
        # Retrieve the tree json (for trivial validations only, as we can not decode)
        node_json = self.project_service.get_project_structure(dt_factory_1.project, None, DataTypeMetaData.KEY_STATE,
                                                               DataTypeMetaData.KEY_SUBJECT, None)

        self.assertEqual(len(expected_links) + SELF_DTS_NUMBER + 2, len(dts_in_tree), "invalid number of nodes in tree")
        self.assertFalse(link_gr.gid in dts_in_tree, "DT_group where a single DT is linked is not expected.")
        self.assertTrue(dt_group.gid in dts_in_tree, "DT_Group should be in the Project Tree!")
        self.assertTrue(dt_group.gid in node_json, "DT_Group should be in the Project Tree JSON!")

        project_dts = dao.get_datatypes_in_project(dt_factory_1.project.id)
        for dt in project_dts:
            if dt.fk_datatype_group is not None:
                self.assertFalse(dt.gid in node_json, "DTs that are part of a group should not be in the tree JSON")
                self.assertFalse(dt.gid in dts_in_tree, "DTs that are part of a group should not be in the tree")
            else:
                self.assertTrue(dt.gid in node_json, "Simple DTs and DT_Groups should be in the tree JSON")
                self.assertTrue(dt.gid in dts_in_tree, "Simple DTs and DT_Groups should be in the tree")

        for link_gid in expected_links:
            self.assertTrue(link_gid in node_json, "Expected Link not present")
            self.assertTrue(link_gid in dts_in_tree, "Expected Link not present")
class TestProjectService(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.services.project_service module.
    """
    def transactional_setup_method(self):
        """
        Reset the database before each test.
        """
        self.project_service = ProjectService()
        self.structure_helper = FilesHelper()
        self.test_user = TestFactory.create_user()

    def transactional_teardown_method(self):
        """
        Remove project folders and clean up database.
        """
        created_projects = dao.get_projects_for_user(self.test_user.id)
        for project in created_projects:
            self.structure_helper.remove_project_structure(project.name)
        self.delete_project_folders()

    def test_create_project_happy_flow(self):

        user1 = TestFactory.create_user('test_user1')
        user2 = TestFactory.create_user('test_user2')
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(initial_projects) == 0, "Database reset probably failed!"

        TestFactory.create_project(self.test_user,
                                   'test_project',
                                   "description",
                                   users=[user1.id, user2.id])

        resulting_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(
            resulting_projects) == 1, "Project with valid data not inserted!"
        project = resulting_projects[0]
        assert project.name == "test_project", "Invalid retrieved project name"
        assert project.description == "description", "Descriptions do not match"

        users_for_project = dao.get_members_of_project(project.id)
        for user in users_for_project:
            assert user.id in [user1.id, user2.id,
                               self.test_user.id], "Users not stored properly."
        assert os.path.exists(
            os.path.join(TvbProfile.current.TVB_STORAGE,
                         FilesHelper.PROJECTS_FOLDER,
                         "test_project")), "Folder for project was not created"

    def test_create_project_empty_name(self):
        """
        Creating a project with an empty name.
        """
        data = dict(name="", description="test_description", users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(initial_projects) == 0, "Database reset probably failed!"
        with pytest.raises(ProjectServiceException):
            self.project_service.store_project(self.test_user, True, None,
                                               **data)

    def test_edit_project_happy_flow(self):
        """
        Standard flow for editing an existing project.
        """
        selected_project = TestFactory.create_project(self.test_user,
                                                      'test_proj')
        proj_root = self.structure_helper.get_project_folder(selected_project)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(
            initial_projects) == 1, "Database initialization probably failed!"

        edited_data = dict(name="test_project",
                           description="test_description",
                           users=[])
        edited_project = self.project_service.store_project(
            self.test_user, False, selected_project.id, **edited_data)
        assert not os.path.exists(proj_root), "Previous folder not deleted"
        proj_root = self.structure_helper.get_project_folder(edited_project)
        assert os.path.exists(proj_root), "New folder not created!"
        assert selected_project.name != edited_project.name, "Project was not changed!"

    def test_edit_project_unexisting(self):
        """
        Trying to edit a non-existing project.
        """
        selected_project = TestFactory.create_project(self.test_user,
                                                      'test_proj')
        self.structure_helper.get_project_folder(selected_project)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(
            initial_projects) == 1, "Database initialization probably failed!"
        data = dict(name="test_project",
                    description="test_description",
                    users=[])
        with pytest.raises(ProjectServiceException):
            self.project_service.store_project(self.test_user, False, 99,
                                               **data)

    def test_find_project_happy_flow(self):
        """
        Standard flow for finding a project by its id.
        """
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(initial_projects) == 0, "Database reset probably failed!"
        inserted_project = TestFactory.create_project(self.test_user,
                                                      'test_project')
        assert self.project_service.find_project(
            inserted_project.id) is not None, "Project not found !"
        dao_returned_project = dao.get_project_by_id(inserted_project.id)
        service_returned_project = self.project_service.find_project(
            inserted_project.id)
        assert dao_returned_project.id == service_returned_project.id, \
            "Data returned from service is different from data returned by DAO."
        assert dao_returned_project.name == service_returned_project.name, \
            "Data returned from service is different than  data returned by DAO."
        assert dao_returned_project.description == service_returned_project.description, \
            "Data returned from service is different from data returned by DAO."
        assert dao_returned_project.members == service_returned_project.members, \
            "Data returned from service is different from data returned by DAO."

    def test_find_project_unexisting(self):
        """
        Searching for a non-existing project.
        """
        data = dict(name="test_project",
                    description="test_description",
                    users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        assert len(initial_projects) == 0, "Database reset probably failed!"
        project = self.project_service.store_project(self.test_user, True,
                                                     None, **data)
        # fetch a likely non-existing project. Previous project id plus a 'big' offset
        with pytest.raises(ProjectServiceException):
            self.project_service.find_project(project.id + 1033)

    def test_retrieve_projects_for_user(self):
        """
        Test for retrieving the projects for a given user. One page only.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        assert len(initial_projects) == 0, "Database was not reset properly!"
        TestFactory.create_project(self.test_user, 'test_proj')
        TestFactory.create_project(self.test_user, 'test_proj1')
        TestFactory.create_project(self.test_user, 'test_proj2')
        user1 = TestFactory.create_user('another_user')
        TestFactory.create_project(user1, 'test_proj3')
        projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        assert len(projects) == 3, "Projects not retrieved properly!"
        for project in projects:
            assert project.name != "test_project3", "This project should not have been retrieved"

    def test_retrieve_1project_3usr(self):
        """
        One user as admin, two users as members, getting projects for admin and for any of
        the members should return one.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        TestFactory.create_project(self.test_user,
                                   'Testproject',
                                   users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 1)[0]
        assert len(projects) == 1, "Projects not retrieved properly!"
        projects = self.project_service.retrieve_projects_for_user(
            member1.id, 1)[0]
        assert len(projects) == 1, "Projects not retrieved properly!"
        projects = self.project_service.retrieve_projects_for_user(
            member2.id, 1)[0]
        assert len(projects) == 1, "Projects not retrieved properly!"

    def test_retrieve_3projects_3usr(self):
        """
        Three users, 3 projects. Structure of db:
        proj1: {admin: user1, members: [user2, user3]}
        proj2: {admin: user2, members: [user1]}
        proj3: {admin: user3, members: [user1, user2]}
        Check valid project returns for all the users.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        member3 = TestFactory.create_user("member3")
        TestFactory.create_project(member1,
                                   'TestProject1',
                                   users=[member2.id, member3.id])
        TestFactory.create_project(member2, 'TestProject2', users=[member1.id])
        TestFactory.create_project(member3,
                                   'TestProject3',
                                   users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(
            member1.id, 1)[0]
        assert len(projects) == 3, "Projects not retrieved properly!"
        projects = self.project_service.retrieve_projects_for_user(
            member2.id, 1)[0]
        assert len(projects) == 3, "Projects not retrieved properly!"
        projects = self.project_service.retrieve_projects_for_user(
            member3.id, 1)[0]
        assert len(projects) == 2, "Projects not retrieved properly!"

    def test_retrieve_projects_random(self):
        """
        Generate a large number of users/projects, and validate the results.
        """
        ExtremeTestFactory.generate_users(NR_USERS, MAX_PROJ_PER_USER)
        for i in range(NR_USERS):
            current_user = dao.get_user_by_name("gen" + str(i))
            expected_projects = ExtremeTestFactory.VALIDATION_DICT[
                current_user.id]
            if expected_projects % PROJECTS_PAGE_SIZE == 0:
                expected_pages = expected_projects // PROJECTS_PAGE_SIZE
                exp_proj_per_page = PROJECTS_PAGE_SIZE
            else:
                expected_pages = expected_projects // PROJECTS_PAGE_SIZE + 1
                exp_proj_per_page = expected_projects % PROJECTS_PAGE_SIZE
            if expected_projects == 0:
                expected_pages = 0
                exp_proj_per_page = 0
            projects, pages = self.project_service.retrieve_projects_for_user(
                current_user.id, expected_pages)
            assert len(projects) == exp_proj_per_page, "Projects not retrieved properly! Expected: " + \
                                                       str(exp_proj_per_page) + " but got: " + str(len(projects))
            assert pages == expected_pages, "Pages not retrieved properly!"

        for folder in os.listdir(TvbProfile.current.TVB_STORAGE):
            full_path = os.path.join(TvbProfile.current.TVB_STORAGE, folder)
            if os.path.isdir(full_path) and folder.startswith('Generated'):
                shutil.rmtree(full_path)

    def test_retrieve_projects_page2(self):
        """
        Test for retrieving the second page of projects for a given user.
        """
        for i in range(PROJECTS_PAGE_SIZE + 3):
            TestFactory.create_project(self.test_user, 'test_proj' + str(i))
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 2)
        assert len(projects) == (PROJECTS_PAGE_SIZE + 3
                                 ) % PROJECTS_PAGE_SIZE, "Pagination inproper."
        assert pages == 2, 'Wrong number of pages retrieved.'

    def test_retrieve_projects_and_del(self):
        """
        Test that pagination is updated after deleting a project.
        """
        created_projects = []
        for i in range(PROJECTS_PAGE_SIZE + 1):
            created_projects.append(
                TestFactory.create_project(self.test_user,
                                           'test_proj' + str(i)))
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 2)
        assert len(projects) == (PROJECTS_PAGE_SIZE + 1
                                 ) % PROJECTS_PAGE_SIZE, "Pagination improper."
        assert pages == (PROJECTS_PAGE_SIZE +
                         1) // PROJECTS_PAGE_SIZE + 1, 'Wrong number of pages'
        self.project_service.remove_project(created_projects[1].id)
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 2)
        assert len(projects) == 0, "Pagination improper."
        assert pages == 1, 'Wrong number of pages retrieved.'
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 1)
        assert len(projects) == PROJECTS_PAGE_SIZE, "Pagination improper."
        assert pages == 1, 'Wrong number of pages retrieved.'

    def test_empty_project_has_zero_disk_size(self):
        TestFactory.create_project(self.test_user, 'test_proj')
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id)
        assert 0 == projects[0].disk_size
        assert '0.0 KiB' == projects[0].disk_size_human

    def test_project_disk_size(self):
        project1 = TestFactory.create_project(self.test_user, 'test_proj1')
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(self.test_user, project1, zip_path,
                                            'testSubject')

        project2 = TestFactory.create_project(self.test_user, 'test_proj2')
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_76.zip')
        TestFactory.import_zip_connectivity(self.test_user, project2, zip_path,
                                            'testSubject')

        projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        assert projects[0].disk_size != projects[
            1].disk_size, "projects should have different size"

        for project in projects:
            assert 0 != project.disk_size
            assert '0.0 KiB' != project.disk_size_human

            prj_folder = self.structure_helper.get_project_folder(project)
            actual_disk_size = FilesHelper.compute_recursive_h5_disk_usage(
                prj_folder)

            ratio = float(actual_disk_size) / project.disk_size
            msg = "Real disk usage: %s The one recorded in the db : %s" % (
                actual_disk_size, project.disk_size)
            assert ratio < 1.1, msg

    def test_get_linkable_projects(self):
        """
        Test for retrieving the projects for a given user.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        assert len(initial_projects) == 0, "Database was not reset!"
        test_proj = []
        user1 = TestFactory.create_user("another_user")
        for i in range(4):
            test_proj.append(
                TestFactory.create_project(self.test_user if i < 3 else user1,
                                           'test_proj' + str(i)))
        operation = TestFactory.create_operation(test_user=self.test_user,
                                                 test_project=test_proj[0])
        datatype = dao.store_entity(
            model_datatype.DataType(module="test_data",
                                    subject="subj1",
                                    state="test_state",
                                    operation_id=operation.id))

        linkable = self.project_service.get_linkable_projects_for_user(
            self.test_user.id, str(datatype.id))[0]

        assert len(linkable) == 2, "Wrong count of link-able projects!"
        proj_names = [project.name for project in linkable]
        assert test_proj[1].name in proj_names
        assert test_proj[2].name in proj_names
        assert not test_proj[3].name in proj_names

    def test_remove_project_happy_flow(self):
        """
        Standard flow for deleting a project.
        """
        inserted_project = TestFactory.create_project(self.test_user,
                                                      'test_proj')
        project_root = self.structure_helper.get_project_folder(
            inserted_project)
        projects = dao.get_projects_for_user(self.test_user.id)
        assert len(projects) == 1, "Initializations failed!"
        assert os.path.exists(project_root), "Something failed at insert time!"
        self.project_service.remove_project(inserted_project.id)
        projects = dao.get_projects_for_user(self.test_user.id)
        assert len(projects) == 0, "Project was not deleted!"
        assert not os.path.exists(project_root), "Root folder not deleted!"

    def test_remove_project_wrong_id(self):
        """
        Flow for deleting a project given a non-existing id.
        """
        TestFactory.create_project(self.test_user, 'test_proj')
        projects = dao.get_projects_for_user(self.test_user.id)
        assert len(projects) == 1, "Initializations failed!"
        with pytest.raises(ProjectServiceException):
            self.project_service.remove_project(99)

    def __check_meta_data(self, expected_meta_data, new_datatype):
        """Validate Meta-Data"""
        mapp_keys = {
            DataTypeOverlayDetails.DATA_SUBJECT: "subject",
            DataTypeOverlayDetails.DATA_STATE: "state"
        }
        for key, value in expected_meta_data.items():
            if key in mapp_keys:
                assert value == getattr(new_datatype, mapp_keys[key])
            elif key == DataTypeMetaData.KEY_OPERATION_TAG:
                if DataTypeMetaData.KEY_OP_GROUP_ID in expected_meta_data:
                    # We have a Group to check
                    op_group = new_datatype.parent_operation.fk_operation_group
                    op_group = dao.get_generic_entity(
                        model_operation.OperationGroup, op_group)[0]
                    assert value == op_group.name
                else:
                    assert value == new_datatype.parent_operation.user_group

    def test_remove_project_node(self):
        """
        Test removing a node from a project.
        """
        inserted_project, gid, op = TestFactory.create_value_wrapper(
            self.test_user)
        project_to_link = model_project.Project("Link", self.test_user.id,
                                                "descript")
        project_to_link = dao.store_entity(project_to_link)
        exact_data = dao.get_datatype_by_gid(gid)
        assert exact_data is not None, "Initialization problem!"
        dao.store_entity(
            model_datatype.Links(exact_data.id, project_to_link.id))

        vw_h5_path = h5.path_for_stored_index(exact_data)
        assert os.path.exists(vw_h5_path)

        if dao.get_system_user() is None:
            dao.store_entity(
                model_operation.User(
                    TvbProfile.current.web.admin.SYSTEM_USER_NAME,
                    TvbProfile.current.web.admin.SYSTEM_USER_NAME, None, None,
                    True, None))

        self.project_service._remove_project_node_files(
            inserted_project.id, gid)

        assert not os.path.exists(vw_h5_path)
        exact_data = dao.get_datatype_by_gid(gid)
        assert exact_data is not None, "Data should still be in DB, because of links"
        vw_h5_path_new = h5.path_for_stored_index(exact_data)
        assert os.path.exists(vw_h5_path_new)
        assert vw_h5_path_new != vw_h5_path

        self.project_service._remove_project_node_files(
            project_to_link.id, gid)
        assert dao.get_datatype_by_gid(gid) is None
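        # Behaviour exercised above: removing a linked DataType from its original project keeps
        # the DB row and moves its HDF5 file under the linked project's folder; only removing it
        # from the last project that references it deletes the entity for good.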

    def test_update_meta_data_simple(self):
        """
        Test updating metadata for a simple DataType that is not part of a group.
        """
        inserted_project, gid, _ = TestFactory.create_value_wrapper(
            self.test_user)
        new_meta_data = {
            DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
            DataTypeOverlayDetails.DATA_STATE: "second_state",
            DataTypeOverlayDetails.CODE_GID: gid,
            DataTypeOverlayDetails.CODE_OPERATION_TAG: 'new user group'
        }
        self.project_service.update_metadata(new_meta_data)

        new_datatype = dao.get_datatype_by_gid(gid)
        self.__check_meta_data(new_meta_data, new_datatype)

        new_datatype_h5 = h5.h5_file_for_index(new_datatype)
        assert new_datatype_h5.subject.load() == 'new subject', 'Subject not updated!'

    def test_update_meta_data_group(self, test_adapter_factory):
        """
        Test updating metadata for a group of DataTypes.
        """
        test_adapter_factory(adapter_class=TestAdapter3)
        op_group_id = TestFactory.create_group(test_user=self.test_user)[1]

        new_meta_data = {
            DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
            DataTypeOverlayDetails.DATA_STATE: "updated_state",
            DataTypeOverlayDetails.CODE_OPERATION_GROUP_ID: op_group_id,
            DataTypeOverlayDetails.CODE_OPERATION_TAG: 'newGroupName'
        }
        self.project_service.update_metadata(new_meta_data)
        datatypes = dao.get_datatype_in_group(op_group_id)
        for datatype in datatypes:
            new_datatype = dao.get_datatype_by_id(datatype.id)
            assert op_group_id == new_datatype.parent_operation.fk_operation_group
            new_group = dao.get_generic_entity(model_operation.OperationGroup,
                                               op_group_id)[0]
            assert new_group.name == "newGroupName"
            self.__check_meta_data(new_meta_data, new_datatype)

    def test_retrieve_project_full(self, dummy_datatype_index_factory):
        """
        Tests full project information is retrieved by method `ProjectService.retrieve_project_full(...)`
        """

        project = TestFactory.create_project(self.test_user)
        operation = TestFactory.create_operation(test_user=self.test_user,
                                                 test_project=project)

        dummy_datatype_index_factory(project=project, operation=operation)
        dummy_datatype_index_factory(project=project, operation=operation)
        dummy_datatype_index_factory(project=project, operation=operation)

        _, ops_nr, operations, pages_no = self.project_service.retrieve_project_full(
            project.id)
        assert ops_nr == 1, "DataType Factory should only use one operation to store all its datatypes."
        assert pages_no == 1, "A single operation should fit on one page."
        resulted_dts = operations[0]['results']
        assert len(resulted_dts) == 3, "3 datatypes should be created."

    def test_get_project_structure(self, datatype_group_factory,
                                   dummy_datatype_index_factory,
                                   project_factory, user_factory):
        """
        Tests project structure is as expected and contains all datatypes and created links
        """
        user = user_factory()
        project1 = project_factory(user, name="TestPS1")
        project2 = project_factory(user, name="TestPS2")

        dt_group = datatype_group_factory(project=project1)
        dt_simple = dummy_datatype_index_factory(state="RAW_DATA",
                                                 project=project1)
        # Create 3 DTs directly in Project 2
        dummy_datatype_index_factory(state="RAW_DATA", project=project2)
        dummy_datatype_index_factory(state="RAW_DATA", project=project2)
        dummy_datatype_index_factory(state="RAW_DATA", project=project2)

        # Create Links from Project 1 into Project 2
        link_ids, expected_links = [], []
        link_ids.append(dt_simple.id)
        expected_links.append(dt_simple.gid)

        # Prepare links towards a full DT Group, but expecting only the DT_Group in the final tree
        dts = dao.get_datatype_in_group(datatype_group_id=dt_group.id)
        link_ids.extend([dt_to_link.id for dt_to_link in dts])
        link_ids.append(dt_group.id)
        expected_links.append(dt_group.gid)

        # Actually create the links from Prj1 into Prj2
        AlgorithmService().create_link(link_ids, project2.id)

        # Retrieve the raw data used to compose the tree (for easy parsing)
        dts_in_tree = dao.get_data_in_project(project2.id)
        dts_in_tree = [dt.gid for dt in dts_in_tree]
        # Retrieve the tree json (for trivial validations only, as we can not decode)
        node_json = self.project_service.get_project_structure(
            project2, None, DataTypeMetaData.KEY_STATE,
            DataTypeMetaData.KEY_SUBJECT, None)

        assert len(expected_links) + 3 == len(
            dts_in_tree), "invalid number of nodes in tree"
        assert dt_group.gid in dts_in_tree, "DT_Group should be in the Project Tree!"
        assert dt_group.gid in node_json, "DT_Group should be in the Project Tree JSON!"

        project_dts = dao.get_datatypes_in_project(project2.id)
        for dt in project_dts:
            if dt.fk_datatype_group is not None:
                assert dt.gid not in node_json, "DTs part of a group should not be in the Project Tree JSON!"
                assert dt.gid not in dts_in_tree, "DTs part of a group should not be in the Project Tree!"
            else:
                assert dt.gid in node_json, "Simple DTs and DT_Groups should be in the Project Tree JSON!"
                assert dt.gid in dts_in_tree, "Simple DTs and DT_Groups should be in the Project Tree!"

        for link_gid in expected_links:
            assert link_gid in node_json, "Expected Link not present"
            assert link_gid in dts_in_tree, "Expected Link not present"
class ProjectController(BaseController):
    """
    Displays pages which deal with Project data management.
    """

    PRROJECTS_FOR_LINK_KEY = "projectsforlink"
    PRROJECTS_LINKED_KEY = "projectslinked"
    KEY_OPERATION_FILTERS = "operationfilters"

    def __init__(self):
        super(ProjectController, self).__init__()
        self.project_service = ProjectService()


    @expose_page
    @settings
    def index(self):
        """
        Display project main-menu. Choose one project to work with.
        """
        current_project = common.get_current_project()
        if current_project is None:
            raise cherrypy.HTTPRedirect("/project/viewall")
        template_specification = dict(mainContent="project_submenu", title="TVB Project Menu")
        return self.fill_default_attributes(template_specification)


    @expose_page
    @settings
    def viewall(self, create=False, page=1, selected_project_id=None, **_):
        """
        Display all existing projects. Choose one project to work with.
        """
        page = int(page)
        if cherrypy.request.method == 'POST' and create:
            raise cherrypy.HTTPRedirect('/project/editone')
        current_user_id = common.get_logged_user().id

        ## Select project if the user chose one.
        if selected_project_id is not None:
            try:
                selected_project = self.project_service.find_project(selected_project_id)
                self._mark_selected(selected_project)
            except ProjectServiceException as excep:
                self.logger.error(excep)
                self.logger.warning("Could not select project: " + str(selected_project_id))
                common.set_error_message("Could not select project: " + str(selected_project_id))

        # Prepare template response
        prjs, pages_no = self.project_service.retrieve_projects_for_user(current_user_id, page)
        template_specification = dict(mainContent="project/viewall", title="Available TVB Projects",
                                      projectsList=prjs, page_number=page, total_pages=pages_no)
        return self.fill_default_attributes(template_specification, 'list')


    @cherrypy.expose
    @handle_error(redirect=True)
    @check_user
    @settings
    def projectupload(self, **data):
        """Upload Project from TVB ZIP."""
        self.logger.debug("Uploading ..." + str(data))
        try:
            upload_param = "uploadedfile"
            if upload_param in data and data[upload_param]:
                import_service = ImportService()
                import_service.import_project_structure(data[upload_param], common.get_logged_user().id)
        except ServicesBaseException as excep:
            self.logger.warning(excep.message)
            common.set_error_message(excep.message)
        raise cherrypy.HTTPRedirect('/project/viewall')


    def _remove_project(self, project_id):
        """Private method for removing project."""
        try:
            self.project_service.remove_project(project_id)
        except ServicesBaseException as exc:
            self.logger.error("Could not delete project!")
            self.logger.exception(exc)
            common.set_error_message(exc.message)
        prj = common.get_current_project()
        if prj is not None and prj.id == int(project_id):
            common.remove_from_session(common.KEY_PROJECT)


    def _persist_project(self, data, project_id, is_create, current_user):
        """Private method to persist"""
        data = EditForm().to_python(data)
        saved_project = self.project_service.store_project(current_user, is_create, project_id, **data)
        selected_project = common.get_current_project()
        # retrieve_projects_for_user returns a (projects, pages) tuple; inspect the projects list
        user_projects, _ = self.project_service.retrieve_projects_for_user(current_user.id, 1)
        if len(user_projects) == 1:
            selected_project = saved_project
        if selected_project is None or (saved_project.id == selected_project.id):
            self._mark_selected(saved_project)


    @expose_page
    @settings
    def editone(self, project_id=None, cancel=False, save=False, delete=False, **data):
        """
        Create or change a Project. When project_id is empty we create a
        new entity, otherwise we edit an existing one.
        """
        if cherrypy.request.method == 'POST' and cancel:
            raise cherrypy.HTTPRedirect('/project')
        if cherrypy.request.method == 'POST' and delete:
            self._remove_project(project_id)
            raise cherrypy.HTTPRedirect('/project/viewall')

        current_user = common.get_logged_user()
        is_create = False
        if project_id is None or not int(project_id):
            is_create = True
            data["administrator"] = current_user.username
        else:
            current_project = self.project_service.find_project(project_id)
            if not save:
                # Only when we do not have submitted data,
                # populate fields with initial values for edit.
                data = dict(name=current_project.name, description=current_project.description)
            data["administrator"] = current_project.administrator.username
            self._mark_selected(current_project)
        data["project_id"] = project_id

        template_specification = dict(mainContent="project/editone", data=data, isCreate=is_create,
                                      title="Create new project" if is_create else "Edit " + data["name"],
                                      editUsersEnabled=(current_user.username == data['administrator']))
        try:
            if cherrypy.request.method == 'POST' and save:
                common.remove_from_session(common.KEY_PROJECT)
                common.remove_from_session(common.KEY_CACHED_SIMULATOR_TREE)
                self._persist_project(data, project_id, is_create, current_user)
                raise cherrypy.HTTPRedirect('/project/viewall')
        except formencode.Invalid as excep:
            self.logger.debug(str(excep))
            template_specification[common.KEY_ERRORS] = excep.unpack_errors()
        except ProjectServiceException as excep:
            self.logger.debug(str(excep))
            common.set_error_message(excep.message)
            raise cherrypy.HTTPRedirect('/project/viewall')

        all_users, members, pages = self.user_service.get_users_for_project(current_user.username, project_id)
        template_specification['usersList'] = all_users
        template_specification['usersMembers'] = [m.id for m in members]
        template_specification['usersPages'] = pages
        template_specification['usersCurrentPage'] = 1
        return self.fill_default_attributes(template_specification, 'properties')


    @expose_fragment('project/project_members')
    def getmemberspage(self, page, project_id=None):
        """Retrieve a new page of Project members."""
        current_name = common.get_logged_user().username
        all_users, members, _ = self.user_service.get_users_for_project(current_name, project_id, int(page))
        edit_enabled = True
        if project_id is not None:
            current_project = self.project_service.find_project(project_id)
            edit_enabled = (current_name == current_project.administrator.username)
        return dict(usersList=all_users, usersMembers=[m.id for m in members],
                    usersCurrentPage=page, editUsersEnabled=edit_enabled)


    @expose_json
    def set_visibility(self, entity_type, entity_gid, to_de_relevant):
        """
        Method used for setting the relevancy/visibility on a DataType(Group)/Operation(Group).
        """
        to_de_relevant = string2bool(to_de_relevant)
        is_operation, is_group = False, False
        if entity_type == graph_structures.NODE_OPERATION_TYPE:
            is_group = False
            is_operation = True
        elif entity_type == graph_structures.NODE_OPERATION_GROUP_TYPE:
            is_group = True
            is_operation = True

        if is_operation:
            self.project_service.set_operation_and_group_visibility(entity_gid, to_de_relevant, is_group)
        else:
            self.project_service.set_datatype_visibility(entity_gid, to_de_relevant)
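        # Illustrative round-trip (hypothetical values), assuming the graph page posts
        # form-encoded strings which string2bool() converts back to booleans:
        #   POST /project/set_visibility
        #        entity_type=<NODE_OPERATION_TYPE>  entity_gid=<gid>  to_de_relevant=False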


    @expose_page
    @settings
    def viewoperations(self, project_id=None, page=1, filtername=None, reset_filters=None):
        """
        Display the table of operations for the selected project.
        """
        if (project_id is None) or (not int(project_id)):
            raise cherrypy.HTTPRedirect('/project')

        ## Toggle filters
        filters = self.__get_operations_filters()
        selected_filters = None
        for my_filter in filters:
            if cherrypy.request.method == 'POST' and (filtername is not None):
                if reset_filters:
                    my_filter.selected = False
                elif my_filter.display_name == filtername:
                    my_filter.selected = not my_filter.selected
            if my_filter.selected:
                selected_filters = my_filter + selected_filters
        ## Iterate one more time, to update counters
        for my_filter in filters:
            if not my_filter.selected:
                new_count = self.project_service.count_filtered_operations(project_id, my_filter + selected_filters)
                my_filter.passes_count = new_count
            else:
                my_filter.passes_count = ''

        page = int(page)
        project, total_op_count, filtered_ops, pages_no = self.project_service.retrieve_project_full(
            project_id, selected_filters, page)
        ## Select current project
        self._mark_selected(project)

        template_specification = dict(mainContent="project/viewoperations", project=project,
                                      title='Past operations for "' + project.name + '"', operationsList=filtered_ops,
                                      total_op_count=total_op_count, total_pages=pages_no, page_number=page,
                                      filters=filters, no_filter_selected=(selected_filters is None), model=model)
        return self.fill_default_attributes(template_specification, 'operations')
    
    
    @expose_fragment("call_out_project")
    def generate_call_out_control(self):
        """
        Returns the content of a confirmation dialog, with a given question. 
        """
        self.update_operations_count()
        return {'selectedProject': common.get_current_project()}


    def __get_operations_filters(self):
        """
        Filters for the VIEW_ALL_OPERATIONS page.
        Get the currently selected filters from the session, or build a new set of filters.
        """
        session_filters = common.get_from_session(self.KEY_OPERATION_FILTERS)
        if session_filters:
            return session_filters
        else:
            sim_group = self.flow_service.get_algorithm_by_module_and_class(IntrospectionRegistry.SIMULATOR_MODULE,
                                                                            IntrospectionRegistry.SIMULATOR_CLASS)
            new_filters = StaticFiltersFactory.build_operations_filters(sim_group, common.get_logged_user().id)
            common.add2session(self.KEY_OPERATION_FILTERS, new_filters)
            return new_filters
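    # Note: operation filters are built once per user session and cached under
    # KEY_OPERATION_FILTERS, so toggling a filter in viewoperations() mutates the same
    # objects that later requests read back from the session.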


    @expose_fragment("overlay_confirmation")
    def show_confirmation_overlay(self, **data):
        """
        Returns the content of a confirmation dialog, with a given question.
        """
        if not data:
            data = {}
        question = data.get('question', "Are you sure?")
        data['question'] = question
        return self.fill_default_attributes(data)
    

    @expose_fragment("overlay")
    def get_datatype_details(self, entity_gid, back_page='null', exclude_tabs=None):
        """
        Returns the HTML which contains the details for the given dataType.
        :param back_page: if different from 'null' (the default) it will redirect to it after saving metadata changes
        """
        if exclude_tabs is None:
            exclude_tabs = []
        selected_project = common.get_current_project()
        datatype_details, states, entity = self.project_service.get_datatype_details(entity_gid)

        ### Load DataType categories
        current_type = datatype_details.data_type
        datatype_gid = datatype_details.gid
        categories = {}
        if not entity.invalid:
            categories = self.flow_service.get_launchable_algorithms(datatype_gid)

        is_group = False
        if datatype_details.operation_group_id is not None:
            ## Is a DataTypeGroup
            is_group = True

        ### Retrieve links
        linkable_projects_dict = self._get_linkable_projects_dict(entity.id)
        ### Load all exporters
        exporters = {}
        if not entity.invalid:
            exporters = ExportManager().get_exporters_for_data(entity)
        is_relevant = entity.visible

        template_specification = {"entity_gid": entity_gid,
                                  "nodeFields": datatype_details.get_ui_fields(),
                                  "allStates": states,
                                  "project": selected_project,
                                  "categories": categories,
                                  "exporters": exporters,
                                  "datatype_id": entity.id,
                                  "isGroup": is_group,
                                  "isRelevant": is_relevant,
                                  "nodeType": 'datatype',
                                  "backPageIdentifier": back_page}
        template_specification.update(linkable_projects_dict)

        overlay_class = "can-browse editor-node node-type-" + str(current_type).lower()
        if is_relevant:
            overlay_class += " node-relevant"
        else:
            overlay_class += " node_irrelevant"
        overlay_title = current_type
        if datatype_details.datatype_tag_1:
            overlay_title += " " + datatype_details.datatype_tag_1

        tabs = []
        overlay_indexes = []
        if "Metadata" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Metadata", "metadata"))
            overlay_indexes.append(0)
        if "Analyzers" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Analyzers", "analyzers", enabled=categories and 'Analyze' in categories))
            overlay_indexes.append(1)
        if "Visualizers" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Visualizers", "visualizers", enabled=categories and 'View' in categories))
            overlay_indexes.append(2)

        enable_link_tab = False
        if (not entity.invalid) and (linkable_projects_dict is not None):
            projects_for_link = linkable_projects_dict.get(self.PRROJECTS_FOR_LINK_KEY)
            if projects_for_link is not None and len(projects_for_link) > 0:
                enable_link_tab = True
            projects_linked = linkable_projects_dict.get(self.PRROJECTS_LINKED_KEY)
            if projects_linked is not None and len(projects_linked) > 0:
                enable_link_tab = True
        if "Links" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Links", "link_to", enabled=enable_link_tab))
            overlay_indexes.append(3)
        if "Export" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Export", "export", enabled=(exporters and len(exporters) > 0)))
            overlay_indexes.append(4)
        if "Derived DataTypes" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Derived DataTypes", "result_dts",
                                             enabled=self.project_service.count_datatypes_generated_from(entity_gid)))
            overlay_indexes.append(5)
        template_specification = self.fill_overlay_attributes(template_specification, "DataType Details",
                                                              overlay_title, "project/details_datatype_overlay",
                                                              overlay_class, tabs, overlay_indexes)
        template_specification['baseUrl'] = TvbProfile.current.web.BASE_URL
        return FlowController().fill_default_attributes(template_specification)


    @expose_fragment('project/linkable_projects')
    def get_linkable_projects(self, datatype_id, is_group, entity_gid):
        """
        Returns the HTML which displays the link-able projects for the given dataType
        """
        template_specification = self._get_linkable_projects_dict(datatype_id)
        template_specification["entity_gid"] = entity_gid
        template_specification["isGroup"] = is_group
        return template_specification


    def _get_linkable_projects_dict(self, datatype_id):
        """" UI ready dictionary with projects in which current DataType can be linked."""
        self.logger.debug("Searching projects to link for DT " + str(datatype_id))
        for_link, linked = self.project_service.get_linkable_projects_for_user(common.get_logged_user().id, datatype_id)

        projects_for_link, linked_projects = None, None
        if for_link:
            projects_for_link = {}
            for project in for_link:
                projects_for_link[project.id] = project.name

        if linked:
            linked_projects = {}
            for project in linked:
                linked_projects[project.id] = project.name

        template_specification = {self.PRROJECTS_FOR_LINK_KEY: projects_for_link,
                                  self.PRROJECTS_LINKED_KEY: linked_projects,
                                  "datatype_id": datatype_id}
        return template_specification
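    # Shape of the dictionary returned above (illustrative values only): project ids mapped to
    # names, or None when there is nothing to link / nothing linked, e.g.
    #   {PRROJECTS_FOR_LINK_KEY: {3: "ProjectA"}, PRROJECTS_LINKED_KEY: None, "datatype_id": 42}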


    @expose_fragment("overlay")
    def get_operation_details(self, entity_gid, is_group=False, back_page='burst'):
        """
        Returns the HTML which contains the details for the given operation.
        """
        if string2bool(str(is_group)):
            ### we have an OperationGroup entity.
            template_specification = self._compute_operation_details(entity_gid, True)
            # Assume all the operations in a group share the same visibility
            template_specification["nodeType"] = graph_structures.NODE_OPERATION_GROUP_TYPE

        else:
            ### we have a simple Operation
            template_specification = self._compute_operation_details(entity_gid)
            template_specification["displayRelevantButton"] = True
            template_specification["nodeType"] = graph_structures.NODE_OPERATION_TYPE

        template_specification["backPageIdentifier"] = back_page
        overlay_class = "can-browse editor-node node-type-" + template_specification["nodeType"]
        if template_specification["isRelevant"]:
            overlay_class += " node-relevant"
        else:
            overlay_class += " node_irrelevant"

        template_specification = self.fill_overlay_attributes(template_specification, "Details", "Operation",
                                                              "project/details_operation_overlay", overlay_class)
        return FlowController().fill_default_attributes(template_specification)


    def _compute_operation_details(self, entity_gid, is_group=False):
        """
        Returns a dictionary which contains the details for the given operation.
        """
        selected_project = common.get_current_project()
        op_details = self.project_service.get_operation_details(entity_gid, is_group)
        operation_id = op_details.operation_id

        display_reload_btn = True
        operation = self.flow_service.load_operation(operation_id)

        if (operation.fk_operation_group is not None) or (operation.burst is not None):
            display_reload_btn = False
        else:
            op_categ_id = operation.algorithm.fk_category
            raw_categories = self.flow_service.get_raw_categories()
            for category in raw_categories:
                if category.id == op_categ_id:
                    display_reload_btn = False
                    break

        template_specification = {"entity_gid": entity_gid,
                                  "nodeFields": op_details.get_ui_fields(),
                                  "operationId": operation_id,
                                  "displayReloadBtn": display_reload_btn,
                                  "project": selected_project,
                                  "isRelevant": operation.visible}
        return template_specification


    def get_project_structure_grouping(self):
        user = common.get_logged_user()
        return user.get_project_structure_grouping()


    def set_project_structure_grouping(self, first, second):
        user = common.get_logged_user()
        user.set_project_structure_grouping(first, second)
        self.user_service.edit_user(user)

    @expose_page
    @settings
    def editstructure(self, project_id=None, last_selected_tab="treeTab", first_level=None,
                      second_level=None, filter_input="", visibility_filter=None, **_ignored):
        """
        Return the page skeleton for displaying the project structure.
        """
        try:
            int(project_id)
        except (ValueError, TypeError):
            raise cherrypy.HTTPRedirect('/project')

        if first_level is None or second_level is None:
            first_level, second_level = self.get_project_structure_grouping()

        selected_project = self.project_service.find_project(project_id)
        self._mark_selected(selected_project)
        data = self.project_service.get_filterable_meta()
        filters = StaticFiltersFactory.build_datatype_filters(selected=visibility_filter)
        template_specification = dict(mainContent="project/structure", baseUrl=TvbProfile.current.web.BASE_URL,
                                      title=selected_project.name,
                                      project=selected_project, data=data,
                                      lastSelectedTab=last_selected_tab, firstLevelSelection=first_level,
                                      secondLevelSelection=second_level, filterInputValue=filter_input, filters=filters)
        return self.fill_default_attributes(template_specification, 'data')


    @expose_fragment("overlay")
    def get_data_uploader_overlay(self, project_id):
        """
        Returns the HTML for a dialog which allows the user
        to upload data into the application.
        """
        upload_algorithms = self.flow_service.get_upload_algorithms()

        flow_controller = FlowController()
        algorithms_interface = {}
        tabs = []

        for algorithm in upload_algorithms:
            adapter_template = flow_controller.get_adapter_template(project_id, algorithm.id, True, None)
            algorithms_interface['template_for_algo_' + str(algorithm.id)] = adapter_template
            tabs.append(OverlayTabDefinition(algorithm.displayname, algorithm.subsection_name,
                                             description=algorithm.description))

        template_specification = self.fill_overlay_attributes(None, "Upload", "Upload data for this project",
                                                              "project/upload_data_overlay", "dialog-upload",
                                                              tabs_vertical=tabs)
        template_specification['uploadAlgorithms'] = upload_algorithms
        template_specification['projectId'] = project_id
        template_specification['algorithmsInterface'] = algorithms_interface

        return flow_controller.fill_default_attributes(template_specification)


    @expose_fragment("overlay")
    def get_project_uploader_overlay(self):
        """
        Returns the HTML for a dialog which allows the user
        to upload an entire project.
        """
        template_specification = self.fill_overlay_attributes(None, "Upload", "Project structure",
                                                              "project/upload_project_overlay", "dialog-upload")

        return FlowController().fill_default_attributes(template_specification)


    @expose_page
    def launchloader(self, project_id, algorithm_id, cancel=False, **data):
        """ 
        Start Upload mechanism
        """
        success_link = "/project/editstructure/" + str(project_id)
        # do not allow GET
        if cherrypy.request.method != 'POST' or cancel:
            raise cherrypy.HTTPRedirect(success_link)
        try:
            int(project_id)
            int(algorithm_id)
        except (ValueError, TypeError):
            raise cherrypy.HTTPRedirect(success_link)

        project = self.project_service.find_project(project_id)
        algorithm = self.flow_service.get_algorithm_by_identifier(algorithm_id)
        FlowController().execute_post(project.id, success_link, algorithm.fk_category, algorithm, **data)

        raise cherrypy.HTTPRedirect(success_link)


    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def readjsonstructure(self, project_id, visibility_filter=StaticFiltersFactory.FULL_VIEW,
                          first_level=None, second_level=None, filter_value=None):
        """
        AJAX exposed method. 
        Will return the complete JSON for Project's structure, or filtered tree
        (filter only Relevant entities or Burst only Data).
        """
        if first_level is None or second_level is None:
            first_level, second_level = self.get_project_structure_grouping()
        else:
            self.set_project_structure_grouping(first_level, second_level)

        selected_filter = StaticFiltersFactory.build_datatype_filters(single_filter=visibility_filter)

        project = self.project_service.find_project(project_id)
        json_structure = self.project_service.get_project_structure(project, selected_filter,
                                                                    first_level, second_level, filter_value)
        # This JSON encoding is necessary, otherwise we will get an error
        # from JSTree library while trying to load with AJAX 
        # the content of the tree.     
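        # iterencode() returns the JSON as a generator of chunks, so very large project
        # trees can be streamed to the client instead of being built as one big string.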
        encoder = JSONEncoder()
        return encoder.iterencode(json_structure)


    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def createlink(self, link_data, project_id, is_group):
        """
        Delegate the creation of the actual link to the flow service.
        """
        if not string2bool(str(is_group)):
            self.flow_service.create_link([link_data], project_id)
        else:
            all_data = self.project_service.get_datatype_in_group(link_data)
            # Link all Dts in group and the DT_Group entity
            data_ids = [data.id for data in all_data]
            data_ids.append(int(link_data))
            self.flow_service.create_link(data_ids, project_id)


    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def removelink(self, link_data, project_id, is_group):
        """
        Delegate the removal of the actual link to the flow service.
        """
        if not string2bool(str(is_group)):
            self.flow_service.remove_link(link_data, project_id)
        else:
            all_data = self.project_service.get_datatype_in_group(link_data)
            for data in all_data:
                self.flow_service.remove_link(data.id, project_id)
            self.flow_service.remove_link(int(link_data), project_id)


    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def noderemove(self, project_id, node_gid):
        """
        AJAX exposed method, to execute the removal of a DataType node.
        """
        try:
            if node_gid is None:
                return "Remove can only be applied on a Node with GID!"
            self.logger.debug("Removing data with GID=" + str(node_gid))
            self.project_service.remove_datatype(project_id, node_gid)
        except RemoveDataTypeException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            return excep.message
        except ServicesBaseException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            return excep.message
        return None


    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def updatemetadata(self, **data):
        """ Submit MetaData edited for DataType(Group) or Operation(Group). """
        try:
            self.project_service.update_metadata(data)
        except ServicesBaseException as excep:
            self.logger.error("Could not execute MetaData update!")
            self.logger.exception(excep)
            common.set_error_message(excep.message)
            return excep.message


    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def downloaddata(self, data_gid, export_module):
        """ Export the data to a default path of TVB_STORAGE/PROJECTS/project_name """
        current_prj = common.get_current_project()
        # Load data by GID
        entity = ABCAdapter.load_entity_by_gid(data_gid)
        # Do real export
        export_mng = ExportManager()
        file_name, file_path, delete_file = export_mng.export_data(entity, export_module, current_prj)
        if delete_file:
            # We force parent folder deletion because export process generated it.
            self.mark_file_for_delete(file_path, True)

        self.logger.debug("Data exported in file: " + str(file_path))
        return serve_file(file_path, "application/x-download", "attachment", file_name)


    @cherrypy.expose
    @handle_error(redirect=False)
    @check_user
    def downloadproject(self, project_id):
        """
        Export the data from a whole project.
        """
        current_project = self.project_service.find_project(project_id)
        export_mng = ExportManager()
        export_file = export_mng.export_project(current_project)

        # Register export file for delete when download complete
        # We force parent folder deletion because export process generated it.
        self.mark_file_for_delete(export_file, True)

        return serve_file(export_file, "application/x-download", "attachment")


    # Methods related to the data-structure graph

    @expose_json
    def create_json(self, item_gid, item_type, visibility_filter):
        """
        Method used for creating a JSON representation of a graph.
        """
        selected_filter = StaticFiltersFactory.build_datatype_filters(single_filter=visibility_filter)
        project = common.get_current_project()

        is_upload_operation = (item_type == graph_structures.NODE_OPERATION_TYPE) and \
                              (self.project_service.is_upload_operation(item_gid) or item_gid == "firstOperation")
        if is_upload_operation:
            graph_branches = []
            uploader_operations = self.project_service.get_all_operations_for_uploaders(project.id)
            for operation in uploader_operations:
                dt_outputs = self.project_service.get_results_for_operation(operation.id, selected_filter)
                dt_outputs = self._create_datatype_nodes(dt_outputs)
                parent_op = self._create_operation_nodes([operation], item_gid)
                branch = graph_structures.GraphComponent([], parent_op, dt_outputs, [])
                graph_branches.append(branch)
            graph = graph_structures.FullGraphStructure(graph_branches)
            return graph.prepare_for_json()

        dt_inputs, parent_op, dt_outputs, op_inputs = [], [], [], []
        if item_type == graph_structures.NODE_OPERATION_TYPE:
            dt_inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(item_gid, selected_filter)
            parent_op = self.project_service.load_operation_by_gid(item_gid)
            dt_outputs = self.project_service.get_results_for_operation(parent_op.id, selected_filter)
            #create graph nodes
            dt_inputs, parent_op, dt_outputs, op_inputs = self._create_nodes(dt_inputs, [parent_op],
                                                                             dt_outputs, [], item_gid)

        elif item_type == graph_structures.NODE_OPERATION_GROUP_TYPE:
            parent_op_group = self.project_service.get_operation_group_by_gid(item_gid)
            dt_inputs = self.project_service.get_datatypes_inputs_for_operation_group(parent_op_group.id,
                                                                                      selected_filter)
            datatype_group = self.project_service.get_datatypegroup_by_op_group_id(parent_op_group.id)
            datatype = self.project_service.get_datatype_by_id(datatype_group.id)

            dt_inputs = self._create_datatype_nodes(dt_inputs)
            parent_op = graph_structures.NodeStructure.build_structure_for_operation_group(parent_op_group.gid)
            parent_op.selected = True
            parent_op = [parent_op]
            if selected_filter.display_name == StaticFiltersFactory.RELEVANT_VIEW and datatype.visible is False:
                dt_outputs = []
            else:
                dt_outputs = self._create_datatype_nodes([datatype])

        elif item_type == graph_structures.NODE_DATATYPE_TYPE:
            selected_dt = ABCAdapter.load_entity_by_gid(item_gid)
            if self.project_service.is_datatype_group(item_gid):
                datatype_group = self.project_service.get_datatypegroup_by_gid(selected_dt.gid)
                parent_op_group = self.project_service.get_operation_group_by_id(datatype_group.fk_operation_group)
                dt_inputs = self.project_service.get_datatypes_inputs_for_operation_group(parent_op_group.id,
                                                                                          selected_filter)
                op_inputs = self.project_service.get_operations_for_datatype_group(selected_dt.id, selected_filter)
                op_inputs_in_groups = self.project_service.get_operations_for_datatype_group(selected_dt.id,
                                                                                             selected_filter,
                                                                                             only_in_groups=True)
                #create graph nodes
                dt_inputs, parent_op, dt_outputs, op_inputs = self._create_nodes(dt_inputs, [], [selected_dt],
                                                                                 op_inputs, item_gid)
                parent_op = [graph_structures.NodeStructure.build_structure_for_operation_group(parent_op_group.gid)]
                op_inputs_in_groups = self._create_operation_group_nodes(op_inputs_in_groups)
                op_inputs.extend(op_inputs_in_groups)
            else:
                parent_op = self.flow_service.load_operation(selected_dt.fk_from_operation)
                dt_inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(parent_op.gid,
                                                                                               selected_filter)
                op_inputs = self.project_service.get_operations_for_datatype(selected_dt.gid, selected_filter)
                op_inputs_in_groups = self.project_service.get_operations_for_datatype(selected_dt.gid, selected_filter,
                                                                                       only_in_groups=True)
                dt_outputs = self.project_service.get_results_for_operation(parent_op.id, selected_filter)
                #create graph nodes
                dt_inputs, parent_op, dt_outputs, op_inputs = self._create_nodes(dt_inputs, [parent_op], dt_outputs,
                                                                                 op_inputs, item_gid)
                op_inputs_in_groups = self._create_operation_group_nodes(op_inputs_in_groups)
                op_inputs.extend(op_inputs_in_groups)

        else:
            self.logger.error("Invalid item type: " + str(item_type))
            raise Exception("Invalid item type.")

        branch = graph_structures.GraphComponent(dt_inputs, parent_op, dt_outputs, op_inputs)
        graph = graph_structures.FullGraphStructure([branch])
        return graph.prepare_for_json()
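    # Summary of create_json() above: upload nodes expand into one GraphComponent per uploader
    # operation, while operation / operation-group / datatype nodes produce a single component
    # built from their inputs, parent operation(s), outputs and dependent operations.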


    def _create_nodes(self, dt_inputs, parent_op, dt_outputs, op_inputs, item_gid=None):
        """Expected a list of DataTypes, Parent Operation, Outputs, and returns NodeStructure entities."""
        dt_inputs = self._create_datatype_nodes(dt_inputs, item_gid)
        parent_op = self._create_operation_nodes(parent_op, item_gid)
        dt_outputs = self._create_datatype_nodes(dt_outputs, item_gid)
        op_inputs = self._create_operation_nodes(op_inputs, item_gid)
        return dt_inputs, parent_op, dt_outputs, op_inputs


    @staticmethod
    def _create_datatype_nodes(datatypes_list, selected_item_gid=None):
        """ Expects a list of DataTypes and returns a list of NodeStructures """
        nodes = []
        if datatypes_list is None:
            return nodes
        for data_type in datatypes_list:
            node = graph_structures.NodeStructure.build_structure_for_datatype(data_type.gid)
            if data_type.gid == selected_item_gid:
                node.selected = True
            nodes.append(node)
        return nodes


    @staticmethod
    def _create_operation_nodes(operations_list, selected_item_gid=None):
        """
        Expects a list of operations and returns a list of NodeStructures
        """
        nodes = []
        for operation in operations_list:
            node = graph_structures.NodeStructure.build_structure_for_operation(operation)
            if operation.gid == selected_item_gid:
                node.selected = True
            nodes.append(node)
        return nodes


    def _create_operation_group_nodes(self, operations_list, selected_item_gid=None):
        """
        Expects a list of operations that are part of some operation groups.
        """
        groups = dict()
        for operation in operations_list:
            if operation.fk_operation_group not in groups:
                group = self.project_service.get_operation_group_by_id(operation.fk_operation_group)
                groups[group.id] = group.gid
        nodes = []
        for _, group in groups.items():
            node = graph_structures.NodeStructure.build_structure_for_operation_group(group)
            if group == selected_item_gid:
                node.selected = True
            nodes.append(node)
        return nodes
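    # The groups dict above de-duplicates operations that belong to the same OperationGroup,
    # so each group is rendered as a single node no matter how many of its operations appear
    # in the input list.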


    def fill_default_attributes(self, template_dictionary, subsection='project'):
        """
        Override the base controller method to add required parameters for adapter templates.
        """
        template_dictionary[common.KEY_SECTION] = 'project'
        template_dictionary[common.KEY_SUB_SECTION] = subsection
        template_dictionary[common.KEY_INCLUDE_RESOURCES] = 'project/included_resources'
        BaseController.fill_default_attributes(self, template_dictionary)
        return template_dictionary
class ProjectServiceTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.services.project_service module.
    """
    def setUp(self):
        """
        Reset the database before each test.
        """
        config.EVENTS_FOLDER = ''
        self.project_service = ProjectService()
        self.structure_helper = FilesHelper()
        self.test_user = TestFactory.create_user()

    def tearDown(self):
        """
        Remove project folders and clean up database.
        """
        created_projects = dao.get_projects_for_user(self.test_user.id)
        for project in created_projects:
            self.structure_helper.remove_project_structure(project.name)
        self.delete_project_folders()

    def test_create_project_happy_flow(self):
        """
        Standard flow for creating a new project.
        """
        user1 = TestFactory.create_user('test_user1')
        user2 = TestFactory.create_user('test_user2')
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0,
                         "Database reset probably failed!")
        TestFactory.create_project(self.test_user,
                                   'test_project',
                                   users=[user1.id, user2.id])
        resulting_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(resulting_projects), 1,
                         "Project with valid data not inserted!")
        project = resulting_projects[0]
        if project.name == "test_project":
            self.assertEqual(project.description, "description",
                             "Description does not match")
            users_for_project = dao.get_members_of_project(project.id)
            for user in users_for_project:
                self.assertTrue(user.id in [user1.id, user2.id],
                                "Users not stored properly.")
        self.assertTrue(
            os.path.exists(
                os.path.join(TvbProfile.current.TVB_STORAGE,
                             FilesHelper.PROJECTS_FOLDER, "test_project")),
            "Folder for project was not created")

    def test_create_project_empty_name(self):
        """
        Creating a project with an empty name.
        """
        data = dict(name="", description="test_description", users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0,
                         "Database reset probably failed!")
        self.assertRaises(ProjectServiceException,
                          self.project_service.store_project, self.test_user,
                          True, None, **data)

    def test_edit_project_happy_flow(self):
        """
        Standard flow for editing an existing project.
        """
        selected_project = TestFactory.create_project(self.test_user,
                                                      'test_proj')
        proj_root = self.structure_helper.get_project_folder(selected_project)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 1,
                         "Database initialization probably failed!")

        edited_data = dict(name="test_project",
                           description="test_description",
                           users=[])
        edited_project = self.project_service.store_project(
            self.test_user, False, selected_project.id, **edited_data)
        self.assertFalse(os.path.exists(proj_root),
                         "Previous folder not deleted")
        proj_root = self.structure_helper.get_project_folder(edited_project)
        self.assertTrue(os.path.exists(proj_root), "New folder not created!")
        self.assertNotEqual(selected_project.name, edited_project.name,
                            "Project was no changed!")

    def test_edit_project_unexisting(self):
        """
        Trying to edit a non-existing project.
        """
        selected_project = TestFactory.create_project(self.test_user,
                                                      'test_proj')
        self.structure_helper.get_project_folder(selected_project)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 1,
                         "Database initialization probably failed!")
        data = dict(name="test_project",
                    description="test_description",
                    users=[])
        self.assertRaises(ProjectServiceException,
                          self.project_service.store_project, self.test_user,
                          False, 99, **data)

    def test_find_project_happy_flow(self):
        """
        Standard flow for finding a project by its id.
        """
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0,
                         "Database reset probably failed!")
        inserted_project = TestFactory.create_project(self.test_user,
                                                      'test_project')
        self.assertTrue(
            self.project_service.find_project(inserted_project.id) is not None,
            "Project not found !")
        dao_returned_project = dao.get_project_by_id(inserted_project.id)
        service_returned_project = self.project_service.find_project(
            inserted_project.id)
        self.assertEqual(
            dao_returned_project.id, service_returned_project.id,
            "Data returned from service is different from data returned by DAO."
        )
        self.assertEqual(
            dao_returned_project.name, service_returned_project.name,
            "Data returned from service is different than  data returned by DAO."
        )
        self.assertEqual(
            dao_returned_project.description,
            service_returned_project.description,
            "Data returned from service is different from data returned by DAO."
        )
        self.assertEqual(
            dao_returned_project.members, service_returned_project.members,
            "Data returned from service is different from data returned by DAO."
        )

    def test_find_project_unexisting(self):
        """
        Searching for a non-existing project.
        """
        data = dict(name="test_project",
                    description="test_description",
                    users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0,
                         "Database reset probably failed!")
        project = self.project_service.store_project(self.test_user, True,
                                                     None, **data)
        # fetch a likely non-existing project. Previous project id plus a 'big' offset
        self.assertRaises(ProjectServiceException,
                          self.project_service.find_project, project.id + 1033)

    def test_retrieve_projects_for_user(self):
        """
        Test for retrieving the projects for a given user. One page only.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0,
                         "Database was not reset properly!")
        TestFactory.create_project(self.test_user, 'test_proj')
        TestFactory.create_project(self.test_user, 'test_proj1')
        TestFactory.create_project(self.test_user, 'test_proj2')
        user1 = TestFactory.create_user('another_user')
        TestFactory.create_project(user1, 'test_proj3')
        projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        for project in projects:
            self.assertNotEqual(
                project.name, "test_proj3",
                "This project should not have been retrieved")

    def test_retrieve_1project_3usr(self):
        """
        One user as admin, two users as members, getting projects for admin and for any of
        the members should return one.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        TestFactory.create_project(self.test_user,
                                   'Testproject',
                                   users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(
            member1.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(
            member2.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")

    def test_retrieve_3projects_3usr(self):
        """
        Three users, 3 projects. Structure of db:
        proj1: {admin: user1, members: [user2, user3]}
        proj2: {admin: user2, members: [user1]}
        proj3: {admin: user3, members: [user1, user2]}
        Check valid project returns for all the users.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        member3 = TestFactory.create_user("member3")
        TestFactory.create_project(member1,
                                   'TestProject1',
                                   users=[member2.id, member3.id])
        TestFactory.create_project(member2, 'TestProject2', users=[member1.id])
        TestFactory.create_project(member3,
                                   'TestProject3',
                                   users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(
            member1.id, 1)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(
            member2.id, 1)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(
            member3.id, 1)[0]
        self.assertEqual(len(projects), 2, "Projects not retrieved properly!")

    def test_retrieve_projects_random(self):
        """
        Generate a large number of users/projects, and validate the results.
        """
        ExtremeTestFactory.generate_users(NR_USERS, MAX_PROJ_PER_USER)
        for i in range(NR_USERS):
            current_user = dao.get_user_by_name("gen" + str(i))
            expected_projects = ExtremeTestFactory.VALIDATION_DICT[
                current_user.id]
            if expected_projects % PROJECTS_PAGE_SIZE == 0:
                expected_pages = expected_projects // PROJECTS_PAGE_SIZE
                exp_proj_per_page = PROJECTS_PAGE_SIZE
            else:
                expected_pages = expected_projects // PROJECTS_PAGE_SIZE + 1
                exp_proj_per_page = expected_projects % PROJECTS_PAGE_SIZE
            if expected_projects == 0:
                expected_pages = 0
                exp_proj_per_page = 0
            projects, pages = self.project_service.retrieve_projects_for_user(
                current_user.id, expected_pages)
            self.assertEqual(
                len(projects), exp_proj_per_page,
                "Projects not retrieved properly! Expected:" +
                str(exp_proj_per_page) + "but got:" + str(len(projects)))
            self.assertEqual(pages, expected_pages,
                             "Pages not retrieved properly!")

        for folder in os.listdir(TvbProfile.current.TVB_STORAGE):
            full_path = os.path.join(TvbProfile.current.TVB_STORAGE, folder)
            if os.path.isdir(full_path) and folder.startswith('Generated'):
                shutil.rmtree(full_path)

    def test_retrieve_projects_page2(self):
        """
        Test for retrieving the second page projects for a given user.
        """
        for i in range(PROJECTS_PAGE_SIZE + 3):
            TestFactory.create_project(self.test_user, 'test_proj' + str(i))
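        # With PROJECTS_PAGE_SIZE + 3 projects, page 2 should hold exactly the 3 overflow entries.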
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 2)
        self.assertEqual(len(projects),
                         (PROJECTS_PAGE_SIZE + 3) % PROJECTS_PAGE_SIZE,
                         "Pagination inproper.")
        self.assertEqual(pages, 2, 'Wrong number of pages retrieved.')

    def test_retrieve_projects_and_del(self):
        """
        Test that pagination is updated correctly after one of the projects is removed.
        """
        created_projects = []
        for i in range(PROJECTS_PAGE_SIZE + 1):
            created_projects.append(
                TestFactory.create_project(self.test_user,
                                           'test_proj' + str(i)))
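        # PROJECTS_PAGE_SIZE + 1 projects leave a single entry on page 2; after one project
        # is removed below, only a full first page should remain and page 2 becomes empty.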
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 2)
        self.assertEqual(len(projects),
                         (PROJECTS_PAGE_SIZE + 1) % PROJECTS_PAGE_SIZE,
                         "Pagination improper.")
        self.assertEqual(pages,
                         (PROJECTS_PAGE_SIZE + 1) / PROJECTS_PAGE_SIZE + 1,
                         'Wrong number of pages')
        self.project_service.remove_project(created_projects[1].id)
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 2)
        self.assertEqual(len(projects), 0, "Pagination improper.")
        self.assertEqual(pages, 1, 'Wrong number of pages retrieved.')
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 1)
        self.assertEqual(len(projects), PROJECTS_PAGE_SIZE,
                         "Pagination improper.")
        self.assertEqual(pages, 1, 'Wrong number of pages retrieved.')

    def test_empty_project_has_zero_disk_size(self):
        TestFactory.create_project(self.test_user, 'test_proj')
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id)
        self.assertEqual(0, projects[0].disk_size)
        self.assertEqual('0.0 KiB', projects[0].disk_size_human)

    def test_project_disk_size(self):
        project1 = TestFactory.create_project(self.test_user, 'test_proj1')
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(self.test_user, project1,
                                            'testSubject', zip_path)

        project2 = TestFactory.create_project(self.test_user, 'test_proj2')
        TestFactory.import_cff(test_user=self.test_user, test_project=project2)

        projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        self.assertNotEqual(projects[0].disk_size, projects[1].disk_size,
                            "Projects should have different sizes")

        for project in projects:
            self.assertNotEqual(0, project.disk_size)
            self.assertNotEqual('0.0 KiB', project.disk_size_human)

            prj_folder = self.structure_helper.get_project_folder(project)
            actual_disk_size = self.compute_recursive_h5_disk_usage(
                prj_folder)[0]

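            # The disk size recorded in the db may under-report the actual HDF5 usage,
            # but by no more than ~40% (actual / recorded must stay below 1.4).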
            ratio = float(actual_disk_size) / project.disk_size
            msg = "Real disk usage: %s The one recorded in the db : %s" % (
                actual_disk_size, project.disk_size)
            self.assertTrue(ratio < 1.4, msg)

    def test_get_linkable_projects(self):
        """
        Test retrieving the projects a given datatype can be linked into.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0, "Database was not reset!")
        test_proj = []
        user1 = TestFactory.create_user("another_user")
        for i in range(4):
            test_proj.append(
                TestFactory.create_project(self.test_user if i < 3 else user1,
                                           'test_proj' + str(i)))

        project_storage = self.structure_helper.get_project_folder(
            test_proj[0])

        operation = TestFactory.create_operation(test_user=self.test_user,
                                                 test_project=test_proj[0])

        project_storage = os.path.join(project_storage, str(operation.id))
        os.makedirs(project_storage)
        datatype = dao.store_entity(
            model.DataType(module="test_data",
                           subject="subj1",
                           state="test_state",
                           operation_id=operation.id))
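        # The datatype lives in test_proj[0], so only the current user's other two projects
        # are valid link targets; user1's project is presumably excluded because the test
        # user is not a member of it.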
        linkable = self.project_service.get_linkable_projects_for_user(
            self.test_user.id, str(datatype.id))[0]
        self.assertEqual(len(linkable), 2,
                         "Wrong count of link-able projects!")
        proj_names = [project.name for project in linkable]
        self.assertTrue(test_proj[1].name in proj_names)
        self.assertTrue(test_proj[2].name in proj_names)
        self.assertFalse(test_proj[3].name in proj_names)

    def test_remove_project_happy_flow(self):
        """
        Standard flow for deleting a project.
        """
        inserted_project = TestFactory.create_project(self.test_user,
                                                      'test_proj')
        project_root = self.structure_helper.get_project_folder(
            inserted_project)
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 1, "Initializations failed!")
        self.assertTrue(os.path.exists(project_root),
                        "Something failed at insert time!")
        self.project_service.remove_project(inserted_project.id)
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 0, "Project was not deleted!")
        self.assertFalse(os.path.exists(project_root),
                         "Root folder not deleted!")

    def test_remove_project_wrong_id(self):
        """
        Flow for deleting a project given a non-existing id.
        """
        TestFactory.create_project(self.test_user, 'test_proj')
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 1, "Initializations failed!")
        self.assertRaises(ProjectServiceException,
                          self.project_service.remove_project, 99)

    @staticmethod
    def _create_value_wrapper(test_user, test_project=None):
        """
        Creates a ValueWrapper dataType, and the associated parent Operation.
        This is also used in ProjectStructureTest.
        """
        if test_project is None:
            test_project = TestFactory.create_project(test_user, 'test_proj')
        operation = TestFactory.create_operation(test_user=test_user,
                                                 test_project=test_project)
        value_wrapper = ValueWrapper(data_value=5.0, data_name="my_value")
        value_wrapper.type = "ValueWrapper"
        value_wrapper.module = "tvb.datatypes.mapped_values"
        value_wrapper.subject = "John Doe"
        value_wrapper.state = "RAW_STATE"
        value_wrapper.set_operation_id(operation.id)
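        # Persist the wrapper by pre-launching the operation through a StoreAdapter,
        # so it is indexed like a regular operation result.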
        adapter_instance = StoreAdapter([value_wrapper])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        all_value_wrappers = FlowService().get_available_datatypes(
            test_project.id, "tvb.datatypes.mapped_values.ValueWrapper")[0]
        if len(all_value_wrappers) != 1:
            raise Exception("Should be only one value wrapper.")
        result_vw = ABCAdapter.load_entity_by_gid(all_value_wrappers[0][2])
        return test_project, result_vw.gid, operation.gid

    def __check_meta_data(self, expected_meta_data, new_datatype):
        """Validate Meta-Data"""
        mapp_keys = {
            DataTypeMetaData.KEY_SUBJECT: "subject",
            DataTypeMetaData.KEY_STATE: "state"
        }
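        # Subject and state map directly onto datatype attributes; the operation tag is
        # checked against either the operation group name (when a group id is present)
        # or the parent operation's user_group.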
        for key, value in expected_meta_data.iteritems():
            if key in mapp_keys:
                self.assertEqual(value, getattr(new_datatype, mapp_keys[key]))
            elif key == DataTypeMetaData.KEY_OPERATION_TAG:
                if DataTypeMetaData.KEY_OP_GROUP_ID in expected_meta_data:
                    ## We have a Group to check
                    op_group = new_datatype.parent_operation.fk_operation_group
                    op_group = dao.get_generic_entity(model.OperationGroup,
                                                      op_group)[0]
                    self.assertEqual(value, op_group.name)
                else:
                    self.assertEqual(value,
                                     new_datatype.parent_operation.user_group)

    def test_remove_project_node(self):
        """
        Test removing of a node from a project.
        """
        inserted_project, gid, gid_op = self._create_value_wrapper(
            self.test_user)
        project_to_link = model.Project("Link", self.test_user.id, "descript")
        project_to_link = dao.store_entity(project_to_link)
        exact_data = dao.get_datatype_by_gid(gid)
        dao.store_entity(model.Links(exact_data.id, project_to_link.id))
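        # The value wrapper now belongs to its original project and is also linked into
        # the 'Link' project, so removing it from one project should keep the db entry
        # alive while the other reference still exists.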
        self.assertTrue(
            dao.get_datatype_by_gid(gid) is not None,
            "Initialization problem!")

        operation_id = dao.get_generic_entity(model.Operation, gid_op,
                                              'gid')[0].id
        op_folder = self.structure_helper.get_project_folder(
            "test_proj", str(operation_id))
        self.assertTrue(os.path.exists(op_folder))
        sub_files = os.listdir(op_folder)
        self.assertEqual(2, len(sub_files))
        ### Validate that no more files are created than needed.

        self.project_service._remove_project_node_files(
            inserted_project.id, gid)
        sub_files = os.listdir(op_folder)
        self.assertEqual(1, len(sub_files))
        ### operation.xml file should still be there

        op_folder = self.structure_helper.get_project_folder(
            "Link", str(operation_id + 1))
        sub_files = os.listdir(op_folder)
        self.assertEqual(2, len(sub_files))
        self.assertTrue(
            dao.get_datatype_by_gid(gid) is not None,
            "Data should still be in DB, because of links")
        self.project_service._remove_project_node_files(
            project_to_link.id, gid)
        self.assertTrue(dao.get_datatype_by_gid(gid) is None)
        sub_files = os.listdir(op_folder)
        self.assertEqual(1, len(sub_files))
        ### operation.xml file should still be there

    def test_update_meta_data_simple(self):
        """
        Test updating metadata for a simple datatype that is not part of a group.
        """
        inserted_project, gid, _ = self._create_value_wrapper(self.test_user)
        new_meta_data = {
            DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
            DataTypeOverlayDetails.DATA_STATE: "second_state",
            DataTypeOverlayDetails.CODE_GID: gid,
            DataTypeOverlayDetails.CODE_OPERATION_TAG: 'new user group'
        }
        self.project_service.update_metadata(new_meta_data)

        new_datatype = dao.get_datatype_by_gid(gid)
        self.__check_meta_data(new_meta_data, new_datatype)

        op_path = FilesHelper().get_operation_meta_file_path(
            inserted_project.name, new_datatype.parent_operation.id)
        op_meta = XMLReader(op_path).read_metadata()
        self.assertEqual(op_meta['user_group'], 'new user group',
                         'UserGroup not updated!')

    def test_update_meta_data_group(self):
        """
        Test updating metadata for a group of dataTypes.
        """
        datatypes, group_id = TestFactory.create_group(
            self.test_user, subject="test-subject-1")

        new_meta_data = {
            DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
            DataTypeOverlayDetails.DATA_STATE: "updated_state",
            DataTypeOverlayDetails.CODE_OPERATION_GROUP_ID: group_id,
            DataTypeOverlayDetails.CODE_OPERATION_TAG: 'newGroupName'
        }
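        # Updating via the operation group id should rename the group and propagate the
        # new subject/state to every datatype in it.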
        self.project_service.update_metadata(new_meta_data)

        for datatype in datatypes:
            new_datatype = dao.get_datatype_by_id(datatype.id)
            self.assertEqual(group_id,
                             new_datatype.parent_operation.fk_operation_group)
            new_group = dao.get_generic_entity(model.OperationGroup,
                                               group_id)[0]
            self.assertEqual(new_group.name, "newGroupName")
            self.__check_meta_data(new_meta_data, new_datatype)

    def _create_datatypes(self, dt_factory, nr_of_dts):
        for idx in range(nr_of_dts):
            dt = Datatype1()
            dt.row1 = "value%i" % (idx, )
            dt.row2 = "value%i" % (idx + 1, )
            dt_factory._store_datatype(dt)

    def test_retrieve_project_full(self):
        """
        Tests that the full project information is retrieved by `ProjectService.retrieve_project_full(...)`.
        """
        dt_factory = datatypes_factory.DatatypesFactory()
        self._create_datatypes(dt_factory, 3)
        _, ops_nr, operations, pages_no = self.project_service.retrieve_project_full(
            dt_factory.project.id)
        self.assertEqual(
            ops_nr, 1,
            "DataType Factory should only use one operation to store all it's datatypes."
        )
        self.assertEqual(
            pages_no, 1,
            "DataType Factory should only use one operation to store all it's datatypes."
        )
        resulted_dts = operations[0]['results']
        self.assertEqual(len(resulted_dts), 3,
                         "3 datatypes should be created.")

    def test_get_project_structure(self):
        """
        Tests project structure is as expected and contains all datatypes
        """
        SELF_DTS_NUMBER = 3
        dt_factory_1 = datatypes_factory.DatatypesFactory()
        self._create_datatypes(dt_factory_1, SELF_DTS_NUMBER)
        dt_group = dt_factory_1.create_datatype_group()

        link_ids, expected_links = [], []
        # Prepare link towards a simple DT
        dt_factory_2 = datatypes_factory.DatatypesFactory()
        dt_to_link = dt_factory_2.create_simple_datatype()
        link_ids.append(dt_to_link.id)
        expected_links.append(dt_to_link.gid)

        # Prepare links towards a full DT Group, but expecting only the DT_Group in the final tree
        link_gr = dt_factory_2.create_datatype_group()
        dts = dao.get_datatype_in_group(datatype_group_id=link_gr.id)
        link_ids.extend([dt_to_link.id for dt_to_link in dts])
        link_ids.append(link_gr.id)
        expected_links.append(link_gr.gid)

        # Prepare link towards a single DT inside a group, and expecting to find the DT in the final tree
        link_gr = dt_factory_2.create_datatype_group()
        dt_to_link = dao.get_datatype_in_group(datatype_group_id=link_gr.id)[0]
        link_ids.append(dt_to_link.id)
        expected_links.append(dt_to_link.gid)

        # Actually create the links from Prj2 into Prj1
        FlowService().create_link(link_ids, dt_factory_1.project.id)

        # Retrieve the raw data used to compose the tree (for easy parsing)
        dts_in_tree = dao.get_data_in_project(dt_factory_1.project.id)
        dts_in_tree = [dt.gid for dt in dts_in_tree]
        # Retrieve the tree JSON (for basic validations only, as we cannot easily decode it here)
        node_json = self.project_service.get_project_structure(
            dt_factory_1.project, None, DataTypeMetaData.KEY_STATE,
            DataTypeMetaData.KEY_SUBJECT, None)

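        # Expected nodes: one entry per link (3), the simple DTs created locally (3),
        # plus 2 entries presumably contributed by the locally created datatype group.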
        self.assertEqual(
            len(expected_links) + SELF_DTS_NUMBER + 2, len(dts_in_tree),
            "invalid number of nodes in tree")
        self.assertFalse(
            link_gr.gid in dts_in_tree,
            "DT_group where a single DT is linked is not expected.")
        self.assertTrue(dt_group.gid in dts_in_tree,
                        "DT_Group should be in the Project Tree!")
        self.assertTrue(dt_group.gid in node_json,
                        "DT_Group should be in the Project Tree JSON!")

        project_dts = dao.get_datatypes_in_project(dt_factory_1.project.id)
        for dt in project_dts:
            if dt.fk_datatype_group is not None:
                self.assertFalse(dt.gid in node_json,
                                 "DTs that are part of a group should not be in the tree")
                self.assertFalse(dt.gid in dts_in_tree,
                                 "DTs that are part of a group should not be in the tree")
            else:
                self.assertTrue(dt.gid in node_json,
                                "Simple DTs and DT_Groups should be in the tree")
                self.assertTrue(dt.gid in dts_in_tree,
                                "Simple DTs and DT_Groups should be in the tree")

        for link_gid in expected_links:
            self.assertTrue(link_gid in node_json, "Expected Link not present")
            self.assertTrue(link_gid in dts_in_tree,
                            "Expected Link not present")