Пример #1
0
    def export_project(self, project):
        """
        Create a ZIP archive with the whole content of a project's folder,
        ready for export/download.

        :param project: project entity identifying the project to be exported
        :returns: path of the resulting ZIP archive
        :raises ExportException: when no project is given
        """
        if project is None:
            raise ExportException("Please provide project to be exported")

        files_helper = FilesHelper()
        project_folder = files_helper.get_project_folder(project)

        # Gather burst metadata and the datatype->burst mapping, reading the
        # database page by page to avoid loading everything at once.
        bursts_dict = {}
        datatype_burst_mapping = {}
        bursts_count = dao.get_bursts_for_project(project.id, count=True)
        for start_idx in range(0, bursts_count, BURST_PAGE_SIZE):
            bursts = dao.get_bursts_for_project(project.id,
                                                page_start=start_idx,
                                                page_end=start_idx +
                                                BURST_PAGE_SIZE)
            for burst in bursts:
                self._build_burst_export_dict(burst, bursts_dict)

        datatypes_count = dao.get_datatypes_for_project(project.id, count=True)
        for start_idx in range(0, datatypes_count, DATAYPES_PAGE_SIZE):
            datatypes = dao.get_datatypes_for_project(project.id,
                                                      page_start=start_idx,
                                                      page_end=start_idx +
                                                      DATAYPES_PAGE_SIZE)
            for datatype in datatypes:
                datatype_burst_mapping[datatype.gid] = datatype.fk_parent_burst

        # Compute path and name of the zip file
        now = datetime.now()
        date_str = now.strftime("%Y-%m-%d_%H-%M")
        zip_file_name = "%s_%s.%s" % (date_str, project.name,
                                      self.ZIP_FILE_EXTENSION)

        export_folder = self._build_data_export_folder(project)
        result_path = os.path.join(export_folder, zip_file_name)

        # Serialize burst info inside the project folder so it is packed into
        # the archive together with the project files.
        bursts_file_name = os.path.join(project_folder, BURST_INFO_FILE)
        burst_info = {
            BURSTS_DICT_KEY: bursts_dict,
            DT_BURST_MAP: datatype_burst_mapping
        }
        with open(bursts_file_name, 'w') as bursts_file:
            bursts_file.write(json.dumps(burst_info))

        # pack project content into a ZIP file
        result_zip = files_helper.zip_folder(result_path, project_folder)

        # remove these files, since we only want them in export archive
        os.remove(bursts_file_name)
        return result_zip
Пример #2
0
def get_gifty_file_name(project_id, desired_name):
    """
    Compute a non-existent file name inside the TEMP folder of the given
    project (or inside the global TVB storage when no project id is given).
    Starts from desired_name and lets get_unique_file_name add a suffix
    when the name is already taken.
    """
    if not project_id:
        return get_unique_file_name(cfg.TVB_STORAGE, desired_name)[0]
    project = dao.get_project_by_id(project_id)
    temp_folder = FilesHelper().get_project_folder(project,
                                                   FilesHelper.TEMP_FOLDER)
    return get_unique_file_name(temp_folder, desired_name)[0]
Пример #3
0
class DTITest(TransactionalTestCase):
    """
    Test basic functionality of DTI Import Service.
    """
    # Folder with the CSV fixtures shipped next to this test package.
    _DATA_DIR = os.path.join(os.path.dirname(current_pack.__file__), "data")

    # First data-set
    FILE_1 = os.path.join(_DATA_DIR, "TVB_ConnectionCapacityMatrix.csv")
    FILE_2 = os.path.join(_DATA_DIR, "TVB_ConnectionDistanceMatrix.csv")
    # Second data-set
    FILE_3 = os.path.join(_DATA_DIR, "TVB_ConnectionCapacityMatrix_3.csv")
    FILE_4 = os.path.join(_DATA_DIR, "TVB_ConnectionDistanceMatrix_3.csv")

    def setUp(self):
        """
        Prepare a user, a project, the pipeline service and a files helper
        before each test.
        """
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.service = DTIPipelineService('127.0.0.1', 'root')
        self.helper = FilesHelper()

    def test_process_csv(self):
        """
        Test that a CSV generated on the server is correctly processed.
        """
        temp_folder = self.helper.get_project_folder(self.test_project, "TEMP")

        for source in (self.FILE_1, self.FILE_2, self.FILE_3, self.FILE_4):
            copied = os.path.join(temp_folder, os.path.split(source)[1])
            self.helper.copy_file(source, copied)
            # Capacity matrices become weights, distance matrices become tracts.
            target_name = 'weights.txt' if 'Capacity' in source else 'tracts.txt'
            target = os.path.join(temp_folder, target_name)
            self.service._process_csv_file(copied, target)
            matrix = read_list_data(target)
            self.assertEqual(96, len(matrix))
            self.assertEqual(96, len(matrix[0]))
Пример #4
0
    def export(self, data, export_folder, project):
        """
        Export a data type.
        1. For a regular data type, simply return its storage file (HDF format).
        2. For a DataTypeGroup, build a ZIP with the files of all its members.
        """
        download_file_name = self.get_export_file_name(data)
        files_helper = FilesHelper()

        if not self.is_data_a_group(data):
            # Single data type: point directly at its storage file.
            project_folder = files_helper.get_project_folder(project)
            data_file = os.path.join(project_folder,
                                     data.get_storage_file_path())
            return (download_file_name, data_file, False)

        all_datatypes = self._get_all_data_types_arr(data)
        if not all_datatypes:
            raise ExportException(
                "Could not export a data type group with no data")

        zip_file = os.path.join(export_folder, download_file_name)

        # Collect the operation folder of every member of the group.
        operation_folders = [
            files_helper.get_operation_folder(project.name,
                                              data_type.fk_from_operation)
            for data_type in all_datatypes
        ]

        # Create ZIP archive
        files_helper.zip_folders(zip_file, operation_folders,
                                 self.OPERATION_FOLDER_PREFIX)
        return (download_file_name, zip_file, True)
Пример #5
0
class FilesHelperTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.entities.file.fileshelper module.
    """
    PROJECT_NAME = "test_proj"


    def setUp(self):
        """
        Set up the context needed by the tests.
        """
        self.files_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, self.PROJECT_NAME)


    def tearDown(self):
        """ Remove generated project during tests. """
        self.delete_project_folders()


    def test_check_created(self):
        """ Test standard flows for check created. """
        self.files_helper.check_created()
        self.assertTrue(os.path.exists(root_storage), "Storage not created!")

        self.files_helper.check_created(os.path.join(root_storage, "test"))
        self.assertTrue(os.path.exists(root_storage), "Storage not created!")
        self.assertTrue(os.path.exists(os.path.join(root_storage, "test")), "Test directory not created!")


    def test_get_project_folder(self):
        """
        Test the get_project_folder method which should create a folder in case
        it doesn't already exist.
        """
        project_path = self.files_helper.get_project_folder(self.test_project)
        self.assertTrue(os.path.exists(project_path), "Folder doesn't exist")

        folder_path = self.files_helper.get_project_folder(self.test_project, "43")
        self.assertTrue(os.path.exists(project_path), "Folder doesn't exist")
        self.assertTrue(os.path.exists(folder_path), "Folder doesn't exist")


    def test_rename_project_structure(self):
        """ Try to rename the folder structure of a project. Standard flow. """
        self.files_helper.get_project_folder(self.test_project)
        path, name = self.files_helper.rename_project_structure(self.test_project.name, "new_name")
        self.assertNotEqual(path, name, "Rename didn't take effect.")


    def test_rename_structure_same_name(self):
        """ Try to rename the folder structure of a project. Same name. """
        self.files_helper.get_project_folder(self.test_project)

        self.assertRaises(FileStructureException, self.files_helper.rename_project_structure,
                          self.test_project.name, self.PROJECT_NAME)


    def test_remove_project_structure(self):
        """ Check that remove project structure deletes the corresponding folder. Standard flow. """
        full_path = self.files_helper.get_project_folder(self.test_project)
        self.assertTrue(os.path.exists(full_path), "Folder was not created.")

        self.files_helper.remove_project_structure(self.test_project.name)
        self.assertFalse(os.path.exists(full_path), "Project folder not deleted.")


    def test_write_project_metadata(self):
        """ Write XML for test-project, then read it back and compare. """
        self.files_helper.write_project_metadata(self.test_project)
        expected_file = self.files_helper.get_project_meta_file_path(self.PROJECT_NAME)
        self.assertTrue(os.path.exists(expected_file))
        project_meta = XMLReader(expected_file).read_metadata()
        loaded_project = model.Project(None, None)
        loaded_project.from_dict(project_meta, self.test_user.id)
        self.assertEqual(self.test_project.name, loaded_project.name)
        self.assertEqual(self.test_project.description, loaded_project.description)
        self.assertEqual(self.test_project.gid, loaded_project.gid)
        expected_dict = self.test_project.to_dict()[1]
        # 'last_updated' is refreshed on write, so exclude it from comparison.
        del expected_dict['last_updated']
        found_dict = loaded_project.to_dict()[1]
        del found_dict['last_updated']
        self.assertEqual(expected_dict, found_dict)


    def test_write_operation_metadata(self):
        """
        Test that a correct XML is created for an operation.
        """
        operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        expected_file = self.files_helper.get_operation_meta_file_path(self.PROJECT_NAME, operation.id)
        self.assertFalse(os.path.exists(expected_file))
        self.files_helper.write_operation_metadata(operation)
        self.assertTrue(os.path.exists(expected_file))
        operation_meta = XMLReader(expected_file).read_metadata()
        loaded_operation = model.Operation(None, None, None, None)
        loaded_operation.from_dict(operation_meta, dao)
        expected_dict = operation.to_dict()[1]
        found_dict = loaded_operation.to_dict()[1]
        for key, value in expected_dict.iteritems():
            self.assertEqual(str(value), str(found_dict[key]))
        # Now validate that operation metaData can be also updated
        self.assertNotEqual("new_group_name", found_dict['user_group'])
        self.files_helper.update_operation_metadata(self.PROJECT_NAME, "new_group_name", operation.id)
        found_dict = XMLReader(expected_file).read_metadata()
        self.assertEqual("new_group_name", found_dict['user_group'])


    def test_remove_dt_happy_flow(self):
        """
        Happy flow for removing a file related to a DataType.
        """
        folder_path = self.files_helper.get_project_folder(self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        # Create an empty storage file; close it immediately so the handle
        # is not leaked and removal works on all platforms.
        open(datatype.get_storage_file_path(), 'w').close()
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()), "Test file was not created!")
        self.files_helper.remove_datatype(datatype)
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()), "Test file was not deleted!")


    def test_remove_dt_non_existent(self):
        """
        Try to call remove on a dataType with no H5 file.
        Should throw an exception.
        """
        folder_path = self.files_helper.get_project_folder(self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()))
        self.assertRaises(FileStructureException, self.files_helper.remove_datatype, datatype)


    def test_move_datatype(self):
        """
        Make sure associated H5 file is moved to a correct new location.
        """
        folder_path = self.files_helper.get_project_folder(self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        # Create an empty storage file; close it immediately so the handle
        # is not leaked and the move works on all platforms.
        open(datatype.get_storage_file_path(), 'w').close()
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()), "Test file was not created!")
        self.files_helper.move_datatype(datatype, self.PROJECT_NAME + '11', "43")

        self.assertFalse(os.path.exists(datatype.get_storage_file_path()), "Test file was not moved!")
        datatype.storage_path = self.files_helper.get_project_folder(self.PROJECT_NAME + '11', "43")
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()), "Test file was not created!")


    def test_find_relative_path(self):
        """ Check that a sub-path is correctly expressed relative to its root. """
        rel_path = self.files_helper.find_relative_path("/root/up/to/here/test/it/now", "/root/up/to/here")
        self.assertEqual(rel_path, os.sep.join(["test", "it", "now"]), "Did not extract relative path as expected.")


    def test_remove_files_valid(self):
        """
        Pass a valid list of files and check they are all removed.
        """
        file_list = ["test1", "test2", "test3"]
        for file_n in file_list:
            fp = open(file_n, 'w')
            fp.write('test')
            fp.close()
        for file_n in file_list:
            self.assertTrue(os.path.isfile(file_n))
        self.files_helper.remove_files(file_list)
        for file_n in file_list:
            self.assertFalse(os.path.isfile(file_n))


    def test_remove_folder(self):
        """
        Pass an existing folder name and check the folder gets removed.
        """
        folder_name = "test_folder"
        os.mkdir(folder_name)
        self.assertTrue(os.path.isdir(folder_name), "Folder should be created.")
        self.files_helper.remove_folder(folder_name)
        self.assertFalse(os.path.isdir(folder_name), "Folder should be deleted.")

    def test_remove_folder_non_existing_ignore_exc(self):
        """
        Pass a non-existing folder name with ignore_errors=True;
        no exception is expected.
        """
        folder_name = "test_folder"
        self.assertFalse(os.path.isdir(folder_name), "Folder should not exist before call.")
        self.files_helper.remove_folder(folder_name, ignore_errors=True)


    def test_remove_folder_non_existing(self):
        """
        Pass a non-existing folder name without ignoring errors;
        a FileStructureException is expected.
        """
        folder_name = "test_folder"
        self.assertFalse(os.path.isdir(folder_name), "Folder should not exist before call.")
        self.assertRaises(FileStructureException, self.files_helper.remove_folder, folder_name, False)
class ProjectServiceTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.services.projectservice module.
    """
    def setUp(self):
        """
        Prepare the project service, the files helper and a fresh user
        before each test.
        """
        # Removed a dead local 'EVENTS_FOLDER = ""' assignment: as a function
        # local it was never read and could not affect any module-level name.
        self.project_service = ProjectService()
        self.structure_helper = FilesHelper()
        self.test_user = TestFactory.create_user()

    def tearDown(self):
        """
        Remove the folder structure of every project the test user owns,
        then clean up any remaining project folders on disk.
        """
        for project in dao.get_projects_for_user(self.test_user.id):
            self.structure_helper.remove_project_structure(project.name)
        self.delete_project_folders()
    def test_create_project_happy_flow(self):
        """
        Standard flow for creating a new project.
        """
        user1 = TestFactory.create_user('test_user1')
        user2 = TestFactory.create_user('test_user2')
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0,
                         "Database reset probably failed!")
        TestFactory.create_project(self.test_user,
                                   'test_project',
                                   users=[user1.id, user2.id])
        resulting_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(resulting_projects), 1,
                         "Project with valid data not inserted!")
        project = resulting_projects[0]
        # NOTE(review): the assertions below only run when the stored name
        # matches; a wrong name would silently skip them - consider asserting
        # the name directly instead of guarding on it.
        if project.name == "test_project":
            self.assertEqual(project.description, "description",
                             "Description do no match")
            users_for_project = dao.get_members_of_project(project.id)
            for user in users_for_project:
                self.assertTrue(user.id in [user1.id, user2.id],
                                "Users not stored properly.")
        # The project folder must also have been created on disk.
        self.assertTrue(
            os.path.exists(
                os.path.join(cfg.TVB_STORAGE, FilesHelper.PROJECTS_FOLDER,
                             "test_project")),
            "Folder for project was not created")

    def test_create_project_empty_name(self):
        """
        Creating a project with an empty name should fail.
        """
        self.assertEqual(len(dao.get_projects_for_user(self.test_user.id)), 0,
                         "Database reset probably failed!")
        project_attrs = dict(name="", description="test_description", users=[])
        self.assertRaises(ProjectServiceException,
                          self.project_service.store_project, self.test_user,
                          True, None, **project_attrs)

    def test_edit_project_happy_flow(self):
        """
        Standard flow for editing an existing project.
        """
        selected_project = TestFactory.create_project(self.test_user,
                                                      'test_proj')
        proj_root = self.structure_helper.get_project_folder(selected_project)
        self.assertEqual(len(dao.get_projects_for_user(self.test_user.id)), 1,
                         "Database initialization probably failed!")

        new_attrs = dict(name="test_project",
                         description="test_description",
                         users=[])
        edited_project = self.project_service.store_project(
            self.test_user, False, selected_project.id, **new_attrs)
        # The rename must move the on-disk folder as well.
        self.assertFalse(os.path.exists(proj_root),
                         "Previous folder not deleted")
        proj_root = self.structure_helper.get_project_folder(edited_project)
        self.assertTrue(os.path.exists(proj_root), "New folder not created!")
        self.assertNotEqual(selected_project.name, edited_project.name,
                            "Project was no changed!")

    def test_edit_project_unexisting(self):
        """
        Trying to edit an un-existing project.
        """
        selected_project = TestFactory.create_project(self.test_user,
                                                      'test_proj')
        self.structure_helper.get_project_folder(selected_project)
        self.assertEqual(len(dao.get_projects_for_user(self.test_user.id)), 1,
                         "Database initialization probably failed!")
        # Project id 99 does not exist, so the update must fail.
        edit_attrs = dict(name="test_project",
                          description="test_description",
                          users=[])
        self.assertRaises(ProjectServiceException,
                          self.project_service.store_project, self.test_user,
                          False, 99, **edit_attrs)

    def test_find_project_happy_flow(self):
        """
        Standard flow for finding a project by it's id.
        """
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0,
                         "Database reset probably failed!")
        inserted_project = TestFactory.create_project(self.test_user,
                                                      'test_project')
        self.assertTrue(
            self.project_service.find_project(inserted_project.id) is not None,
            "Project not found !")
        # The service result must mirror the DAO result field by field.
        dao_returned_project = dao.get_project_by_id(inserted_project.id)
        service_returned_project = self.project_service.find_project(
            inserted_project.id)
        self.assertEqual(
            dao_returned_project.id, service_returned_project.id,
            "Data returned from service is different from data returned by DAO."
        )
        self.assertEqual(
            dao_returned_project.name, service_returned_project.name,
            "Data returned from service is different than  data returned by DAO."
        )
        self.assertEqual(
            dao_returned_project.description,
            service_returned_project.description,
            "Data returned from service is different from data returned by DAO."
        )
        self.assertEqual(
            dao_returned_project.members, service_returned_project.members,
            "Data returned from service is different from data returned by DAO."
        )

    def test_find_project_unexisting(self):
        """
        Searching for an un-existing project.
        """
        self.assertEqual(len(dao.get_projects_for_user(self.test_user.id)), 0,
                         "Database reset probably failed!")
        # Store one real project, then look up an id that cannot exist.
        project_attrs = dict(name="test_project",
                             description="test_description",
                             users=[])
        self.project_service.store_project(self.test_user, True, None,
                                           **project_attrs)
        self.assertRaises(ProjectServiceException,
                          self.project_service.find_project, 99)

    def test_retrieve_projects_for_user(self):
        """
        Test for retrieving the projects for a given user. One page only.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0,
                         "Database was not reset properly!")
        TestFactory.create_project(self.test_user, 'test_proj')
        TestFactory.create_project(self.test_user, 'test_proj1')
        TestFactory.create_project(self.test_user, 'test_proj2')
        user1 = TestFactory.create_user('another_user')
        TestFactory.create_project(user1, 'test_proj3')
        projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        # The other user's project ('test_proj3') must not be returned.
        # Fixed: previously compared against 'test_project3', a name never
        # used, which made the check vacuous; also use the non-deprecated
        # assertNotEqual alias.
        for project in projects:
            self.assertNotEqual(
                project.name, "test_proj3",
                "This project should not have been retrieved")

    def test_retrieve_1project_3usr(self):
        """
        One user as admin, two users as members, getting projects for admin and for any of
        the members should return one.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        TestFactory.create_project(self.test_user,
                                   'Testproject',
                                   users=[member1.id, member2.id])
        # Admin and both members must each see exactly one project.
        for user in (self.test_user, member1, member2):
            projects = self.project_service.retrieve_projects_for_user(
                user.id, 1)[0]
            self.assertEqual(len(projects), 1,
                             "Projects not retrieved properly!")

    def test_retrieve_3projects_3usr(self):
        """
        Three users, 3 projects. Structure of db:
        proj1: {admin: user1, members: [user2, user3]}
        proj2: {admin: user2, members: [user1]}
        proj3: {admin: user3, members: [user1, user2]}
        Check valid project returns for all the users.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        member3 = TestFactory.create_user("member3")
        TestFactory.create_project(member1,
                                   'TestProject1',
                                   users=[member2.id, member3.id])
        TestFactory.create_project(member2, 'TestProject2', users=[member1.id])
        TestFactory.create_project(member3,
                                   'TestProject3',
                                   users=[member1.id, member2.id])
        # member1 and member2 are involved in all 3 projects; member3 only in 2.
        for member, expected_count in ((member1, 3), (member2, 3),
                                       (member3, 2)):
            projects = self.project_service.retrieve_projects_for_user(
                member.id, 1)[0]
            self.assertEqual(len(projects), expected_count,
                             "Projects not retrieved properly!")

    def test_retrieve_projects_random(self):
        """
        Generate a large number of users/projects, and validate the results.
        """
        ExtremeTestFactory.generate_users(NR_USERS, MAX_PROJ_PER_USER)
        for i in range(NR_USERS):
            current_user = dao.get_user_by_name("gen" + str(i))
            expected_projects = ExtremeTestFactory.VALIDATION_DICT[
                current_user.id]
            # Derive the expected page count and the size of the last page.
            # NOTE(review): relies on Python 2 integer division ('/'); under
            # Python 3 this would need '//' - confirm the target interpreter.
            if expected_projects % PROJECTS_PAGE_SIZE == 0:
                expected_pages = expected_projects / PROJECTS_PAGE_SIZE
                exp_proj_per_page = PROJECTS_PAGE_SIZE
            else:
                expected_pages = expected_projects / PROJECTS_PAGE_SIZE + 1
                exp_proj_per_page = expected_projects % PROJECTS_PAGE_SIZE
            if expected_projects == 0:
                expected_pages = 0
                exp_proj_per_page = 0
            projects, pages = self.project_service.retrieve_projects_for_user(
                current_user.id, expected_pages)
            self.assertEqual(
                len(projects), exp_proj_per_page,
                "Projects not retrieved properly! Expected:" +
                str(exp_proj_per_page) + "but got:" + str(len(projects)))
            self.assertEqual(pages, expected_pages,
                             "Pages not retrieved properly!")
        # Clean the generated project folders from disk.
        for folder in os.listdir(cfg.TVB_STORAGE):
            full_path = os.path.join(cfg.TVB_STORAGE, folder)
            if os.path.isdir(full_path) and folder.startswith('Generated'):
                shutil.rmtree(full_path)

    def test_retrieve_projects_page2(self):
        """
        Test for retrieving the second page of projects for a given user.
        """
        for i in range(PROJECTS_PAGE_SIZE + 3):
            TestFactory.create_project(self.test_user, 'test_proj' + str(i))
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 2)
        # Page 2 holds only the 3 projects overflowing the first page.
        # (Fixed the 'inproper' typo for consistency with sibling tests.)
        self.assertEqual(len(projects),
                         (PROJECTS_PAGE_SIZE + 3) % PROJECTS_PAGE_SIZE,
                         "Pagination improper.")
        self.assertEqual(pages, 2, 'Wrong number of pages retrieved.')

    def test_retrieve_projects_and_del(self):
        """
        Create one project more than a full page, then delete one project
        and check that pagination shrinks back to a single full page.
        """
        created_projects = []
        for i in range(PROJECTS_PAGE_SIZE + 1):
            created_projects.append(
                TestFactory.create_project(self.test_user,
                                           'test_proj' + str(i)))
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 2)
        # Second page holds only the single overflowing project.
        self.assertEqual(len(projects),
                         (PROJECTS_PAGE_SIZE + 1) % PROJECTS_PAGE_SIZE,
                         "Pagination improper.")
        # NOTE(review): Python 2 integer division ('/') expected here;
        # '//' would be needed on Python 3 - confirm the target interpreter.
        self.assertEqual(pages,
                         (PROJECTS_PAGE_SIZE + 1) / PROJECTS_PAGE_SIZE + 1,
                         'Wrong number of pages')
        self.project_service.remove_project(created_projects[1].id)
        # After the deletion everything fits on one page again.
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 2)
        self.assertEqual(len(projects), 0, "Pagination improper.")
        self.assertEqual(pages, 1, 'Wrong number of pages retrieved.')
        projects, pages = self.project_service.retrieve_projects_for_user(
            self.test_user.id, 1)
        self.assertEqual(len(projects), PROJECTS_PAGE_SIZE,
                         "Pagination improper.")
        self.assertEqual(pages, 1, 'Wrong number of pages retrieved.')

    def test_get_linkable_projects(self):
        """
        Test for retrieving the projects a datatype can be linked into,
        for a given user.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0, "Database was not reseted!")
        test_proj = []
        user1 = TestFactory.create_user("another_user")
        # Projects 0-2 belong to the test user; project 3 to another user.
        for i in range(4):
            test_proj.append(
                TestFactory.create_project(self.test_user if i < 3 else user1,
                                           'test_proj' + str(i)))

        project_storage = self.structure_helper.get_project_folder(
            test_proj[0])
        result_meta = {
            DataTypeMetaData.KEY_OPERATION_TYPE: "Upload",
            DataTypeMetaData.KEY_AUTHOR: "John Doe",
            DataTypeMetaData.KEY_SUBJECT: "subj1",
            DataTypeMetaData.KEY_STATE: "test_state",
            DataTypeMetaData.KEY_NODE_TYPE: "test_data",
            DataTypeMetaData.KEY_DATE: "test_date",
            DataTypeMetaData.KEY_GID: generate_guid()
        }

        # Build the chain category -> group -> algorithm needed to store an
        # operation, then a datatype, inside project 0.
        entity = dao.store_entity(model.AlgorithmCategory("category"))
        entity = dao.store_entity(
            model.AlgorithmGroup("module", "classname", entity.id))
        entity = dao.store_entity(model.Algorithm(entity.id, "algo"))
        operation = model.Operation(self.test_user.id, test_proj[0].id,
                                    entity.id, "")
        operation = dao.store_entity(operation)
        project_storage = os.path.join(project_storage, str(operation.id))
        os.makedirs(project_storage)
        # NOTE(review): this DataTypeMetaData instance is never used afterwards
        # - presumably dead code; confirm before removing.
        entity = DataTypeMetaData(result_meta)
        datatype = dao.store_entity(
            model.DataType(module="test_data",
                           subject="subj1",
                           state="test_state",
                           operation_id=operation.id))
        linkable = self.project_service.get_linkable_projects_for_user(
            self.test_user.id, str(datatype.id))[0]
        # The datatype lives in project 0, so only projects 1 and 2 (same
        # owner) are link-able; project 3 belongs to a different user.
        self.assertEqual(len(linkable), 2,
                         "Wrong count of link-able projects!")
        proj_names = [project.name for project in linkable]
        self.assertTrue(test_proj[1].name in proj_names)
        self.assertTrue(test_proj[2].name in proj_names)
        self.assertFalse(test_proj[3].name in proj_names)

    def test_remove_project_happy_flow(self):
        """
        Deleting an existing project should remove both its DB entry and
        its storage folder on disk.
        """
        project = TestFactory.create_project(self.test_user, 'test_proj')
        storage_folder = self.structure_helper.get_project_folder(project)

        # Sanity-check the fixture before exercising the removal.
        user_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(user_projects), 1, "Initializations failed!")
        self.assertTrue(os.path.exists(storage_folder),
                        "Something failed at insert time!")

        self.project_service.remove_project(project.id)

        user_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(user_projects), 0, "Project was not deleted!")
        self.assertFalse(os.path.exists(storage_folder),
                         "Root folder not deleted!")

    def test_remove_project_wrong_id(self):
        """
        Deleting a project by a non-existing id should raise
        ProjectServiceException and leave existing projects untouched.
        """
        TestFactory.create_project(self.test_user, 'test_proj')
        self.assertEqual(len(dao.get_projects_for_user(self.test_user.id)), 1,
                         "Initializations failed!")
        # Id 99 does not exist in the freshly reset database.
        self.assertRaises(ProjectServiceException,
                          self.project_service.remove_project, 99)

    @staticmethod
    def _create_value_wrapper(test_user, test_project=None):
        """
        Persist a ValueWrapper dataType together with its parent Operation.
        This is also used in ProjectStructureTest.

        :returns: (project, gid of the stored ValueWrapper, gid of its operation)
        """
        if test_project is None:
            test_project = TestFactory.create_project(test_user, 'test_proj')
        parent_op = TestFactory.create_operation(test_user=test_user,
                                                 test_project=test_project)
        wrapper = ValueWrapper(data_value=5.0, data_name="my_value")
        wrapper.type = "ValueWrapper"
        wrapper.module = "tvb.datatypes.mapped_values"
        wrapper.subject = "John Doe"
        wrapper.state = "RAW_STATE"
        wrapper.set_operation_id(parent_op.id)
        store_adapter = StoreAdapter([wrapper])
        OperationService().initiate_prelaunch(parent_op, store_adapter, {})

        # Exactly one wrapper must exist; reload it through the adapter to get
        # the persisted entity.
        wrappers = FlowService().get_available_datatypes(
            test_project.id, "tvb.datatypes.mapped_values.ValueWrapper")
        if len(wrappers) != 1:
            raise Exception("Should be only one value wrapper.")
        stored_vw = ABCAdapter.load_entity_by_gid(wrappers[0][2])
        return test_project, stored_vw.gid, parent_op.gid

    def __check_meta_data(self, expected_meta_data, new_datatype):
        """
        Assert that *new_datatype* carries the meta-data in *expected_meta_data*:
        subject/state as datatype attributes, and the operation tag either as
        the operation-group name or as the parent operation's user_group.
        """
        attribute_for_key = {
            DataTypeMetaData.KEY_SUBJECT: "subject",
            DataTypeMetaData.KEY_STATE: "state"
        }
        for key, value in expected_meta_data.iteritems():
            if key in attribute_for_key:
                self.assertEqual(value,
                                 getattr(new_datatype, attribute_for_key[key]))
            elif key == DataTypeMetaData.KEY_OPERATION_TAG:
                if DataTypeMetaData.KEY_OP_GROUP_ID in expected_meta_data:
                    ## We have a Group to check
                    group_id = new_datatype.parent_operation.fk_operation_group
                    group = dao.get_generic_entity(OperationGroup, group_id)[0]
                    self.assertEqual(value, group.name)
                else:
                    self.assertEqual(
                        value, new_datatype.parent_operation.user_group)

    def test_remove_project_node(self):
        """
        Test removing of a node from a project.

        Stores a ValueWrapper in one project, links it into a second project,
        then checks that removing the node from the first project keeps the
        data alive (the link still references it), while removing it from the
        linked project deletes it for good.
        """
        inserted_project, gid, gid_op = self._create_value_wrapper(
            self.test_user)
        project_to_link = model.Project("Link", self.test_user.id, "descript")
        project_to_link = dao.store_entity(project_to_link)
        exact_data = dao.get_datatype_by_gid(gid)
        dao.store_entity(model.Links(exact_data.id, project_to_link.id))
        self.assertTrue(
            dao.get_datatype_by_gid(gid) is not None,
            "Initialization problem!")

        operation_id = dao.get_generic_entity(model.Operation, gid_op,
                                              'gid')[0].id
        op_folder = self.structure_helper.get_project_folder(
            "test_proj", str(operation_id))
        self.assertTrue(os.path.exists(op_folder))
        sub_files = os.listdir(op_folder)
        self.assertEqual(2, len(sub_files))
        ### Validate that no more files are created than needed.

        self.project_service._remove_project_node_files(
            inserted_project.id, gid)
        sub_files = os.listdir(op_folder)
        self.assertEqual(1, len(sub_files))
        ### Validate that Operation GID_file is still there.

        # NOTE(review): assumes the link-project's operation got the next
        # consecutive id (operation_id + 1) in a fresh database -- fragile if
        # anything else stored an operation in between; confirm.
        op_folder = self.structure_helper.get_project_folder(
            "Link", str(operation_id + 1))
        sub_files = os.listdir(op_folder)
        self.assertEqual(1, len(sub_files))
        self.assertTrue(
            dao.get_datatype_by_gid(gid) is not None,
            "Data should still be there because of links")
        self.project_service._remove_project_node_files(
            project_to_link.id, gid)
        self.assertTrue(dao.get_datatype_by_gid(gid) is None)
        sub_files = os.listdir(op_folder)
        self.assertEqual(0, len(sub_files))

    def test_update_meta_data_simple(self):
        """
        Updating meta-data on a stand-alone (non-group) datatype should be
        reflected both in the DB entity and in the operation's XML meta file.
        """
        project, gid, _ = self._create_value_wrapper(self.test_user)
        meta_changes = {
            DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
            DataTypeOverlayDetails.DATA_STATE: "second_state",
            DataTypeOverlayDetails.CODE_GID: gid,
            DataTypeOverlayDetails.CODE_OPERATION_TAG: 'new user group'
        }
        self.project_service.update_metadata(meta_changes)

        updated_datatype = dao.get_datatype_by_gid(gid)
        self.__check_meta_data(meta_changes, updated_datatype)

        # The operation tag must also land in the XML meta file on disk.
        meta_file = FilesHelper().get_operation_meta_file_path(
            project.name, updated_datatype.parent_operation.id)
        stored_meta = XMLReader(meta_file).read_metadata()
        self.assertEqual(stored_meta['user_group'], 'new user group',
                         'UserGroup not updated!')

    def test_update_meta_data_group(self):
        """
        Test the new update metaData for a group of dataTypes.

        After the update, every datatype in the group must reference the same
        operation group, and the group itself must carry the new name.
        """
        datatypes, group_id = TestFactory.create_group(
            self.test_user, subject="test-subject-1")

        new_meta_data = {
            DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
            DataTypeOverlayDetails.DATA_STATE: "updated_state",
            DataTypeOverlayDetails.CODE_OPERATION_GROUP_ID: group_id,
            DataTypeOverlayDetails.CODE_OPERATION_TAG: 'newGroupName'
        }
        self.project_service.update_metadata(new_meta_data)

        # The operation group is the same for every datatype, so fetch and
        # verify it once instead of re-querying it inside the loop.
        new_group = dao.get_generic_entity(model.OperationGroup, group_id)[0]
        self.assertEqual(new_group.name, "newGroupName")

        for datatype in datatypes:
            new_datatype = dao.get_datatype_by_id(datatype.id)
            self.assertEqual(group_id,
                             new_datatype.parent_operation.fk_operation_group)
            self.__check_meta_data(new_meta_data, new_datatype)

    def _create_datatypes(self, dt_factory, nr_of_dts):
        """Store *nr_of_dts* Datatype1 instances through *dt_factory*."""
        for idx in range(nr_of_dts):
            datatype = Datatype1()
            datatype.row1 = "value%i" % (idx, )
            datatype.row2 = "value%i" % (idx + 1, )
            dt_factory._store_datatype(datatype)

    def test_retrieve_project_full(self):
        """
        Tests full project information is retrieved by method `ProjectService.retrieve_project_full(...)`
        """
        factory = datatypes_factory.DatatypesFactory()
        self._create_datatypes(factory, 3)

        full_info = self.project_service.retrieve_project_full(
            factory.project.id)
        _, ops_nr, _, operations, pages_no = full_info

        self.assertEqual(
            ops_nr, 1,
            "DataType Factory should only use one operation to store all it's datatypes."
        )
        self.assertEqual(
            pages_no, 1,
            "DataType Factory should only use one operation to store all it's datatypes."
        )
        dts_in_first_op = operations[0]['results']
        self.assertEqual(len(dts_in_first_op), 3,
                         "3 datatypes should be created.")

    def test_get_project_structure(self):
        """
        Tests project structure is as expected and contains all datatypes
        """
        factory = datatypes_factory.DatatypesFactory()
        self._create_datatypes(factory, 3)
        structure_json = self.project_service.get_project_structure(
            factory.project, None, 'Data_State', 'Data_Subject', None)
        # Encoding must complete without raising for the structure to be valid.
        JSONEncoder().iterencode(structure_json)
        for datatype in dao.get_datatypes_for_project(factory.project.id):
            self.assertTrue(
                datatype.gid in structure_json,
                "Should have all datatypes present in resulting json.")
# Пример #7
# 0
class DatatypesFactory():
    """
    Helper that creates different data types for testing.
    The created data types are automatically stored in the DB and, where
    needed, on the file system, under a dedicated user/project/operation
    built in __init__.
    """
    USER_FULL_NAME = "Datatype Factory User"
    DATATYPE_STATE = "RAW_DATA"
    DATATYPE_DATA = ["test", "for", "datatypes", "factory"]
    OPERATION_GROUP_NAME = "OperationsGroup"

    # Populated in __init__; the class-level None values only document
    # the attributes.
    user = None
    project = None
    operation = None

    def __init__(self):
        now = datetime.now()
        # Microsecond suffix keeps user/project names unique across
        # factory instances created in the same test run.
        micro_postfix = "_%d" % now.microsecond

        # Here create all structures needed later for data types creation
        self.files_helper = FilesHelper()

        # First create user
        user = model.User("datatype_factory_user" + micro_postfix, "test_pass",
                          "*****@*****.**" + micro_postfix, True, "user")
        self.user = dao.store_entity(user)

        # Now create a project
        project_service = ProjectService()
        data = dict(name='DatatypesFactoryProject' + micro_postfix,
                    description='test_desc',
                    users=[])
        self.project = project_service.store_project(self.user, True, None,
                                                     **data)

        # Create algorithm
        alg_category = model.AlgorithmCategory('one', True)
        dao.store_entity(alg_category)
        alg_group = model.AlgorithmGroup("test_module1", "classname1",
                                         alg_category.id)
        dao.store_entity(alg_group)
        algorithm = model.Algorithm(alg_group.id,
                                    'id',
                                    name='',
                                    req_data='',
                                    param_name='',
                                    output='')
        self.algorithm = dao.store_entity(algorithm)

        #Create an operation
        self.meta = {
            DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
            DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE
        }
        operation = model.Operation(self.user.id,
                                    self.project.id,
                                    self.algorithm.id,
                                    'test parameters',
                                    meta=json.dumps(self.meta),
                                    status="FINISHED",
                                    method_name=ABCAdapter.LAUNCH_METHOD)
        self.operation = dao.store_entity(operation)

    def get_project(self):
        """
        Return project to which generated data types are assigned
        """
        return self.project

    def get_operation(self):
        """
        Return operation to which generated data types are assigned
        """
        return self.operation

    def get_user(self):
        """
        Return user to which generated data types are assigned
        """
        return self.user

    def _store_datatype(self, data_type, operation_id=None):
        """
        Launch the store adapter to persist *data_type* as a DataType entity.
        Defaults to the factory's own operation when *operation_id* is None.
        """
        operation_id = operation_id or self.operation.id
        data_type.type = data_type.__class__.__name__
        data_type.module = data_type.__class__.__module__
        data_type.subject = self.USER_FULL_NAME
        data_type.state = self.DATATYPE_STATE
        data_type.set_operation_id(operation_id)

        adapter_instance = StoreAdapter([data_type])
        operation = dao.get_operation_by_id(operation_id)
        OperationService().initiate_prelaunch(operation, adapter_instance, {})

        return data_type

    def create_simple_datatype(self,
                               subject=USER_FULL_NAME,
                               state=DATATYPE_STATE):
        """
        This method creates a simple data type
        """
        datatype_inst = Datatype1()
        self._fill_datatype(datatype_inst, subject, state)

        # Store data type
        return self._store_datatype(datatype_inst)

    def create_datatype_with_storage(self,
                                     subject=USER_FULL_NAME,
                                     state=DATATYPE_STATE,
                                     data=DATATYPE_DATA,
                                     operation_id=None):
        """
        This method creates and stores a data type which imply storage on the file system.
        """
        datatype_inst = Datatype2()
        self._fill_datatype(datatype_inst, subject, state, operation_id)

        datatype_inst.string_data = data

        return self._store_datatype(datatype_inst, operation_id)

    def _fill_datatype(self, datatype, subject, state, operation_id=None):
        """
        This method sets some common attributes on dataType
        """
        operation_id = operation_id or self.operation.id
        datatype.subject = subject
        datatype.state = state
        # Set_operation_id also sets storage_path attribute
        datatype.set_operation_id(operation_id)

    def __create_operation(self):
        """
        Create an operation entity bound to the simulator algorithm.
        :returns: (stored operation, algorithm id, operation storage path)
        """
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW"
        }
        algorithm, algo_group = FlowService(
        ).get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
        operation = model.Operation(self.user.id,
                                    self.project.id,
                                    algo_group.id,
                                    json.dumps(''),
                                    meta=json.dumps(meta),
                                    status="STARTED",
                                    method_name=ABCAdapter.LAUNCH_METHOD)
        operation = dao.store_entity(operation)
        # Storage path is per-operation: <project folder>/<operation id>
        storage_path = FilesHelper().get_project_folder(
            self.project, str(operation.id))
        return operation, algorithm.id, storage_path

    def create_connectivity(self):
        """
        Create a connectivity that will be used in "non-dummy" burst launches (with the actual simulator).
        """
        operation, algo_id, storage_path = self.__create_operation()
        # 74-region connectivity with unit weights and centres.
        connectivity = Connectivity(storage_path=storage_path)
        connectivity.weights = numpy.ones((74, 74))
        connectivity.centres = numpy.ones((74, 3))
        adapter_instance = StoreAdapter([connectivity])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        return algo_id, connectivity

    def create_timeseries(self, connectivity, ts_type=None, sensors=None):
        """
        Create a stored TimeSeries entity.

        When ts_type is "EEG" a TimeSeriesEEG bound to *sensors* is built;
        otherwise a TimeSeriesRegion bound to *connectivity*.
        """
        operation, _, storage_path = self.__create_operation()
        if ts_type == "EEG":
            time_series = TimeSeriesEEG(storage_path=storage_path,
                                        sensors=sensors)
        else:
            time_series = TimeSeriesRegion(storage_path=storage_path,
                                           connectivity=connectivity)
        data = numpy.random.random((10, 10, 10, 10))
        time = numpy.arange(10)
        time_series.write_data_slice(data)
        time_series.write_time_slice(time)
        adapter_instance = StoreAdapter([time_series])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        # Reload from DB so the caller gets the persisted entity.
        time_series = dao.get_datatype_by_gid(time_series.gid)
        return time_series

    def create_covariance(self, time_series):
        """
        :return: a stored DataType Covariance.
        """
        operation, _, storage_path = self.__create_operation()
        covariance = Covariance(storage_path=storage_path, source=time_series)
        covariance.write_data_slice(numpy.random.random((10, 10, 10)))
        adapter_instance = StoreAdapter([covariance])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        return covariance

    def create_crosscoherence(self, time_series):
        """
        :return: a stored entity of type CoherenceSpectrum
        """
        operation, _, storage_path = self.__create_operation()
        # Unstored spectrum only used as the data slice written below.
        partial_coh = CoherenceSpectrum(array_data=numpy.random.random(
            (10, 10, 10, 10)),
                                        use_storage=False)
        coherence = CoherenceSpectrum(source=time_series,
                                      storage_path=storage_path,
                                      frequency=0.1,
                                      nfft=256)
        coherence.write_data_slice(partial_coh)
        coherence.close_file()
        adapter_instance = StoreAdapter([coherence])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        return coherence

    def create_crosscorrelation(self, time_series):
        """
        :return CrossCorrelation stored entity.
        """
        operation, _, storage_path = self.__create_operation()
        # Unstored correlation only used as the data slice written below.
        partial_corr = CrossCorrelation(array_data=numpy.random.random(
            (10, 10, 10, 10, 10)),
                                        use_storage=False)
        crossc = CrossCorrelation(source=time_series,
                                  storage_path=storage_path,
                                  time=range(10))
        crossc.write_data_slice(partial_corr)
        crossc.close_file()
        adapter_instance = StoreAdapter([crossc])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        return crossc

    def create_surface(self):
        """
        Create a dummy surface entity.
        :return: (Algorithm Identifier, stored Surface entity)
        """
        operation, algo_id, storage_path = self.__create_operation()
        # Minimal tetrahedron-like surface: 4 vertices, 4 triangles.
        surface = CorticalSurface(storage_path=storage_path)
        surface.vertices = numpy.array(
            [[-10, 0, 0], [0, 0, -10], [10, 0, 0], [0, 10, 0]], dtype=float)
        surface.triangles = numpy.array(
            [[0, 1, 2], [0, 1, 3], [1, 2, 3], [0, 2, 3]], dtype=int)
        surface.number_of_triangles = 4
        surface.number_of_vertices = 4
        surface.triangle_normals = numpy.ones((4, 3))
        surface.vertex_normals = numpy.ones((4, 3))
        surface.zero_based_triangles = True
        adapter_instance = StoreAdapter([surface])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        return algo_id, surface

    def create_connectivity_measure(self, connectivity):
        """
        :return: persisted entity ConnectivityMeasure
        """
        operation, _, storage_path = self.__create_operation()
        conn_measure = ConnectivityMeasure(storage_path=storage_path)
        conn_measure.connectivity = connectivity
        adapter_instance = StoreAdapter([conn_measure])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        return conn_measure

    def create_ICA(self, timeseries):
        """
        :return: persisted entity IndependentComponents
        """
        operation, _, storage_path = self.__create_operation()
        # Unstored components only used as the data slice written below.
        partial_ts = TimeSeries(use_storage=False)
        partial_ts.data = numpy.random.random((10, 10, 10, 10))
        partial_ica = IndependentComponents(
            source=partial_ts,
            component_time_series=numpy.random.random((10, 10, 10, 10)),
            prewhitening_matrix=numpy.random.random((10, 10, 10, 10)),
            unmixing_matrix=numpy.random.random((10, 10, 10, 10)),
            n_components=10,
            use_storage=False)
        ica = IndependentComponents(source=timeseries,
                                    n_components=10,
                                    storage_path=storage_path)
        ica.write_data_slice(partial_ica)
        adapter_instance = StoreAdapter([ica])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        return ica

    def create_datatype_group(
        self,
        subject=USER_FULL_NAME,
        state=DATATYPE_STATE,
    ):
        """
        This method creates, stores and returns a DataTypeGroup entity,
        holding one ranged datatype per value in ['a', 'b', 'c'].
        """
        OPERATION_GROUP_RANGE = [json.dumps(["row1", ['a', 'b', 'c']])]
        group = model.OperationGroup(self.project.id,
                                     self.OPERATION_GROUP_NAME,
                                     OPERATION_GROUP_RANGE)
        group = dao.store_entity(group)

        datatype_group = model.DataTypeGroup(group,
                                             subject=subject,
                                             state=state,
                                             operation_id=self.operation.id)
        # Set storage path, before setting data
        datatype_group.storage_path = self.files_helper.get_project_folder(
            self.project, str(self.operation.id))
        datatype_group = dao.store_entity(datatype_group)

        # Now create some data types and add them to group
        for range_val in ['a', 'b', 'c']:
            # One operation per range value, tied to the operation group.
            operation = model.Operation(self.user.id,
                                        self.project.id,
                                        self.algorithm.id,
                                        'test parameters',
                                        meta=json.dumps(self.meta),
                                        status="FINISHED",
                                        method_name=ABCAdapter.LAUNCH_METHOD,
                                        range_values=json.dumps(
                                            {'row1': range_val}))
            operation.fk_operation_group = group.id
            operation = dao.store_entity(operation)
            datatype = self.create_datatype_with_storage(
                operation_id=operation.id)
            datatype.row1 = range_val
            datatype.fk_datatype_group = datatype_group.id
            datatype.set_operation_id(operation.id)
            dao.store_entity(datatype)

        return datatype_group