def launch(self, weights, tracts, input_data):
    """
    Execute import operations: process the weights and tracts csv files, then use the reference
    connectivity passed as input_data for the rest of the attributes.

    :param weights: csv file containing the weights measures
    :param tracts: csv file containing the tracts measures
    :param input_data: a reference connectivity with the additional attributes

    :raises LaunchException: when the number of nodes in CSV files doesn't match the one
        in the reference connectivity
    """
    pipeline = DTIPipelineService()
    pipeline._process_csv_file(weights, pipeline.WEIGHTS_FILE)
    pipeline._process_csv_file(tracts, pipeline.TRACT_FILE)

    # The pipeline writes its normalized output next to the uploaded files; read then clean up.
    processed_weights_path = os.path.join(os.path.dirname(weights), pipeline.WEIGHTS_FILE)
    processed_tracts_path = os.path.join(os.path.dirname(tracts), pipeline.TRACT_FILE)
    weights_matrix = read_list_data(processed_weights_path)
    tract_matrix = read_list_data(processed_tracts_path)
    FilesHelper.remove_files([processed_weights_path, processed_tracts_path])

    expected_nodes = input_data.orientations.shape[0]
    if weights_matrix.shape[0] != expected_nodes:
        raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference "
                              "has only %s nodes." % (weights_matrix.shape[0], expected_nodes))

    result = Connectivity()
    result.storage_path = self.storage_path
    # Matrices come from the CSV files; every other attribute is copied from the reference.
    result.nose_correction = input_data.nose_correction
    result.centres = input_data.centres
    result.region_labels = input_data.region_labels
    result.weights = weights_matrix
    result.tract_lengths = tract_matrix
    result.orientations = input_data.orientations
    result.areas = input_data.areas
    result.cortical = input_data.cortical
    result.hemispheres = input_data.hemispheres
    return result
def launch(self, weights, weights_delimiter, tracts, tracts_delimiter, input_data):
    """
    Execute import operations: process the weights and tracts csv files, then use the reference
    connectivity passed as input_data for the rest of the attributes.

    :param weights: csv file containing the weights measures
    :param weights_delimiter: delimiter used inside the weights csv file
    :param tracts: csv file containing the tracts measures
    :param tracts_delimiter: delimiter used inside the tracts csv file
    :param input_data: a reference connectivity with the additional attributes

    :raises LaunchException: when the number of nodes in CSV files doesn't match the one
        in the reference connectivity
    """
    weights_matrix = self._read_csv_file(weights, weights_delimiter)
    tract_matrix = self._read_csv_file(tracts, tracts_delimiter)
    # The uploaded files are no longer needed once parsed.
    FilesHelper.remove_files([weights, tracts])

    expected_nodes = input_data.number_of_regions
    if weights_matrix.shape[0] != expected_nodes:
        raise LaunchException("The csv files define %s nodes but the connectivity you selected as reference "
                              "has only %s nodes." % (weights_matrix.shape[0], expected_nodes))

    result = Connectivity()
    result.storage_path = self.storage_path
    # Matrices come from the CSV files; every other attribute is copied from the reference.
    result.centres = input_data.centres
    result.region_labels = input_data.region_labels
    result.weights = weights_matrix
    result.tract_lengths = tract_matrix
    result.orientations = input_data.orientations
    result.areas = input_data.areas
    result.cortical = input_data.cortical
    result.hemispheres = input_data.hemispheres
    return result
class UploadField(Field):
    """
    Form field that accepts a file upload and stores it under the project's
    temporary folder; ``self.data`` ends up holding the stored file path
    (or None when nothing was posted).
    """
    template = 'upload_field.jinja2'

    def __init__(self, required_type, form, name, disabled=False, required=False, label='', doc=''):
        super(UploadField, self).__init__(form, name, disabled, required, label, doc)
        self.required_type = required_type
        self.files_helper = FilesHelper()

    def fill_from_post(self, post_data):
        """
        Read the uploaded file from *post_data*, write it into the project TEMP folder
        and remember its path in ``self.data`` / ``self.temp_file``.
        """
        super(UploadField, self).fill_from_post(post_data)

        if self.data.file is None:
            self.data = None
            return

        project = dao.get_project_by_id(self.owner.project_id)
        temporary_storage = self.files_helper.get_project_folder(project, self.files_helper.TEMP_FOLDER)

        file_name = None
        temp_file = None
        try:
            uq_name = utils.date2string(datetime.now(), True) + '_' + str(0)
            file_name = TEMPORARY_PREFIX + uq_name + '_' + self.data.filename
            file_name = os.path.join(temporary_storage, file_name)
            temp_file = file_name

            with open(file_name, 'wb') as file_obj:
                file_obj.write(self.data.file.read())
        except Exception as excep:
            # TODO: is this handled properly?
            self.files_helper.remove_files([file_name])
            excep.message = 'Could not continue: Invalid input files'
            raise excep

        # BUG FIX: the original checked `if file:` — the Python 2 builtin type `file`
        # (always truthy; a NameError on Python 3). The intent, as in the sibling
        # TraitUploadField, is to check that a destination file name was produced.
        if file_name:
            self.data = file_name
        if temp_file:
            self.temp_file = temp_file
class TraitUploadField(TraitField):
    """
    Trait-backed form field for file uploads: persists the posted file into the
    project's TEMP folder and keeps its path in ``self.data``.
    """
    template = 'form_fields/upload_field.html'

    def __init__(self, traited_attribute, required_type, project_id, name, temporary_files, disabled=False):
        super(TraitUploadField, self).__init__(traited_attribute, project_id, name, disabled)
        self.required_type = required_type
        self.temporary_files = temporary_files
        self.files_helper = FilesHelper()

    def fill_from_post(self, post_data):
        """
        Copy the uploaded file from *post_data* into the project TEMP folder; remember the
        stored path in ``self.data`` and register it in ``self.temporary_files`` for cleanup.
        """
        super(TraitUploadField, self).fill_from_post(post_data)

        if self.data.file is None:
            self.data = None
            return

        project = dao.get_project_by_id(self.project_id)
        temp_folder = self.files_helper.get_project_folder(project, self.files_helper.TEMP_FOLDER)

        file_name = None
        try:
            # Unique name: timestamp + original filename, prefixed so cleanup jobs recognize it.
            unique_stamp = utils.date2string(datetime.now(), True) + '_' + str(0)
            file_name = os.path.join(temp_folder,
                                     TEMPORARY_PREFIX + unique_stamp + '_' + self.data.filename)
            with open(file_name, 'wb') as destination:
                destination.write(self.data.file.read())
        except Exception as excep:
            # TODO: is this handled properly?
            self.files_helper.remove_files([file_name])
            excep.message = 'Could not continue: Invalid input files'
            raise excep

        if file_name:
            self.data = file_name
            self.temporary_files.append(file_name)
def launch(self, uploaded, rotate_x=0, rotate_y=0, rotate_z=0):
    """
    Execute import operations: unpack ZIP and build Connectivity object as result.

    :param uploaded: an archive containing the Connectivity data to be imported
    :param rotate_x: nose-correction rotation around the X axis
    :param rotate_y: nose-correction rotation around the Y axis
    :param rotate_z: nose-correction rotation around the Z axis

    :returns: `Connectivity`

    :raises LaunchException: when `uploaded` is empty or nonexistent
    :raises Exception: when
                * weights or tracts matrix is invalid (negative values, wrong shape)
                * any of the vector orientation, areas, cortical or hemisphere is \
                  different from the expected number of nodes
    """
    if uploaded is None:
        raise LaunchException("Please select ZIP file which contains data to import")

    files = FilesHelper().unpack_zip(uploaded, self.storage_path)

    weights_matrix = None
    centres = None
    labels_vector = None
    tract_matrix = None
    orientation = None
    areas = None
    cortical_vector = None
    hemisphere_vector = None

    # Dispatch each unpacked file on the token found in its (lower-cased) name.
    # The original used `find(tok) >= 0` plus `continue`; `in` + elif is equivalent.
    for file_name in files:
        file_name_low = file_name.lower()
        if self.WEIGHT_TOKEN in file_name_low:
            weights_matrix = read_list_data(file_name)
        elif self.POSITION_TOKEN in file_name_low:
            centres = read_list_data(file_name, skiprows=1, usecols=[1, 2, 3])
            labels_vector = read_list_data(file_name, dtype=numpy.str, skiprows=1, usecols=[0])
        elif self.TRACT_TOKEN in file_name_low:
            tract_matrix = read_list_data(file_name)
        elif self.ORIENTATION_TOKEN in file_name_low:
            orientation = read_list_data(file_name)
        elif self.AREA_TOKEN in file_name_low:
            areas = read_list_data(file_name)
        elif self.CORTICAL_INFO in file_name_low:
            cortical_vector = read_list_data(file_name, dtype=numpy.bool)
        elif self.HEMISPHERE_INFO in file_name_low:
            hemisphere_vector = read_list_data(file_name, dtype=numpy.bool)

    ### Clean remaining text-files.
    FilesHelper.remove_files(files, True)

    result = Connectivity()
    result.storage_path = self.storage_path
    result.nose_correction = [rotate_x, rotate_y, rotate_z]

    ### Fill positions
    if centres is None:
        raise Exception("Positions for Connectivity Regions are required! "
                        "We expect a file *position* inside the uploaded ZIP.")
    expected_number_of_nodes = len(centres)
    if expected_number_of_nodes < 2:
        raise Exception("A connectivity with at least 2 nodes is expected")
    result.centres = centres
    if labels_vector is not None:
        result.region_labels = labels_vector

    ### Fill and check weights
    if weights_matrix is not None:
        # PERF FIX: vectorized comparison instead of flattening into a Python list
        # element by element; identical result, runs in C.
        if numpy.any(weights_matrix < 0):
            raise Exception("Negative values are not accepted in weights matrix! "
                            "Please check your file, and use values >= 0")
        if weights_matrix.shape != (expected_number_of_nodes, expected_number_of_nodes):
            raise Exception("Unexpected shape for weights matrix! "
                            "Should be %d x %d " % (expected_number_of_nodes, expected_number_of_nodes))
        result.weights = weights_matrix

    ### Fill and check tracts
    if tract_matrix is not None:
        if numpy.any(tract_matrix < 0):
            raise Exception("Negative values are not accepted in tracts matrix! "
                            "Please check your file, and use values >= 0")
        if tract_matrix.shape != (expected_number_of_nodes, expected_number_of_nodes):
            raise Exception("Unexpected shape for tracts matrix! "
                            "Should be %d x %d " % (expected_number_of_nodes, expected_number_of_nodes))
        result.tract_lengths = tract_matrix

    # Every optional per-node vector must match the number of region centres.
    if orientation is not None:
        if len(orientation) != expected_number_of_nodes:
            raise Exception("Invalid size for vector orientation. "
                            "Expected the same as region-centers number %d" % expected_number_of_nodes)
        result.orientations = orientation

    if areas is not None:
        if len(areas) != expected_number_of_nodes:
            raise Exception("Invalid size for vector areas. "
                            "Expected the same as region-centers number %d" % expected_number_of_nodes)
        result.areas = areas

    if cortical_vector is not None:
        if len(cortical_vector) != expected_number_of_nodes:
            raise Exception("Invalid size for vector cortical. "
                            "Expected the same as region-centers number %d" % expected_number_of_nodes)
        result.cortical = cortical_vector

    if hemisphere_vector is not None:
        if len(hemisphere_vector) != expected_number_of_nodes:
            raise Exception("Invalid size for vector hemispheres. "
                            "Expected the same as region-centers number %d" % expected_number_of_nodes)
        result.hemispheres = hemisphere_vector
    return result
class FilesHelperTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.entities.file.files_helper module.
    """
    PROJECT_NAME = "test_proj"

    def setUp(self):
        """ Set up the context needed by the tests. """
        self.files_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, self.PROJECT_NAME)

    def tearDown(self):
        """ Remove generated project during tests. """
        self.delete_project_folders()

    def test_check_created(self):
        """ Test standard flows for check created. """
        self.files_helper.check_created()
        self.assertTrue(os.path.exists(root_storage), "Storage not created!")
        self.files_helper.check_created(os.path.join(root_storage, "test"))
        self.assertTrue(os.path.exists(root_storage), "Storage not created!")
        self.assertTrue(os.path.exists(os.path.join(root_storage, "test")), "Test directory not created!")

    def test_get_project_folder(self):
        """
        Test the get_project_folder method which should create a folder in case
        it doesn't already exist.
        """
        project_path = self.files_helper.get_project_folder(self.test_project)
        self.assertTrue(os.path.exists(project_path), "Folder doesn't exist")
        folder_path = self.files_helper.get_project_folder(self.test_project, "43")
        self.assertTrue(os.path.exists(project_path), "Folder doesn't exist")
        self.assertTrue(os.path.exists(folder_path), "Folder doesn't exist")

    def test_rename_project_structure(self):
        """ Try to rename the folder structure of a project. Standard flow. """
        self.files_helper.get_project_folder(self.test_project)
        path, name = self.files_helper.rename_project_structure(self.test_project.name, "new_name")
        self.assertNotEqual(path, name, "Rename didn't take effect.")

    def test_rename_structure_same_name(self):
        """ Try to rename the folder structure of a project. Same name. """
        self.files_helper.get_project_folder(self.test_project)
        self.assertRaises(FileStructureException, self.files_helper.rename_project_structure,
                          self.test_project.name, self.PROJECT_NAME)

    def test_remove_project_structure(self):
        """ Check that remove project structure deletes the corresponding folder. Standard flow. """
        full_path = self.files_helper.get_project_folder(self.test_project)
        self.assertTrue(os.path.exists(full_path), "Folder was not created.")
        self.files_helper.remove_project_structure(self.test_project.name)
        self.assertFalse(os.path.exists(full_path), "Project folder not deleted.")

    def test_write_project_metadata(self):
        """ Write XML for test-project. """
        self.files_helper.write_project_metadata(self.test_project)
        expected_file = self.files_helper.get_project_meta_file_path(self.PROJECT_NAME)
        self.assertTrue(os.path.exists(expected_file))
        project_meta = XMLReader(expected_file).read_metadata()
        loaded_project = model.Project(None, None)
        loaded_project.from_dict(project_meta, self.test_user.id)
        self.assertEqual(self.test_project.name, loaded_project.name)
        self.assertEqual(self.test_project.description, loaded_project.description)
        self.assertEqual(self.test_project.gid, loaded_project.gid)
        expected_dict = self.test_project.to_dict()[1]
        del expected_dict['last_updated']
        found_dict = loaded_project.to_dict()[1]
        del found_dict['last_updated']
        # Mutual subset inclusion == dict equality (modulo 'last_updated').
        self.assertDictContainsSubset(expected_dict, found_dict)
        self.assertDictContainsSubset(found_dict, expected_dict)

    def test_write_operation_metadata(self):
        """ Test that a correct XML is created for an operation. """
        operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        expected_file = self.files_helper.get_operation_meta_file_path(self.PROJECT_NAME, operation.id)
        self.assertFalse(os.path.exists(expected_file))
        self.files_helper.write_operation_metadata(operation)
        self.assertTrue(os.path.exists(expected_file))
        operation_meta = XMLReader(expected_file).read_metadata()
        loaded_operation = model.Operation(None, None, None, None)
        loaded_operation.from_dict(operation_meta, dao)
        expected_dict = operation.to_dict()[1]
        found_dict = loaded_operation.to_dict()[1]
        for key, value in expected_dict.iteritems():
            self.assertEqual(str(value), str(found_dict[key]))
        # Now validate that operation metaData can be also updated
        self.assertNotEqual("new_group_name", found_dict['user_group'])
        self.files_helper.update_operation_metadata(self.PROJECT_NAME, "new_group_name", operation.id)
        found_dict = XMLReader(expected_file).read_metadata()
        self.assertEqual("new_group_name", found_dict['user_group'])

    def test_remove_dt_happy_flow(self):
        """ Happy flow for removing a file related to a DataType. """
        folder_path = self.files_helper.get_project_folder(self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        # BUG FIX: close the handle immediately; the original leaked an open file,
        # which also prevents deletion on Windows.
        open(datatype.get_storage_file_path(), 'w').close()
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()), "Test file was not created!")
        self.files_helper.remove_datatype(datatype)
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()), "Test file was not deleted!")

    def test_remove_dt_non_existent(self):
        """ Try to call remove on a dataType with no H5 file. Should work. """
        folder_path = self.files_helper.get_project_folder(self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()))
        self.files_helper.remove_datatype(datatype)

    def test_move_datatype(self):
        """ Make sure associated H5 file is moved to a correct new location. """
        folder_path = self.files_helper.get_project_folder(self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        # BUG FIX: close the handle (see test_remove_dt_happy_flow).
        open(datatype.get_storage_file_path(), 'w').close()
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()), "Test file was not created!")
        self.files_helper.move_datatype(datatype, self.PROJECT_NAME + '11', "43")
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()), "Test file was not moved!")
        datatype.storage_path = self.files_helper.get_project_folder(self.PROJECT_NAME + '11', "43")
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()), "Test file was not created!")

    def test_find_relative_path(self):
        """ Tests that relative path is computed properly. """
        rel_path = self.files_helper.find_relative_path("/root/up/to/here/test/it/now", "/root/up/to/here")
        self.assertEqual(rel_path, os.sep.join(["test", "it", "now"]), "Did not extract relative path as expected.")

    def test_remove_files_valid(self):
        """ Pass a valid list of files and check they are all removed. """
        file_list = ["test1", "test2", "test3"]
        for file_n in file_list:
            # Context manager guarantees the handle is closed even if the write fails.
            with open(file_n, 'w') as fp:
                fp.write('test')
        for file_n in file_list:
            self.assertTrue(os.path.isfile(file_n))
        self.files_helper.remove_files(file_list)
        for file_n in file_list:
            self.assertFalse(os.path.isfile(file_n))

    def test_remove_folder(self):
        """ Pass an open file pointer, but ignore exceptions. """
        folder_name = "test_folder"
        os.mkdir(folder_name)
        self.assertTrue(os.path.isdir(folder_name), "Folder should be created.")
        self.files_helper.remove_folder(folder_name)
        self.assertFalse(os.path.isdir(folder_name), "Folder should be deleted.")

    def test_remove_folder_non_existing_ignore_exc(self):
        """ Pass an open file pointer, but ignore exceptions. """
        folder_name = "test_folder"
        self.assertFalse(os.path.isdir(folder_name), "Folder should not exist before call.")
        self.files_helper.remove_folder(folder_name, ignore_errors=True)

    def test_remove_folder_non_existing(self):
        """ Pass an open file pointer, but ignore exceptions. """
        folder_name = "test_folder"
        self.assertFalse(os.path.isdir(folder_name), "Folder should not exist before call.")
        self.assertRaises(FileStructureException, self.files_helper.remove_folder, folder_name, False)
class TestFilesHelper(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.entities.file.files_helper module.
    """
    PROJECT_NAME = "test_proj"

    def transactional_setup_method(self):
        """ Set up the context needed by the tests. """
        self.files_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, self.PROJECT_NAME)

    def transactional_teardown_method(self):
        """ Remove generated project during tests. """
        self.delete_project_folders()

    def test_check_created(self):
        """ Test standard flows for check created. """
        self.files_helper.check_created()
        assert os.path.exists(root_storage), "Storage not created!"
        self.files_helper.check_created(os.path.join(root_storage, "test"))
        assert os.path.exists(root_storage), "Storage not created!"
        assert os.path.exists(os.path.join(root_storage, "test")), "Test directory not created!"

    def test_get_project_folder(self):
        """
        Test the get_project_folder method which should create a folder in case
        it doesn't already exist.
        """
        project_path = self.files_helper.get_project_folder(self.test_project)
        assert os.path.exists(project_path), "Folder doesn't exist"
        folder_path = self.files_helper.get_project_folder(self.test_project, "43")
        assert os.path.exists(project_path), "Folder doesn't exist"
        assert os.path.exists(folder_path), "Folder doesn't exist"

    def test_rename_project_structure(self):
        """ Try to rename the folder structure of a project. Standard flow. """
        self.files_helper.get_project_folder(self.test_project)
        path, name = self.files_helper.rename_project_structure(self.test_project.name, "new_name")
        assert path != name, "Rename didn't take effect."

    def test_rename_structure_same_name(self):
        """ Try to rename the folder structure of a project. Same name. """
        self.files_helper.get_project_folder(self.test_project)
        with pytest.raises(FileStructureException):
            self.files_helper.rename_project_structure(self.test_project.name, self.PROJECT_NAME)

    def test_remove_project_structure(self):
        """ Check that remove project structure deletes the corresponding folder. Standard flow. """
        full_path = self.files_helper.get_project_folder(self.test_project)
        assert os.path.exists(full_path), "Folder was not created."
        self.files_helper.remove_project_structure(self.test_project.name)
        assert not os.path.exists(full_path), "Project folder not deleted."

    def test_write_project_metadata(self):
        """ Write XML for test-project. """
        self.files_helper.write_project_metadata(self.test_project)
        expected_file = self.files_helper.get_project_meta_file_path(self.PROJECT_NAME)
        assert os.path.exists(expected_file)
        project_meta = XMLReader(expected_file).read_metadata()
        loaded_project = model_project.Project(None, None)
        loaded_project.from_dict(project_meta, self.test_user.id)
        assert self.test_project.name == loaded_project.name
        assert self.test_project.description == loaded_project.description
        assert self.test_project.gid == loaded_project.gid
        expected_dict = self.test_project.to_dict()[1]
        del expected_dict['last_updated']
        found_dict = loaded_project.to_dict()[1]
        del found_dict['last_updated']
        # Mutual subset inclusion == dict equality (modulo 'last_updated').
        self._dictContainsSubset(expected_dict, found_dict)
        self._dictContainsSubset(found_dict, expected_dict)

    def test_write_operation_metadata(self):
        """ Test that a correct XML is created for an operation. """
        operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        expected_file = self.files_helper.get_operation_meta_file_path(self.PROJECT_NAME, operation.id)
        assert not os.path.exists(expected_file)
        self.files_helper.write_operation_metadata(operation)
        assert os.path.exists(expected_file)
        operation_meta = XMLReader(expected_file).read_metadata()
        loaded_operation = model_operation.Operation(None, None, None, None)
        loaded_operation.from_dict(operation_meta, dao, user_id=self.test_user.id)
        expected_dict = operation.to_dict()[1]
        found_dict = loaded_operation.to_dict()[1]
        for key, value in expected_dict.items():
            assert str(value) == str(found_dict[key])
        # Now validate that operation metaData can be also updated
        assert "new_group_name" != found_dict['user_group']
        self.files_helper.update_operation_metadata(self.PROJECT_NAME, "new_group_name", operation.id)
        found_dict = XMLReader(expected_file).read_metadata()
        assert "new_group_name" == found_dict['user_group']

    def test_remove_dt_happy_flow(self, dummy_datatype_index_factory):
        """ Happy flow for removing a file related to a DataType. """
        datatype = dummy_datatype_index_factory()
        h5_path = h5.path_for_stored_index(datatype)
        assert os.path.exists(h5_path), "Test file was not created!"
        self.files_helper.remove_datatype_file(h5_path)
        assert not os.path.exists(h5_path), "Test file was not deleted!"

    def test_remove_dt_non_existent(self, dummy_datatype_index_factory):
        """ Try to call remove on a dataType with no H5 file. Should work. """
        datatype = dummy_datatype_index_factory()
        h5_path = h5.path_for_stored_index(datatype)
        wrong_path = os.path.join(h5_path, "WRONG_PATH")
        assert not os.path.exists(wrong_path)
        self.files_helper.remove_datatype_file(wrong_path)

    def test_move_datatype(self, dummy_datatype_index_factory):
        """ Make sure associated H5 file is moved to a correct new location. """
        datatype = dummy_datatype_index_factory(project=self.test_project)
        old_file_path = h5.path_for_stored_index(datatype)
        assert os.path.exists(old_file_path), "Test file was not created!"
        full_path = h5.path_for_stored_index(datatype)
        self.files_helper.move_datatype(datatype, self.PROJECT_NAME + '2', "1", full_path)
        assert not os.path.exists(old_file_path), "Test file was not moved!"
        datatype.fk_from_operation = 43
        # BUG FIX: the original used old_file_path.split("\\")[-1], which only extracts the
        # file name on Windows; os.path.basename is portable.
        new_file_path = os.path.join(self.files_helper.get_project_folder(self.PROJECT_NAME + '2', "1"),
                                     os.path.basename(old_file_path))
        assert os.path.exists(new_file_path), "Test file was not created!"

    def test_find_relative_path(self):
        """ Tests that relative path is computed properly. """
        rel_path = self.files_helper.find_relative_path("/root/up/to/here/test/it/now", "/root/up/to/here")
        assert rel_path == os.sep.join(["test", "it", "now"]), "Did not extract relative path as expected."

    def test_remove_files_valid(self):
        """ Pass a valid list of files and check they are all removed. """
        file_list = ["test1", "test2", "test3"]
        for file_n in file_list:
            # Context manager guarantees the handle is closed even if the write fails.
            with open(file_n, 'w') as fp:
                fp.write('test')
        for file_n in file_list:
            assert os.path.isfile(file_n)
        self.files_helper.remove_files(file_list)
        for file_n in file_list:
            assert not os.path.isfile(file_n)

    def test_remove_folder(self):
        """ Pass an open file pointer, but ignore exceptions. """
        folder_name = "test_folder"
        os.mkdir(folder_name)
        assert os.path.isdir(folder_name), "Folder should be created."
        self.files_helper.remove_folder(folder_name)
        assert not os.path.isdir(folder_name), "Folder should be deleted."

    def test_remove_folder_non_existing_ignore_exc(self):
        """ Pass an open file pointer, but ignore exceptions. """
        folder_name = "test_folder"
        assert not os.path.isdir(folder_name), "Folder should not exist before call."
        self.files_helper.remove_folder(folder_name, ignore_errors=True)

    def test_remove_folder_non_existing(self):
        """ Pass an open file pointer, but ignore exceptions. """
        folder_name = "test_folder"
        assert not os.path.isdir(folder_name), "Folder should not exist before call."
        with pytest.raises(FileStructureException):
            self.files_helper.remove_folder(folder_name, False)

    def _dictContainsSubset(self, expected, actual, msg=None):
        """Assert that every (key, value) pair of `expected` appears in `actual`."""
        missing = []
        mismatched = []
        for key, value in expected.items():
            if key not in actual:
                missing.append(key)
            elif value != actual[key]:
                mismatched.append((key, value, actual[key]))
        # BUG FIX: the original returned a bool that every caller discarded, so the check
        # could never fail a test; also the missing/mismatched lists were collected but
        # never used. Assert here so callers get a real failure with details.
        assert not missing and not mismatched, \
            msg or "Missing keys: %s; mismatched values (key, expected, actual): %s" % (missing, mismatched)
        return True
def launch(self, view_model):
    # type: (ZIPConnectivityImporterModel) -> [ConnectivityIndex]
    """
    Execute import operations: unpack ZIP and build Connectivity object as result.

    :param view_model: holds the uploaded ZIP archive and the optional weights
        normalization mode

    :raises LaunchException: when `uploaded` is empty or nonexistent
    :raises Exception: when
                * weights or tracts matrix is invalid (negative values, wrong shape)
                * any of the vector orientation, areas, cortical or hemisphere is \
                  different from the expected number of nodes
    """
    if view_model.uploaded is None:
        raise LaunchException("Please select ZIP file which contains data to import")
    files = FilesHelper().unpack_zip(view_model.uploaded, self.storage_path)

    weights_matrix = None
    centres = None
    labels_vector = None
    tract_matrix = None
    orientation = None
    areas = None
    cortical_vector = None
    hemisphere_vector = None

    # Dispatch each unpacked file on the token found in its (lower-cased) name.
    for file_name in files:
        file_name_low = file_name.lower()
        if self.WEIGHT_TOKEN in file_name_low:
            weights_matrix = self.read_list_data(file_name)
        elif self.CENTRES_TOKEN in file_name_low or self.CENTRES_TOKEN2 in file_name_low:
            centres = self.read_list_data(file_name, usecols=[1, 2, 3])
            # COMPAT FIX: numpy.str / numpy.bool were deprecated aliases of the builtins
            # (removed in NumPy 1.24); the builtins are behavior-identical.
            labels_vector = self.read_list_data(file_name, dtype=str, usecols=[0])
        elif self.TRACT_TOKEN in file_name_low:
            tract_matrix = self.read_list_data(file_name)
        elif self.ORIENTATION_TOKEN in file_name_low:
            orientation = self.read_list_data(file_name)
        elif self.AREA_TOKEN in file_name_low:
            areas = self.read_list_data(file_name)
        elif self.CORTICAL_INFO in file_name_low:
            cortical_vector = self.read_list_data(file_name, dtype=bool)
        elif self.HEMISPHERE_INFO in file_name_low:
            hemisphere_vector = self.read_list_data(file_name, dtype=bool)

    # Clean remaining text-files.
    FilesHelper.remove_files(files, True)

    result = Connectivity()

    # Fill positions
    if centres is None:
        raise Exception(
            "Region centres are required for Connectivity Regions! "
            "We expect a file that contains *centres* inside the uploaded ZIP.")
    expected_number_of_nodes = len(centres)
    if expected_number_of_nodes < 2:
        raise Exception("A connectivity with at least 2 nodes is expected")
    result.centres = centres
    if labels_vector is not None:
        result.region_labels = labels_vector

    # Fill and check weights
    if weights_matrix is not None:
        if weights_matrix.shape != (expected_number_of_nodes, expected_number_of_nodes):
            raise Exception(
                "Unexpected shape for weights matrix! "
                "Should be %d x %d " % (expected_number_of_nodes, expected_number_of_nodes))
        result.weights = weights_matrix
        if view_model.normalization:
            result.weights = result.scaled_weights(view_model.normalization)

    # Fill and check tracts. Allow empty files for tracts, they will be computed by tvb-library.
    if tract_matrix is not None:
        if tract_matrix.size != 0:
            # PERF FIX: vectorized comparison instead of flattening into a Python list.
            if numpy.any(tract_matrix < 0):
                raise Exception(
                    "Negative values are not accepted in tracts matrix! "
                    "Please check your file, and use values >= 0")
            if tract_matrix.shape != (expected_number_of_nodes, expected_number_of_nodes):
                raise Exception(
                    "Unexpected shape for tracts matrix! "
                    "Should be %d x %d " % (expected_number_of_nodes, expected_number_of_nodes))
        result.tract_lengths = tract_matrix

    # Every optional per-node vector must match the number of region centres.
    if orientation is not None:
        if len(orientation) != expected_number_of_nodes:
            raise Exception(
                "Invalid size for vector orientation. "
                "Expected the same as region-centers number %d" % expected_number_of_nodes)
        result.orientations = orientation

    if areas is not None:
        if len(areas) != expected_number_of_nodes:
            raise Exception(
                "Invalid size for vector areas. "
                "Expected the same as region-centers number %d" % expected_number_of_nodes)
        result.areas = areas

    if cortical_vector is not None:
        if len(cortical_vector) != expected_number_of_nodes:
            raise Exception(
                "Invalid size for vector cortical. "
                "Expected the same as region-centers number %d" % expected_number_of_nodes)
        result.cortical = cortical_vector

    if hemisphere_vector is not None:
        if len(hemisphere_vector) != expected_number_of_nodes:
            raise Exception(
                "Invalid size for vector hemispheres. "
                "Expected the same as region-centers number %d" % expected_number_of_nodes)
        result.hemispheres = hemisphere_vector
    result.configure()
    return h5.store_complete(result, self.storage_path)
class AlgorithmService(object):
    """
    Service Layer for Algorithms manipulation (e.g. find all Uploaders,
    Filter algo by category, etc).

    Most methods are thin delegations to the DAO layer; the stateful methods
    deal with preparing adapter forms and grouping launchable adapters for UI.
    """

    def __init__(self):
        # Logger is keyed on the defining module; file_helper handles
        # project-folder and temporary-file operations.
        self.logger = get_logger(self.__class__.__module__)
        self.file_helper = FilesHelper()

    @staticmethod
    def get_category_by_id(identifier):
        """ Pass to DAO the retrieve of category by ID operation."""
        return dao.get_category_by_id(identifier)

    @staticmethod
    def get_raw_categories():
        """:returns: AlgorithmCategory list of entities that have results in RAW state (Creators/Uploaders)"""
        return dao.get_raw_categories()

    @staticmethod
    def get_visualisers_category():
        """Retrieve all Algorithm categories, with display capability.

        :returns: the first visualiser category found
        :raises ValueError: when no visualiser category exists in DB
        """
        result = dao.get_visualisers_categories()
        if not result:
            raise ValueError("View Category not found!!!")
        return result[0]

    @staticmethod
    def get_algorithm_by_identifier(ident):
        """ Retrieve Algorithm entity by ID. Return None, if ID is not found in DB. """
        return dao.get_algorithm_by_id(ident)

    @staticmethod
    def get_operation_numbers(proj_id):
        """ Count total number of operations started for current project. """
        return dao.get_operation_numbers(proj_id)

    def _prepare_dt_display_name(self, dt_index, dt):
        """Build a human-readable label for one datatype row.

        :param dt_index: the datatype index class used for the GID lookup
        :param dt: a result row of the get_values_of_datatype function;
            positions used here: dt[0]=id, dt[2]=gid, dt[3]=subject,
            dt[4]=creation date, dt[5]=operation group info, dt[6]=tag
            (inferred from usage below -- TODO confirm against
            get_values_of_datatype's SELECT column order)
        :returns: display string combining name, subject, date/group, tag and ID
        """
        # dt is a result of the get_values_of_datatype function
        db_dt = dao.get_generic_entity(dt_index, dt[2], "gid")
        display_name = db_dt[0].display_name
        display_name += ' - ' + (dt[3] or "None ")  # Subject
        if dt[5]:
            display_name += ' - From: ' + str(dt[5])
        else:
            display_name += utils.date2string(dt[4])
        if dt[6]:
            display_name += ' - ' + str(dt[6])
        display_name += ' - ID:' + str(dt[0])
        return display_name

    def fill_selectfield_with_datatypes(self, field, project_id, extra_conditions=None):
        # type: (TraitDataTypeSelectField, int, list) -> None
        """Populate a datatype select field with (datatype, display_name)
        options matching the field's own filters plus extra_conditions."""
        filtering_conditions = FilterChain()
        filtering_conditions += field.conditions
        filtering_conditions += extra_conditions
        datatypes, _ = dao.get_values_of_datatype(project_id, field.datatype_index, filtering_conditions)
        datatype_options = []
        for datatype in datatypes:
            display_name = self._prepare_dt_display_name(field.datatype_index, datatype)
            datatype_options.append((datatype, display_name))
        field.datatype_options = datatype_options

    def _fill_form_with_datatypes(self, form, project_id, extra_conditions=None):
        """Fill every TraitDataTypeSelectField on the form with launchable
        datatype options; other field types are left untouched."""
        for form_field in form.trait_fields:
            if isinstance(form_field, TraitDataTypeSelectField):
                self.fill_selectfield_with_datatypes(form_field, project_id, extra_conditions)
        return form

    def prepare_adapter_form(self, adapter_instance=None, form_instance=None, project_id=None, extra_conditions=None):
        # type: (ABCAdapter, ABCAdapterForm, int, []) -> ABCAdapterForm
        """Obtain an adapter form (either the given instance or a fresh one
        from the adapter) and populate its datatype select fields.

        :raises OperationException: when neither a form nor an adapter was given
        """
        form = None
        if form_instance is not None:
            form = form_instance
        elif adapter_instance is not None:
            # get_form() returns the form class; the second call instantiates it
            form = adapter_instance.get_form()()
        if form is None:
            raise OperationException("Cannot prepare None form")
        form = self._fill_form_with_datatypes(form, project_id, extra_conditions)
        return form

    def _prepare_upload_post_data(self, form, post_data, project_id):
        """For each upload field in post_data, persist the uploaded stream to a
        temporary file in the project's TEMP folder and replace the field value
        in post_data with that file path (or None when no stream was sent)."""
        for form_field in form.trait_fields:
            if isinstance(form_field, TraitUploadField) and form_field.name in post_data:
                field = post_data[form_field.name]
                file_name = None
                if hasattr(field, 'file') and field.file is not None:
                    project = dao.get_project_by_id(project_id)
                    temporary_storage = self.file_helper.get_project_folder(project, self.file_helper.TEMP_FOLDER)
                    try:
                        # NOTE(review): '_' + str(0) always appends "_0" -- looks
                        # like a leftover loop counter; confirm intent.
                        uq_name = utils.date2string(datetime.now(), True) + '_' + str(0)
                        file_name = TEMPORARY_PREFIX + uq_name + '_' + field.filename
                        file_name = os.path.join(temporary_storage, file_name)
                        with open(file_name, 'wb') as file_obj:
                            file_obj.write(field.file.read())
                    except Exception as excep:
                        # TODO: is this handled properly?
                        # NOTE(review): assigning excep.message is a Python-2 idiom;
                        # on Python 3 it does not change the re-raised message.
                        self.file_helper.remove_files([file_name])
                        excep.message = 'Could not continue: Invalid input files'
                        raise excep
                post_data[form_field.name] = file_name

    def fill_adapter_form(self, adapter_instance, post_data, project_id):
        # type: (ABCAdapter, dict, int) -> ABCAdapterForm
        """Prepare the adapter's form and fill it from HTTP post data; upload
        fields are first converted into temporary file paths."""
        form = self.prepare_adapter_form(adapter_instance=adapter_instance, project_id=project_id)
        if isinstance(form, ABCUploaderForm):
            self._prepare_upload_post_data(form, post_data, project_id)
        if 'fill_defaults' in post_data:
            form.fill_from_post_plus_defaults(post_data)
        else:
            form.fill_from_post(post_data)
        return form

    def prepare_adapter(self, stored_adapter):
        """Instantiate the adapter class described by a StoredAdapter DB entity.

        :raises OperationException: when the adapter class cannot be built
        """
        adapter_module = stored_adapter.module
        adapter_name = stored_adapter.classname
        try:
            # Prepare Adapter Interface, by populating with existent data,
            # in case of a parameter of type DataType.
            adapter_instance = ABCAdapter.build_adapter(stored_adapter)
            return adapter_instance
        except Exception:
            self.logger.exception('Not found:' + adapter_name + ' in:' + adapter_module)
            raise OperationException("Could not prepare " + adapter_name)

    @staticmethod
    def get_algorithm_by_module_and_class(module, classname):
        """ Get the db entry from the algorithm table for the given module and class. """
        return dao.get_algorithm_by_module(module, classname)

    @staticmethod
    def create_link(data_ids, project_id):
        """ For a list of dataType IDs and a project id create all the required links. """
        for data in data_ids:
            link = Links(data, project_id)
            dao.store_entity(link)

    @staticmethod
    def remove_link(dt_id, project_id):
        """ Remove the link from the datatype given by dt_id to project given by project_id. """
        link = dao.get_link(dt_id, project_id)
        if link is not None:
            dao.remove_entity(Links, link.id)

    @staticmethod
    def get_upload_algorithms():
        """ :return: List of StoredAdapter entities """
        categories = dao.get_uploader_categories()
        categories_ids = [categ.id for categ in categories]
        return dao.get_adapters_from_categories(categories_ids)

    @staticmethod
    def get_analyze_groups():
        """
        :return: tuple (first launchable category, list of AlgorithmTransientGroup entities)
        """
        categories = dao.get_launchable_categories(elimin_viewers=True)
        categories_ids = [categ.id for categ in categories]
        stored_adapters = dao.get_adapters_from_categories(categories_ids)
        groups_list = []
        for adapter in stored_adapters:
            # For empty groups, this time, we fill the actual adapter
            group = AlgorithmTransientGroup(adapter.group_name or adapter.displayname,
                                            adapter.group_description or adapter.description)
            group = AlgorithmService._find_group(groups_list, group)
            group.children.append(adapter)
        return categories[0], groups_list

    @staticmethod
    def _find_group(groups_list, new_group):
        """Return the existing group matching new_group by name and description,
        or append new_group to groups_list and return it."""
        # Search from the end: a matching group was most likely appended recently.
        for i in range(len(groups_list) - 1, -1, -1):
            current_group = groups_list[i]
            if current_group.name == new_group.name and current_group.description == new_group.description:
                return current_group
        # Not found in list
        groups_list.append(new_group)
        return new_group

    def get_visualizers_for_group(self, dt_group_gid):
        """Return the visualiser adapters applicable to a datatype group GID."""
        categories = dao.get_visualisers_categories()
        return self._get_launchable_algorithms(dt_group_gid, categories)[1]

    def get_launchable_algorithms(self, datatype_gid):
        """
        :param datatype_gid: Filter only algorithms compatible with this GUID
        :return: tuple (dict(category_name: List AlgorithmTransientGroup),
                 has_operations_warning flag)
        """
        categories = dao.get_launchable_categories()
        datatype_instance, filtered_adapters, has_operations_warning = self._get_launchable_algorithms(datatype_gid, categories)
        categories_dict = dict()
        for c in categories:
            categories_dict[c.id] = c.displayname
        return self._group_adapters_by_category(filtered_adapters, categories_dict), has_operations_warning

    def _get_launchable_algorithms(self, datatype_gid, categories):
        """Resolve the GID to a datatype instance, then delegate the filtering."""
        datatype_instance = dao.get_datatype_by_gid(datatype_gid)
        return self.get_launchable_algorithms_for_datatype(datatype_instance, categories)

    def get_launchable_algorithms_for_datatype(self, datatype, categories):
        """Collect the adapters from the given categories that accept this
        datatype (by class hierarchy and by each adapter's datatype filter).

        :returns: tuple (datatype, filtered_adapters list, has_operations_warning
                  flag -- True when at least one filter could not be evaluated)
        """
        data_class = datatype.__class__
        # Gather the datatype's whole DataType-derived ancestry, since adapters
        # may be registered as compatible with any superclass.
        all_compatible_classes = [data_class.__name__]
        for one_class in getmro(data_class):
            # from tvb.basic.traits.types_mapped import MappedType
            if issubclass(one_class, DataType) and one_class.__name__ not in all_compatible_classes:
                all_compatible_classes.append(one_class.__name__)
        self.logger.debug("Searching in categories: " + str(categories) + " for classes " + str(all_compatible_classes))
        categories_ids = [categ.id for categ in categories]
        launchable_adapters = dao.get_applicable_adapters(all_compatible_classes, categories_ids)
        filtered_adapters = []
        has_operations_warning = False
        for stored_adapter in launchable_adapters:
            filter_chain = FilterChain.from_json(stored_adapter.datatype_filter)
            try:
                # An adapter with no filter is always applicable.
                if not filter_chain or filter_chain.get_python_filter_equivalent(datatype):
                    filtered_adapters.append(stored_adapter)
            except (TypeError, InvalidFilterChainInput):
                self.logger.exception("Could not evaluate filter on " + str(stored_adapter))
                has_operations_warning = True
        return datatype, filtered_adapters, has_operations_warning

    def _group_adapters_by_category(self, stored_adapters, categories):
        """
        :param stored_adapters: list StoredAdapter
        :param categories: dict mapping category id to display name
        :return: dict(category_name: List AlgorithmTransientGroup), empty groups all in the same AlgorithmTransientGroup
        """
        categories_dict = dict()
        for adapter in stored_adapters:
            category_name = categories.get(adapter.fk_category)
            if category_name in categories_dict:
                groups_list = categories_dict.get(category_name)
            else:
                groups_list = []
                categories_dict[category_name] = groups_list
            group = AlgorithmTransientGroup(adapter.group_name, adapter.group_description)
            group = self._find_group(groups_list, group)
            group.children.append(adapter)
        return categories_dict

    @staticmethod
    def get_generic_entity(entity_type, filter_value, select_field):
        """Delegate a generic entity lookup to the DAO layer."""
        return dao.get_generic_entity(entity_type, filter_value, select_field)

    ##########################################################################
    ######## Methods below are for MeasurePoint selections ###################
    ##########################################################################

    @staticmethod
    def get_selections_for_project(project_id, datatype_gid):
        """
        Retrieved from DB saved selections for current project. If a certain selection
        doesn't have all the labels between the labels of the given connectivity than
        this selection will not be returned.
        :returns: List of ConnectivitySelection entities.
        """
        return dao.get_selections_for_project(project_id, datatype_gid)

    @staticmethod
    def save_measure_points_selection(ui_name, selected_nodes, datatype_gid, project_id):
        """ Store in DB a ConnectivitySelection; an existing selection with the
        same ui_name is updated in place instead of duplicated. """
        select_entities = dao.get_selections_for_project(project_id, datatype_gid, ui_name)
        if select_entities:
            # when the name of the new selection is within the available selections then update that selection:
            select_entity = select_entities[0]
            select_entity.selected_nodes = selected_nodes
        else:
            select_entity = MeasurePointsSelection(ui_name, selected_nodes, datatype_gid, project_id)
        dao.store_entity(select_entity)

    ##########################################################################
    ##########    Bellow are PSE Filters specific methods   ##################
    ##########################################################################

    @staticmethod
    def get_stored_pse_filters(datatype_group_gid):
        """Retrieve the PSE filters stored for a datatype group GID."""
        return dao.get_stored_pse_filters(datatype_group_gid)

    @staticmethod
    def save_pse_filter(ui_name, datatype_group_gid, threshold_value, applied_on):
        """ Store in DB a PSE filter; an existing filter with the same ui_name
        is updated in place instead of duplicated. """
        select_entities = dao.get_stored_pse_filters(datatype_group_gid, ui_name)
        if select_entities:
            # when the UI name is already in DB, update the existing entity
            select_entity = select_entities[0]
            select_entity.threshold_value = threshold_value
            select_entity.applied_on = applied_on  # this is the type, as in applied on size or color
        else:
            select_entity = StoredPSEFilter(ui_name, datatype_group_gid, threshold_value, applied_on)
        dao.store_entity(select_entity)
class FilesHelperTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.entities.file.files_helper module.

    NOTE(review): uses Python-2-only APIs (dict.iteritems, assertDictContainsSubset)
    -- confirm the target interpreter before porting.
    """
    # Name of the project created in setUp and used by all tests below.
    PROJECT_NAME = "test_proj"

    def setUp(self):
        """ Set up the context needed by the tests. """
        self.files_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, self.PROJECT_NAME)

    def tearDown(self):
        """ Remove generated project during tests. """
        self.delete_project_folders()

    def test_check_created(self):
        """ Test standard flows for check created: both the default storage root
        and an explicitly passed sub-folder must end up existing on disk. """
        self.files_helper.check_created()
        self.assertTrue(os.path.exists(root_storage), "Storage not created!")
        self.files_helper.check_created(os.path.join(root_storage, "test"))
        self.assertTrue(os.path.exists(root_storage), "Storage not created!")
        self.assertTrue(os.path.exists(os.path.join(root_storage, "test")), "Test directory not created!")

    def test_get_project_folder(self):
        """
        Test the get_project_folder method which should create a folder in case
        it doesn't already exist.
        """
        project_path = self.files_helper.get_project_folder(self.test_project)
        self.assertTrue(os.path.exists(project_path), "Folder doesn't exist")
        # Requesting an operation sub-folder ("43") must also create it.
        folder_path = self.files_helper.get_project_folder(self.test_project, "43")
        self.assertTrue(os.path.exists(project_path), "Folder doesn't exist")
        self.assertTrue(os.path.exists(folder_path), "Folder doesn't exist")

    def test_rename_project_structure(self):
        """ Try to rename the folder structure of a project. Standard flow. """
        self.files_helper.get_project_folder(self.test_project)
        path, name = self.files_helper.rename_project_structure(self.test_project.name, "new_name")
        self.assertNotEqual(path, name, "Rename didn't take effect.")

    def test_rename_structure_same_name(self):
        """ Renaming a project's folder structure to its current name must
        raise FileStructureException. """
        self.files_helper.get_project_folder(self.test_project)
        self.assertRaises(FileStructureException, self.files_helper.rename_project_structure,
                          self.test_project.name, self.PROJECT_NAME)

    def test_remove_project_structure(self):
        """ Check that remove project structure deletes the corresponding folder. Standard flow. """
        full_path = self.files_helper.get_project_folder(self.test_project)
        self.assertTrue(os.path.exists(full_path), "Folder was not created.")
        self.files_helper.remove_project_structure(self.test_project.name)
        self.assertFalse(os.path.exists(full_path), "Project folder not deleted.")

    def test_write_project_metadata(self):
        """ Write XML for test-project, then read it back and check the loaded
        project matches the original field by field. """
        self.files_helper.write_project_metadata(self.test_project)
        expected_file = self.files_helper.get_project_meta_file_path(self.PROJECT_NAME)
        self.assertTrue(os.path.exists(expected_file))
        project_meta = XMLReader(expected_file).read_metadata()
        loaded_project = model.Project(None, None)
        loaded_project.from_dict(project_meta, self.test_user.id)
        self.assertEqual(self.test_project.name, loaded_project.name)
        self.assertEqual(self.test_project.description, loaded_project.description)
        self.assertEqual(self.test_project.gid, loaded_project.gid)
        expected_dict = self.test_project.to_dict()[1]
        # last_updated changes between write and read, so exclude it from comparison.
        del expected_dict['last_updated']
        found_dict = loaded_project.to_dict()[1]
        del found_dict['last_updated']
        # Mutual subset check == dict equality.
        self.assertDictContainsSubset(expected_dict, found_dict)
        self.assertDictContainsSubset(found_dict, expected_dict)

    def test_write_operation_metadata(self):
        """ Test that a correct XML is created for an operation, and that the
        operation metadata can afterwards be updated in place. """
        operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        expected_file = self.files_helper.get_operation_meta_file_path(self.PROJECT_NAME, operation.id)
        self.assertFalse(os.path.exists(expected_file))
        self.files_helper.write_operation_metadata(operation)
        self.assertTrue(os.path.exists(expected_file))
        operation_meta = XMLReader(expected_file).read_metadata()
        loaded_operation = model.Operation(None, None, None, None)
        loaded_operation.from_dict(operation_meta, dao)
        expected_dict = operation.to_dict()[1]
        found_dict = loaded_operation.to_dict()[1]
        # NOTE(review): iteritems is Python 2 only.
        for key, value in expected_dict.iteritems():
            self.assertEqual(str(value), str(found_dict[key]))
        # Now validate that operation metaData can be also updated
        self.assertNotEqual("new_group_name", found_dict['user_group'])
        self.files_helper.update_operation_metadata(self.PROJECT_NAME, "new_group_name", operation.id)
        found_dict = XMLReader(expected_file).read_metadata()
        self.assertEqual("new_group_name", found_dict['user_group'])

    def test_remove_dt_happy_flow(self):
        """ Happy flow for removing a file related to a DataType. """
        folder_path = self.files_helper.get_project_folder(self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        # NOTE(review): file handle deliberately not closed here nor below;
        # creates an empty file, leaks the handle until GC.
        open(datatype.get_storage_file_path(), 'w')
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()), "Test file was not created!")
        self.files_helper.remove_datatype(datatype)
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()), "Test file was not deleted!")

    def test_remove_dt_non_existent(self):
        """ Try to call remove on a dataType with no H5 file. Should work. """
        folder_path = self.files_helper.get_project_folder(self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()))
        self.files_helper.remove_datatype(datatype)

    def test_move_datatype(self):
        """ Make sure associated H5 file is moved to a correct new location. """
        folder_path = self.files_helper.get_project_folder(self.test_project, "42")
        datatype = MappedType()
        datatype.storage_path = folder_path
        open(datatype.get_storage_file_path(), 'w')
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()), "Test file was not created!")
        self.files_helper.move_datatype(datatype, self.PROJECT_NAME + '11', "43")
        # Old location must be empty; the file must exist under the new project folder.
        self.assertFalse(os.path.exists(datatype.get_storage_file_path()), "Test file was not moved!")
        datatype.storage_path = self.files_helper.get_project_folder(self.PROJECT_NAME + '11', "43")
        self.assertTrue(os.path.exists(datatype.get_storage_file_path()), "Test file was not created!")

    def test_find_relative_path(self):
        """ Tests that relative path is computed properly. """
        rel_path = self.files_helper.find_relative_path("/root/up/to/here/test/it/now", "/root/up/to/here")
        self.assertEqual(rel_path, os.sep.join(["test", "it", "now"]), "Did not extract relative path as expected.")

    def test_remove_files_valid(self):
        """ Pass a valid list of files and check they are all removed. """
        file_list = ["test1", "test2", "test3"]
        for file_n in file_list:
            fp = open(file_n, 'w')
            fp.write('test')
            fp.close()
        for file_n in file_list:
            self.assertTrue(os.path.isfile(file_n))
        self.files_helper.remove_files(file_list)
        for file_n in file_list:
            self.assertFalse(os.path.isfile(file_n))

    def test_remove_folder(self):
        """ Removing an existing folder should delete it from disk. """
        folder_name = "test_folder"
        os.mkdir(folder_name)
        self.assertTrue(os.path.isdir(folder_name), "Folder should be created.")
        self.files_helper.remove_folder(folder_name)
        self.assertFalse(os.path.isdir(folder_name), "Folder should be deleted.")

    def test_remove_folder_non_existing_ignore_exc(self):
        """ Removing a non-existing folder with ignore_errors=True should not raise. """
        folder_name = "test_folder"
        self.assertFalse(os.path.isdir(folder_name), "Folder should not exist before call.")
        self.files_helper.remove_folder(folder_name, ignore_errors=True)

    def test_remove_folder_non_existing(self):
        """ Removing a non-existing folder without ignoring errors should raise
        FileStructureException. """
        folder_name = "test_folder"
        self.assertFalse(os.path.isdir(folder_name), "Folder should not exist before call.")
        self.assertRaises(FileStructureException, self.files_helper.remove_folder, folder_name, False)