def test_remove_zero_connectivity():
    service = VolumeService()
    data = numpy.array([[[0, 0, 1], [2, 3, 0]], [[4, 0, 0], [0, 0, 0]]])
    volume = Volume(data, [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]], None)
    volume_path = get_temporary_files_path("tdi_lbl.nii.gz")
    IOUtils.write_volume(volume_path, volume)

    in_connectivity = numpy.array(
        [[10, 1, 0, 3], [0, 10, 0, 2], [0, 0, 0, 0], [0, 0, 0, 10]])
    connectivity_path = get_temporary_files_path("conn.csv")
    numpy.savetxt(connectivity_path, in_connectivity, fmt='%1d')

    tract_lengths_path = get_temporary_files_path("tract_lengths.csv")
    numpy.savetxt(tract_lengths_path, in_connectivity, fmt='%1d')

    service.remove_zero_connectivity_nodes(
        volume_path, connectivity_path, tract_lengths_path)

    assert os.path.exists(os.path.splitext(connectivity_path)[0] + ".npy")
    assert os.path.exists(os.path.splitext(tract_lengths_path)[0] + ".npy")

    vol = IOUtils.read_volume(volume_path)
    assert len(numpy.unique(vol.data)) == 4

    conn = numpy.array(numpy.genfromtxt(connectivity_path, dtype='int64'))
    assert numpy.array_equal(conn, [[20, 1, 3], [1, 20, 2], [3, 2, 20]])
def test_label_with_dilation():
    service = VolumeService()
    ct_mask_data = numpy.array([[[0, 0, 0], [0, 1, 0], [0, 1, 0]],
                                [[1, 1, 1], [0, 0, 0], [0, 0, 0]],
                                [[0, 0, 1], [0, 0, 0], [0, 0, 1]]])
    ct_mask_volume = Volume(
        ct_mask_data, [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]], None)
    ct_mask_path = get_temporary_files_path("ct_mask.nii.gz")
    IOUtils.write_volume(ct_mask_path, ct_mask_volume)

    ct_dil_mask_data = numpy.array([[[0, 0, 0], [1, 1, 1], [0, 1, 0]],
                                    [[1, 1, 1], [0, 0, 0], [0, 0, 0]],
                                    [[0, 1, 1], [0, 0, 0], [0, 1, 1]]])
    ct_dil_mask_volume = Volume(
        ct_dil_mask_data, [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]], None)
    ct_dil_mask_path = get_temporary_files_path("ct_dil_mask.nii.gz")
    IOUtils.write_volume(ct_dil_mask_path, ct_dil_mask_volume)

    ct_result = get_temporary_files_path("ct_res.nii.gz")
    service.label_with_dilation(ct_mask_path, ct_dil_mask_path, ct_result)

    assert os.path.exists(ct_mask_path)
    assert os.path.exists(ct_dil_mask_path)
    assert os.path.exists(ct_result)

    vol = IOUtils.read_volume(ct_result)
    assert numpy.array_equal(numpy.unique(vol.data), [0, 1, 2, 3])
def test_simple_label_config():
    service = VolumeService()
    data = numpy.array([[[0, 0, 1], [1, 2, 0]],
                        [[2, 1, 1000], [1000, 1, 0]],
                        [[0, 0, 1], [1, 2, 0]],
                        [[2, 1, 1000], [3, 1, 0]]])
    in_volume = Volume(data, [], None)
    out_volume = service._label_config(in_volume)
    assert numpy.array_equal(out_volume.data, [[[0, 0, 1], [1, 2, 0]],
                                               [[2, 1, 4], [4, 1, 0]],
                                               [[0, 0, 1], [1, 2, 0]],
                                               [[2, 1, 4], [3, 1, 0]]])
def test_label_vol_from_tdi():
    service = VolumeService()
    data = numpy.array([[[0, 0, 1], [1, 2, 0]],
                        [[2, 1, 3], [3, 1, 0]],
                        [[0, 0, 1], [1, 2, 0]],
                        [[2, 1, 3], [3, 1, 0]]])
    volume = Volume(data, [], None)
    labeled_volume = service._label_volume(volume, 0.5)
    assert numpy.array_equal(labeled_volume.data, [[[0, 0, 1], [2, 3, 0]],
                                                   [[4, 5, 6], [7, 8, 0]],
                                                   [[0, 0, 9], [10, 11, 0]],
                                                   [[12, 13, 14], [15, 16, 0]]])
def gen_seeg_xyz_from_endpoints(self, scheme_fname: os.PathLike, out_fname: os.PathLike,
                                transform_mat: Optional[os.PathLike] = None,
                                src_img: Optional[os.PathLike] = None,
                                dest_img: Optional[os.PathLike] = None):
    """
    Read the file with electrode endpoints (`scheme_fname`) and write the file with the
    positions of the electrode contacts (`out_fname`), optionally applying a linear
    transformation given either by a transformation matrix (`transform_mat`) or by
    source and destination images (`src_img` and `dest_img`).

    Each electrode in the scheme file should be described by one line containing the
    following fields:

        Name Target_x Target_y Target_z Entry_x Entry_y Entry_z Num_contacts [Spacing_pattern]

    The spacing pattern should be a double-quoted string with the distances between
    neighbouring contacts. If there are more contacts than elements in the spacing
    pattern, the pattern is repeated. If absent, the default spacing "3.5" is used.
    All distances should be in mm.
    """
    DEFAULT_SPACING_PATTERN = "3.5"

    infile = open(scheme_fname, "r")
    outfile = open(out_fname, "w")

    for line in infile:
        line = line.strip()
        if not line or line[0] == '#':
            continue

        # Using csv.reader to allow for quoted strings
        items = next(reader([line], delimiter=' ', quotechar='"'))
        # Skip empty fields created by multiple delimiters
        items = [item for item in items if item != ""]

        if len(items) == 8:
            # Using default spacing pattern of 3.5 mm
            name, tgx, tgy, tgz, enx, eny, enz, ncontacts = items
            spacing_pattern_str = DEFAULT_SPACING_PATTERN
        elif len(items) == 9:
            name, tgx, tgy, tgz, enx, eny, enz, ncontacts, spacing_pattern_str = items
        else:
            raise ValueError("Unexpected number of items:\n%s" % line)

        target = numpy.array([float(x) for x in [tgx, tgy, tgz]])
        entry = numpy.array([float(x) for x in [enx, eny, enz]])
        ncontacts = int(ncontacts)
        spacing_pattern = [float(x) for x in spacing_pattern_str.split()]

        if transform_mat is not None:
            assert src_img is not None and dest_img is not None
            volume_service = VolumeService()
            target = volume_service.transform_coords(target, src_img, dest_img, transform_mat)
            entry = volume_service.transform_coords(entry, src_img, dest_img, transform_mat)

        contacts = self.gen_contacts_on_electrode(name, target, entry, ncontacts, spacing_pattern)

        for contact_name, pos in contacts:
            outfile.write("%-6s %7.2f %7.2f %7.2f\n" % (contact_name, pos[0], pos[1], pos[2]))

    infile.close()
    outfile.close()
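# --- Illustrative usage sketch (not part of the original source) ----------------------
# A minimal example of the scheme-file format described in the docstring above, plus a
# possible call. The electrode name, coordinates and file names are hypothetical, and it
# is assumed here that the method belongs to SensorService (as suggested by the imports
# further below); adjust the owning class if that assumption does not hold.
#
# scheme.txt (distances in mm; the quoted spacing pattern is optional):
#   # Name  Tx     Ty    Tz    Ex     Ey    Ez    N  Spacing
#   A1      -32.0  10.5  -8.0  -60.0  12.0  -6.0  8  "3.5 3.5 3.5 8.0"
#
# from tvb.recon.algo.service.sensor import SensorService
# SensorService().gen_seeg_xyz_from_endpoints("scheme.txt", "seeg_xyz.txt")
# ---------------------------------------------------------------------------------------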
def create_tvb_dataset(atlas_suffix: AtlasSuffix, mri_direc: os.PathLike,
                       region_details_direc: os.PathLike,
                       weights_file: os.PathLike, tracts_file: os.PathLike,
                       out_dir: os.PathLike, bring_t1=False):
    # Symmetrize the weights and tract-length matrices (inputs are expected to be triangular)
    weights_matrix = numpy.loadtxt(str(weights_file), dtype='i', delimiter=' ')
    weights_matrix += weights_matrix.T

    tracts_matrix = numpy.loadtxt(str(tracts_file), dtype='f', delimiter=' ')
    tracts_matrix += tracts_matrix.T

    # Region details produced by the aseg processing step
    is_cortical_rm = numpy.genfromtxt(
        os.path.join(region_details_direc, AsegFiles.CORTICAL_TXT.value.replace("%s", atlas_suffix)),
        usecols=[0], dtype='i')
    region_names = numpy.genfromtxt(
        os.path.join(region_details_direc, AsegFiles.CENTERS_TXT.value.replace("%s", atlas_suffix)),
        usecols=[0], dtype="str")
    region_centers = numpy.genfromtxt(
        os.path.join(region_details_direc, AsegFiles.CENTERS_TXT.value.replace("%s", atlas_suffix)),
        usecols=[1, 2, 3])
    region_areas = numpy.genfromtxt(
        os.path.join(region_details_direc, AsegFiles.AREAS_TXT.value.replace("%s", atlas_suffix)),
        usecols=[0])
    region_orientations = numpy.genfromtxt(
        os.path.join(region_details_direc, AsegFiles.ORIENTATIONS_TXT.value.replace("%s", atlas_suffix)),
        usecols=[0, 1, 2])
    rm_idx = numpy.genfromtxt(
        os.path.join(region_details_direc, AsegFiles.RM_TO_APARC_ASEG_TXT.value.replace("%s", atlas_suffix)),
        usecols=[0, 1], dtype='i')
    rm_index_dict = dict(zip(rm_idx[:, 0], rm_idx[:, 1]))
    print(rm_index_dict)

    genericIO = GenericIO()
    genericIO.write_connectivity_zip(out_dir, weights_matrix, tracts_matrix, is_cortical_rm,
                                     region_names, region_centers, region_areas,
                                     region_orientations, atlas_suffix)

    # Relabel aparc+aseg to match the connectivity region indices and write it out
    aparc_aseg_file = os.path.join(mri_direc, T1Files.APARC_ASEG_NII_GZ.value.replace("%s", atlas_suffix))
    aparc_aseg_volume = IOUtils.read_volume(aparc_aseg_file)

    volume_service = VolumeService()
    aparc_aseg_cor_volume = volume_service.change_labels_of_aparc_aseg(
        atlas_suffix, aparc_aseg_volume, rm_index_dict, weights_matrix.shape[0])
    IOUtils.write_volume(
        os.path.join(out_dir, OutputConvFiles.APARC_ASEG_COR_NII_GZ.value.replace("%s", atlas_suffix)),
        aparc_aseg_cor_volume)

    if bring_t1:
        shutil.copy2(os.path.join(mri_direc, "T1.nii.gz"), out_dir)
from tvb.recon.algo.service.subparcellation import SubparcellationService
from tvb.recon.algo.service.sensor import SensorService
from tvb.recon.algo.service.annotation import AnnotationService, DEFAULT_LUT

try:
    import gdist
except ImportError:
    warnings.warn(
        'Geodesic distance module unavailable; please pip install gdist.')

SUBJECTS_DIR, SUBJECT, FREESURFER_HOME = [
    os.environ[key] for key in 'SUBJECTS_DIR SUBJECT FREESURFER_HOME'.split()]

surfaceService = SurfaceService()
volumeService = VolumeService()
subparcelatioService = SubparcellationService()
sensorService = SensorService()
annotationService = AnnotationService()


def gen_head_model():
    sensorService.gen_head_model()


# -----------------------------Freesurfer surfaces------------------------------

def convert_fs_to_brain_visa(fs_surf):
    surfaceService.convert_fs_to_brain_visa(fs_surf)