Example #1
    def label_with_dilation(self, to_label_nii_fname: os.PathLike, dilated_nii_fname: os.PathLike,
                            out_nii_fname: os.PathLike):
        """
        Labels a volume using its labeled dilation. The dilated volume is labeled using scipy.ndimage.label function.
        :param to_label_nii_fname: usually a CT-mask.nii.gz
        :param dilated_nii_fname: dilated version of the to_label_nii_fname volume
        """

        # TODO could make dilation with ndimage also.
        mask = IOUtils.read_volume(to_label_nii_fname)
        dil_mask = IOUtils.read_volume(dilated_nii_fname)

        lab, n = scipy.ndimage.label(dil_mask.data)

        # TODO: this change is from tvb-make. Keep it or not? It returns a different result than the old version.
        lab_xyz = list(self.compute_label_volume_centers(lab, dil_mask.affine_matrix))
        lab_sort = numpy.r_[:n + 1]
        # sort labels along AP axis
        for i, (val, _) in enumerate(sorted(lab_xyz, key=lambda t: t[1][1])):
            lab_sort[val] = i
        lab = lab_sort[lab]

        mask.data *= lab
        self.logger.info(
            '%d objects found when labeling the dilated volume.', n)

        IOUtils.write_volume(out_nii_fname, mask)
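
For reference, a minimal standalone sketch (toy data, names are illustrative, not project code) of the two building blocks used above: scipy.ndimage.label for connected-component labeling, and a permutation array for re-ordering the labels along one axis, as in the lab = lab_sort[lab] step.

import numpy
import scipy.ndimage

# Toy binary mask with two separate components.
dil = numpy.zeros((6, 6), dtype=int)
dil[0, 0:2] = 1      # component with a small second-axis coordinate
dil[4:6, 4:6] = 1    # component with a large second-axis coordinate

lab, n = scipy.ndimage.label(dil)   # lab holds values 0..n, 0 is background

# Order the labels by the second coordinate of their centroids,
# then relabel the whole array with a single fancy-indexing lookup.
centers = scipy.ndimage.center_of_mass(dil, lab, index=range(1, n + 1))
order = sorted(range(1, n + 1), key=lambda lbl: centers[lbl - 1][1])
lab_sort = numpy.zeros(n + 1, dtype=int)
for new_label, old_label in enumerate(order, start=1):
    lab_sort[old_label] = new_label
lab = lab_sort[lab]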
Example #2
def test_remove_zero_connectivity():
    service = VolumeService()

    data = numpy.array([[[0, 0, 1], [2, 3, 0]], [[4, 0, 0], [0, 0, 0]]])
    volume = Volume(data, [[1, 0, 0, 0], [0, 1, 0, 0],
                           [0, 0, 1, 0], [0, 0, 0, 1]], None)
    volume_path = get_temporary_files_path("tdi_lbl.nii.gz")

    IOUtils.write_volume(volume_path, volume)

    in_connectivity = numpy.array(
        [[10, 1, 0, 3], [0, 10, 0, 2], [0, 0, 0, 0], [0, 0, 0, 10]])
    connectivity_path = get_temporary_files_path("conn.csv")
    numpy.savetxt(connectivity_path, in_connectivity, fmt='%1d')

    tract_lengths_path = get_temporary_files_path("tract_lengths.csv")
    numpy.savetxt(tract_lengths_path, in_connectivity, fmt='%1d')

    service.remove_zero_connectivity_nodes(
        volume_path, connectivity_path, tract_lengths_path)

    assert os.path.exists(os.path.splitext(connectivity_path)[0] + ".npy")
    assert os.path.exists(os.path.splitext(tract_lengths_path)[0] + ".npy")

    vol = IOUtils.read_volume(volume_path)
    assert len(numpy.unique(vol.data)) == 4

    conn = numpy.array(numpy.genfromtxt(connectivity_path, dtype='int64'))
    assert numpy.array_equal(conn, [[20, 1, 3], [1, 20, 2], [3, 2, 20]])
Example #3
    def overlap_3_volumes(self, background_path: os.PathLike, overlay_1_path: os.PathLike,
                          overlay_2_path: os.PathLike, use_cc_point: bool,
                          snapshot_name: str=SNAPSHOT_NAME):

        volume_background = IOUtils.read_volume(background_path)
        volume_overlay_1 = IOUtils.read_volume(overlay_1_path)
        volume_overlay_2 = IOUtils.read_volume(overlay_2_path)

        if use_cc_point:
            ras = self.generic_io.get_ras_coordinates(
                self.read_t1_affine_matrix())
        else:
            ras = volume_background.get_center_point()

        for projection in PROJECTIONS:
            try:
                x, y, background_matrix = volume_background.slice_volume(
                    projection, ras)
                x1, y1, overlay_1_matrix = volume_overlay_1.slice_volume(
                    projection, ras)
                x2, y2, overlay_2_matrix = volume_overlay_2.slice_volume(
                    projection, ras)
            except IndexError:
                new_ras = volume_background.get_center_point()
                x, y, background_matrix = volume_background.slice_volume(
                    projection, new_ras)
                x1, y1, overlay_1_matrix = volume_overlay_1.slice_volume(
                    projection, new_ras)
                x2, y2, overlay_2_matrix = volume_overlay_2.slice_volume(
                    projection, new_ras)
                self.logger.info("The volume center point has been used for %s snapshot of %s, %s and %s.", projection,
                                 background_path, overlay_1_path, overlay_2_path)

            self.writer.write_3_matrices(x, y, background_matrix, x1, y1, overlay_1_matrix, x2, y2, overlay_2_matrix,
                                         self.generate_file_name(projection, snapshot_name))
Example #4
def test_label_with_dilation():
    service = VolumeService()

    ct_mask_data = numpy.array(
        [[[0, 0, 0], [0, 1, 0], [0, 1, 0]], [[1, 1, 1], [0, 0, 0], [0, 0, 0]], [[0, 0, 1], [0, 0, 0], [0, 0, 1]]])
    ct_mask_volume = Volume(ct_mask_data,
                            [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]], None)
    ct_mask_path = get_temporary_files_path("ct_mask.nii.gz")
    IOUtils.write_volume(ct_mask_path, ct_mask_volume)

    ct_dil_mask_data = numpy.array(
        [[[0, 0, 0], [1, 1, 1], [0, 1, 0]], [[1, 1, 1], [0, 0, 0], [0, 0, 0]], [[0, 1, 1], [0, 0, 0], [0, 1, 1]]])
    ct_dil_mask_volume = Volume(ct_dil_mask_data,
                                [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]], None)
    ct_dil_mask_path = get_temporary_files_path("ct_dil_mask.nii.gz")
    IOUtils.write_volume(ct_dil_mask_path, ct_dil_mask_volume)

    ct_result = get_temporary_files_path("ct_res.nii.gz")

    service.label_with_dilation(ct_mask_path, ct_dil_mask_path, ct_result)

    assert os.path.exists(ct_mask_path)
    assert os.path.exists(ct_dil_mask_path)
    assert os.path.exists(ct_result)

    vol = IOUtils.read_volume(ct_result)
Example #5
    def overlap_volume_surfaces(self, volume_background: os.PathLike, surfaces_path: os.PathLike,
                                use_center_surface: bool, use_cc_point: bool, snapshot_name: str=SNAPSHOT_NAME):
        volume = IOUtils.read_volume(volume_background)

        if use_cc_point:
            ras = self.generic_io.get_ras_coordinates(
                self.read_t1_affine_matrix())
        else:
            ras = volume.get_center_point()

        surfaces = [IOUtils.read_surface(os.path.expandvars(surface), use_center_surface) for surface in
                    surfaces_path]

        for projection in PROJECTIONS:
            try:
                x, y, background_matrix = volume.slice_volume(projection, ras)
            except IndexError:
                ras = volume.get_center_point()
                x, y, background_matrix = volume.slice_volume(projection, ras)
                self.logger.info("The volume center point has been used for %s snapshot of %s and %s.", projection,
                                 volume_background, surfaces_path)

            clear_flag = True
            for surface_index, surface in enumerate(surfaces):
                surf_x_array, surf_y_array = surface.cut_by_plane(
                    projection, ras)
                self.writer.write_matrix_and_surfaces(x, y, background_matrix, surf_x_array, surf_y_array,
                                                      surface_index, clear_flag)
                clear_flag = False
            self.writer.save_figure(
                self.generate_file_name(projection, snapshot_name))
Example #6
def test_write_volume():
    in_file_path = get_data_file(
        TEST_MODIF_SUBJECT, TEST_VOLUME_FOLDER, "T1.nii.gz")
    volume = IOUtils.read_volume(in_file_path)
    out_file_path = get_temporary_files_path('T1-out.nii.gz')
    IOUtils.write_volume(out_file_path, volume)
    assert os.path.exists(out_file_path)
Example #7
 def test_write_annotation(self):
     file_path = get_data_file(
         self.subject, self.annot_path, "lh.aparc.annot")
     annotation = IOUtils.read_annotation(file_path)
     out_annotation_path = self.temp_file_path("lh-test.aparc.annot")
     IOUtils.write_annotation(out_annotation_path, annotation)
     new_annotation = IOUtils.read_annotation(out_annotation_path)
     self.assertEqual(annotation.region_names, new_annotation.region_names)
Example #8
def test_write_write_brain_visa_surf():
    surface_path = get_data_file(
        TEST_FS_SUBJECT, TEST_SURFACE_FOLDER, "lh.pial")
    out_path = get_temporary_files_path("lh.pial.tri")

    surface = IOUtils.read_surface(surface_path, False)
    IOUtils.write_surface(out_path, surface)

    assert os.path.exists(out_path)
Example #9
def test_write_fs_surface():
    file_path = get_data_file(TEST_FS_SUBJECT, TEST_SURFACE_FOLDER, "lh.pial")
    original_surface = IOUtils.read_surface(file_path, False)
    triangles_number = len(original_surface.triangles)

    output_file_path = get_temporary_files_path("lh-test.pial")
    IOUtils.write_surface(output_file_path, original_surface)

    new_surface = IOUtils.read_surface(output_file_path, False)
    assert triangles_number == len(new_surface.triangles) == 327680
Example #10
    def simple_label_config(self, in_aparc_path: os.PathLike, out_volume_path: os.PathLike):
        """
        Relabel a volume to have contiguous values, as MRtrix's labelconfig does.
        :param in_aparc_path: volume voxel value is the index of the region it belongs to.
        :return: writes the labeled volume to out_volume_path.
        """

        aparc = IOUtils.read_volume(in_aparc_path)
        aparc = self._label_config(aparc)
        IOUtils.write_volume(out_volume_path, aparc)
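
The _label_config helper is not shown here; a minimal sketch of what a contiguous relabeling can look like with numpy.unique (the function name and the use of a plain array are assumptions for illustration):

import numpy

def relabel_contiguous(data):
    # numpy.unique with return_inverse maps arbitrary region codes
    # (e.g. FreeSurfer aparc+aseg values) onto 0..n-1 while preserving order.
    _, inverse = numpy.unique(data, return_inverse=True)
    return inverse.reshape(data.shape)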
Example #11
    def label_vol_from_tdi(self, tdi_volume_path: os.PathLike, out_volume_path: os.PathLike, lo: float=0.5):
        """
        Creates a mask of the voxels with tract ends > lo; all other voxels become 0.
        Labels each voxel different from 0 with integer labels starting from 1.
        :param tdi_volume_path: volume whose voxel value is the sum of tract ends. Voxels without tract ends have value 0.
        :param lo: tract ends threshold used for masking.
        :return: writes the labeled volume to out_volume_path.
        """

        nii_volume = IOUtils.read_volume(tdi_volume_path)
        tdi_volume = self._label_volume(nii_volume, lo)
        IOUtils.write_volume(out_volume_path, tdi_volume)
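
The _label_volume helper is not shown either; following the docstring (threshold at lo, then number every surviving voxel from 1), a sketch of the masking and labeling could be (names are illustrative):

import numpy

def label_above_threshold(data, lo=0.5):
    mask = data > lo
    labels = numpy.zeros(data.shape, dtype='int64')
    # Every voxel above the threshold gets its own integer label, starting at 1.
    labels[mask] = numpy.r_[1:mask.sum() + 1]
    return labels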
Example #12
    def show_aparc_aseg_with_new_values(
            self, aparc_aseg_volume_path: os.PathLike, region_values_path: os.PathLike,
            background_volume_path: os.PathLike, use_cc_point: bool,
            fs_to_conn_indices_mapping_path: os.PathLike=FS_TO_CONN_INDICES_MAPPING_PATH,
            snapshot_name: str=SNAPSHOT_NAME):
        """

        Parameters
        ----------
        aparc_aseg_volume_path
        region_values_path
        background_volume_path
        use_cc_point
        fs_to_conn_indices_mapping_path
        snapshot_name

        Returns
        -------

        """

        aparc_aseg_volume = IOUtils.read_volume(aparc_aseg_volume_path)

        fs_to_conn_indices_mapping = {}
        with open(fs_to_conn_indices_mapping_path, 'r') as fd:
            for line in fd.readlines():
                key, _, val = line.strip().split()
                fs_to_conn_indices_mapping[int(key)] = int(val)

        len_fs_conn = len(fs_to_conn_indices_mapping)

        conn_measure = np.loadtxt(region_values_path)
        npad = len_fs_conn - conn_measure.size
        conn_measure = np.pad( conn_measure, (0, npad), 'constant')

        if use_cc_point:
            ras = self.generic_io.get_ras_coordinates(
                self.read_t1_affine_matrix())
        else:
            ras = aparc_aseg_volume.get_center_point()

        background_volume = None
        if background_volume_path:
            background_volume = IOUtils.read_volume(background_volume_path)

        for projection in PROJECTIONS:
            self._aparc_aseg_projection(
                aparc_aseg_volume, aparc_aseg_volume_path, projection, ras,
                fs_to_conn_indices_mapping,
                background_volume, background_volume_path,
                snapshot_name, conn_measure
            )
Example #13
    def remove_zero_connectivity_nodes(self, node_volume_path: os.PathLike, connectivity_matrix_path: os.PathLike,
                                       tract_length_path: Optional[str]=None):
        """
        It removes network nodes with zero connectivity from the volume and connectivity matrices.
        The zero connectivity nodes will be labeled with 0 in the volume and the remaining labels will be updated.
        The connectivity matrices will be symmetric.
        :param node_volume_path: tdi_lbl.nii volume path
        :param connectivity_matrix_path: .csv file, output of Mrtrix3 tck2connectome
        :param tract_length_path: optional .csv tract lengths matrix
        :return: overwrites the input volume and matrices with the processed ones. Also saves matrices as .npy.
        """

        node_volume = IOUtils.read_volume(node_volume_path)

        connectivity = numpy.array(numpy.genfromtxt(
            connectivity_matrix_path, dtype='int64'))
        connectivity = connectivity + connectivity.T
        connectivity_row_sum = numpy.sum(connectivity, axis=0)

        nodes_to_keep_indices = connectivity_row_sum > 0
        connectivity = connectivity[nodes_to_keep_indices, :][
                       :, nodes_to_keep_indices]

        numpy.save(os.path.splitext(connectivity_matrix_path)
                   [0] + NPY_EXTENSION, connectivity)
        numpy.savetxt(connectivity_matrix_path, connectivity, fmt='%1d')

        if os.path.exists(str(tract_length_path)):
            connectivity = numpy.array(numpy.genfromtxt(
                tract_length_path, dtype='int64'))
            connectivity = connectivity[nodes_to_keep_indices, :][
                           :, nodes_to_keep_indices]

            numpy.save(os.path.splitext(tract_length_path)
                       [0] + NPY_EXTENSION, connectivity)
            numpy.savetxt(tract_length_path, connectivity, fmt='%1d')

        else:
            self.logger.warning("Path %s is not valid.", tract_length_path)

        nodes_to_remove_indices, = numpy.where(~nodes_to_keep_indices)
        nodes_to_remove_indices += 1

        for node_index in nodes_to_remove_indices:
            node_volume.data[node_volume.data == node_index] = 0

        node_volume.data[node_volume.data > 0] = numpy.r_[
                                                 1:(connectivity.shape[0] + 1)]

        IOUtils.write_volume(node_volume_path, node_volume)
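
The matrix pruning above relies on boolean fancy indexing; the same steps on a toy matrix (the values mirror the test in Example #2):

import numpy

conn = numpy.array([[10, 1, 0, 3],
                    [0, 10, 0, 2],
                    [0, 0, 0, 0],
                    [0, 0, 0, 10]])
conn = conn + conn.T            # symmetrize (the tck2connectome output is assumed triangular here)
keep = conn.sum(axis=0) > 0     # nodes with at least one connection
conn = conn[keep, :][:, keep]   # drop the zero rows and the matching columns
print(conn)
# [[20  1  3]
#  [ 1 20  2]
#  [ 3  2 20]]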
Example #14
 def test_overlap_surface_annotation(self):
     writer = ImageWriter(SNAPSHOTS_DIRECTORY)
     surface_path = get_data_file(self.head2, "SurfaceCortical.h5")
     surface = IOUtils.read_surface(surface_path, False)
     annot_path = get_data_file(self.head2, "RegionMapping.h5")
     annot = IOUtils.read_annotation(annot_path)
     annot.region_names = ['reg1', 'reg2']
     annot.regions_color_table = numpy.array(
         [[200, 200, 200, 255, 30567], [100, 150, 200, 255, 30568]])
     resulted_file_name = self.processor.generate_file_name(
         'surface_annotation', SNAPSHOT_NAME)
     writer.write_surface_with_annotation(surface, annot, resulted_file_name)
     fname = '%s0' % (resulted_file_name, )
     self._assert_writer_path_exists(fname)
Example #15
    def test_merge_surfaces(self,):
        h5_surface_path = get_data_file("head2", "SurfaceCortical.h5")
        h5_surface = IOUtils.read_surface(h5_surface_path, False)

        vtx = h5_surface.vertices
        tri = h5_surface.triangles
        nv = vtx.size
        nt = tri.size
        nv2 = int(nv / 2)
        nt2 = int(nt / 2)

        lh_surface = Surface(vtx[:nv2], tri[:nt2])
        rh_surface = Surface(vtx[nv2:], tri[nt2:])

        # h5_region_mapping_path = get_data_file("head2", "RegionMapping.h5")
        # annotation = IOUtils.read_annotation(h5_region_mapping_path)
        #
        # lh_region_mapping = annotation.region_mapping[:len(annotation.region_mapping) / 2]
        # rh_region_mapping = annotation.region_mapping[len(annotation.region_mapping) / 2:]

        out_surface = self.service.merge_surfaces([lh_surface, rh_surface])
        self.assertEqual(len(out_surface.vertices),
                         len(lh_surface.vertices) + len(rh_surface.vertices))
        self.assertEqual(len(out_surface.triangles),
                         len(lh_surface.triangles) + len(rh_surface.triangles))
Example #16
 def annot_to_conn_conf(self, annot_path, type, conn_conf_path, first_idx=0):
     annotation_lh = IOUtils.read_annotation(os.path.join(annot_path, "lh." + type + ".annot"))
     annotation_rh = IOUtils.read_annotation(os.path.join(annot_path, "rh." + type + ".annot"))
     with open(conn_conf_path, 'w') as fd:
         for id, name in enumerate(annotation_lh.region_names):
             if type == "aparc" and name != "unknown":
                 name = "lh-" + name
             fd.write('%d\t%s\n' % (id + first_idx, name))
         first_idx += len(annotation_lh.region_names)
         for id, name in enumerate(annotation_rh.region_names):
             if (name == "unknown"):
                 first_idx -= 1
                 continue
             if type == "aparc" and name != "unknown":
                 name = "rh-" + name
             fd.write('%d\t%s\n' % (id + first_idx, name))
     return first_idx + len(annotation_rh.region_names)
Example #17
def create_tvb_dataset(atlas_suffix: AtlasSuffix, mri_direc: os.PathLike,
                       region_details_direc: os.PathLike,
                       weights_file: os.PathLike,
                       tracts_file: os.PathLike,
                       out_dir: os.PathLike,
                       bring_t1=False):
    weights_matrix = numpy.loadtxt(str(weights_file), dtype='i', delimiter=' ')
    weights_matrix += weights_matrix.T

    tracts_matrix = numpy.loadtxt(str(tracts_file), dtype='f', delimiter=' ')
    tracts_matrix += tracts_matrix.T

    is_cortical_rm = numpy.genfromtxt(
        os.path.join(region_details_direc, AsegFiles.CORTICAL_TXT.value.replace("%s", atlas_suffix)), usecols=[0],
        dtype='i')
    region_names = numpy.genfromtxt(
        os.path.join(region_details_direc, AsegFiles.CENTERS_TXT.value.replace("%s", atlas_suffix)), usecols=[0],
        dtype="str")
    region_centers = numpy.genfromtxt(
        os.path.join(region_details_direc, AsegFiles.CENTERS_TXT.value.replace("%s", atlas_suffix)), usecols=[1, 2, 3])
    region_areas = numpy.genfromtxt(
        os.path.join(region_details_direc, AsegFiles.AREAS_TXT.value.replace("%s", atlas_suffix)), usecols=[0])
    region_orientations = numpy.genfromtxt(
        os.path.join(region_details_direc, AsegFiles.ORIENTATIONS_TXT.value.replace("%s", atlas_suffix)),
        usecols=[0, 1, 2])
    rm_idx = numpy.genfromtxt(
        os.path.join(region_details_direc, AsegFiles.RM_TO_APARC_ASEG_TXT.value.replace("%s", atlas_suffix)),
        usecols=[0, 1], dtype='i')
    rm_index_dict = dict(zip(rm_idx[:, 0], rm_idx[:, 1]))
    print(rm_index_dict)

    genericIO = GenericIO()
    genericIO.write_connectivity_zip(out_dir, weights_matrix, tracts_matrix, is_cortical_rm, region_names,
                                     region_centers, region_areas, region_orientations, atlas_suffix)

    aparc_aseg_file = os.path.join(mri_direc, T1Files.APARC_ASEG_NII_GZ.value.replace("%s", atlas_suffix))
    aparc_aseg_volume = IOUtils.read_volume(aparc_aseg_file)

    volume_service = VolumeService()
    aparc_aseg_cor_volume = volume_service.change_labels_of_aparc_aseg(atlas_suffix, aparc_aseg_volume, rm_index_dict,
                                                                       weights_matrix.shape[0])
    IOUtils.write_volume(os.path.join(out_dir, OutputConvFiles.APARC_ASEG_COR_NII_GZ.value.replace("%s", atlas_suffix)),
                         aparc_aseg_cor_volume)

    if bring_t1:
        shutil.copy2(os.path.join(mri_direc, "T1.nii.gz"), out_dir)
Example #18
def test_read_transformation_matrix_from_gifti_metadata():
    file_path = get_data_file(
        TEST_MODIF_SUBJECT, TEST_SURFACE_FOLDER, "lh.pial.gii")
    surface_io = IOUtils.surface_io_factory(file_path)
    surf = surface_io.read(file_path, False)
    matrix = surface_io.read_transformation_matrix_from_metadata(
        surf.get_main_metadata())
    assert matrix == [[-1, 0, 0, 0], [0, 0, 1, 0], [0, -1, 0, 0], [0, 0, 0, 1]]
Example #19
def test_write_transformation_matrix_fs_metadata():
    file_path = get_data_file(TEST_FS_SUBJECT, TEST_SURFACE_FOLDER, "lh.pial")
    surface_io = IOUtils.surface_io_factory(file_path)
    surf = surface_io.read(file_path, False)
    surface_io.write_transformation_matrix(surf.get_main_metadata())
    matrix = surface_io.read_transformation_matrix_from_metadata(
        surf.get_main_metadata())
    assert matrix.tolist() == [[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [
        0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]]
Example #20
    def aseg_surf_conc_annot(self, surf_path: str, out_surf_path: str, annot_path: str,
                             label_indices: str, lut_path: Optional[str]=None) -> Surface:
        """
        Concatenate surfaces, each corresponding to one specific label of interest, into a single annotated surface.
        """

        lut_path = lut_path or default_lut_path()

        label_names, color_table = self.annotation_service.lut_to_annot_names_ctab(lut_path=lut_path,
                                                                                   labels=label_indices)
        label_indices = numpy.array(label_indices.split()).astype('i')

        #                  verts tri area_mask cras
        surfaces = []
        out_annotation = Annotation([], [], [])
        label_number = -1

        for label_index in label_indices:
            # TODO: This is hardcoded: /aseg-%06d here and also in pegasus dax generator
            this_surf_path = surf_path + "/aseg-%06d" % int(label_index)

            if os.path.exists(this_surf_path):
                ind_l, = numpy.where(label_indices == label_index)
                out_annotation.add_region_names_and_colors(
                    label_names[int(ind_l)],
                    color_table[ind_l, :])
                label_number += 1
                surfaces.append(IOUtils.read_surface(this_surf_path, False))
                out_annotation.add_region_mapping(
                    label_number * numpy.ones((surfaces[-1].n_vertices,), dtype='int64'))
        out_surface = self.merge_surfaces(surfaces)
        # out_annotation.regions_color_table = numpy.squeeze(numpy.array(out_annotation.regions_color_table).astype('i'))

        IOUtils.write_surface(out_surf_path, out_surface)
        IOUtils.write_annotation(annot_path, out_annotation)

        return out_surface
Example #21
    def compute_gdist_mat(self, surf_name: str='pial', max_distance: float=40.0) -> numpy.ndarray:
        max_distance = float(max_distance)  # in case passed from sys.argv
        for h in 'rl':
            subjects_dir = os.environ['SUBJECTS_DIR']
            subject = os.environ['SUBJECT']
            surf_path = '%s/%s/surf/%sh.%s' % (subjects_dir,
                                               subject, h, surf_name)
            surface = IOUtils.read_surface(surf_path, False)
            mat_path = '%s/%s/surf/%sh.%s.gdist.mat' % (
                subjects_dir, subject, h, surf_name)
            mat = gdist.local_gdist_matrix(
                surface.vertices, surface.triangles.astype('<i4'), max_distance=max_distance)
            scipy.io.savemat(mat_path, {'gdist': mat})

            # Note: this return sits inside the hemisphere loop, so only the first hemisphere ('r') is processed.
            return mat
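
gdist.local_gdist_matrix returns a SciPy sparse matrix of pairwise geodesic distances not exceeding max_distance; a standalone sketch on a toy mesh (two triangles forming a unit square), assuming the tvb-gdist package is installed:

import numpy
import gdist

vertices = numpy.array([[0, 0, 0],
                        [1, 0, 0],
                        [0, 1, 0],
                        [1, 1, 0]], dtype=numpy.float64)
triangles = numpy.array([[0, 1, 2],
                         [1, 3, 2]], dtype=numpy.int32)

# Entry (i, j) holds the geodesic distance between vertices i and j,
# computed only where it does not exceed max_distance.
mat = gdist.local_gdist_matrix(vertices, triangles, max_distance=2.0)
print(mat.toarray())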
Example #22
    def test_aseg_surf_conc_annot(self,):
        out_surf_path = get_temporary_files_path("out_aseg")
        out_annot_path = get_temporary_files_path("out_annot")
        labels = "10 11"
        colorLUT = get_data_file("colorLUT.txt")
        self.service.aseg_surf_conc_annot(
            data_path, out_surf_path, out_annot_path, labels, colorLUT)
        self.assertTrue(os.path.exists(out_surf_path))
        self.assertTrue(os.path.exists(out_annot_path))

        surface_parser = FreesurferIO()
        surface = surface_parser.read(out_surf_path, False)
        self.assertEqual(len(surface.vertices), 5714)
        self.assertEqual(len(surface.triangles), 11420)

        annotation = IOUtils.read_annotation(out_annot_path)
        assert_array_equal(
                annotation.regions_color_table,
                [[0, 118, 14, 0, 947712], [122, 186, 220, 0, 14465658]])
Example #23
 def con_vox_in_ras(self, ref_vol_path: os.PathLike) -> (numpy.ndarray, numpy.ndarray):
     """
     This function reads a tdi_lbl volume and returns the voxels that correspond to connectome nodes,
     and their coordinates in ras space, simply by applying the affine transform of the volume
     :param ref_vol_path: the path to the tdi_lbl volume
      :return: vox and voxxzy,
              i.e., the labels (integers >= 1) and the RAS coordinates of the connectome node voxels, respectively
     """
     # Read the reference tdi_lbl volume:
     vollbl = IOUtils.read_volume(ref_vol_path)
     vox = vollbl.data.astype('i')
     # Get only the voxels that correspond to connectome nodes:
     voxijk, = numpy.where(vox.flatten() > 0)
     voxijk = numpy.unravel_index(voxijk, vollbl.dimensions)
     vox = vox[voxijk[0], voxijk[1], voxijk[2]]
     # ...and their coordinates in ras xyz space
     voxxzy = vollbl.affine_matrix.dot(numpy.c_[voxijk[0], voxijk[1], voxijk[
         2], numpy.ones(vox.shape[0])].T)[:3].T
     return vox, voxxzy
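
The last line applies the 4x4 affine to homogeneous voxel indices; the same voxel-to-RAS transform on made-up indices, as a standalone sketch:

import numpy

affine = numpy.array([[1., 0., 0., -128.],
                      [0., 1., 0., -128.],
                      [0., 0., 1., -128.],
                      [0., 0., 0.,    1.]])
i = numpy.array([10, 20, 30])
j = numpy.array([40, 50, 60])
k = numpy.array([70, 80, 90])

# Stack (i, j, k, 1) as columns, apply the affine, drop the homogeneous row.
xyz = affine.dot(numpy.c_[i, j, k, numpy.ones(i.size)].T)[:3].T
print(xyz)   # one RAS coordinate (x, y, z) per voxel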
Example #24
    def show_single_volume(self, volume_path: os.PathLike, use_cc_point: bool,
                           snapshot_name: os.PathLike=SNAPSHOT_NAME):

        volume = IOUtils.read_volume(volume_path)

        if use_cc_point:
            ras = self.generic_io.get_ras_coordinates(
                self.read_t1_affine_matrix())
        else:
            ras = volume.get_center_point()

        for projection in PROJECTIONS:
            try:
                x_axis_coords, y_axis_coords, volume_matrix = volume.slice_volume(
                    projection, ras)
            except IndexError:
                new_ras = volume.get_center_point()
                x_axis_coords, y_axis_coords, volume_matrix = volume.slice_volume(
                    projection, new_ras)
                self.logger.info("The volume center point has been used for %s snapshot of %s.", projection,
                                 volume_path)

            self.writer.write_matrix(x_axis_coords, y_axis_coords, volume_matrix,
                                     self.generate_file_name(projection, snapshot_name))
Example #25
 def test_parse_h5_annotation(self):
     h5_path = get_data_file('head2', 'RegionMapping.h5')
     annotation = IOUtils.read_annotation(h5_path)
     self.assertEqual(annotation.region_mapping.size, 16)
Example #26
 def test_parse_not_annotation(self):
     file_path = get_data_file(self.subject, "surf", "lh.pial")
     annotation_io = IOUtils.annotation_io_factory(file_path)
     self.assertRaises(ValueError, annotation_io.read, file_path)
Example #27
 def test_parse_not_existent_annotation(self):
     file_path = "not_existent_annotation.annot"
     annotation_io = IOUtils.annotation_io_factory(file_path)
     self.assertRaises(IOError, annotation_io.read, file_path)
Example #28
 def test_parse_annotation(self):
     file_path = get_data_file(
         self.subject, self.annot_path, "lh.aparc.annot")
     annot = IOUtils.read_annotation(file_path)
     self.assertEqual(_expected_region_names, annot.region_names)

Example #29
if __name__ == "__main__":
    args = parse_arguments()

    surface_path = os.path.expandvars(args.surface_path)
    output_path = os.path.expandvars(args.output_path)

    logger = get_logger(__name__)

    image_processor = ImageProcessor(snapshots_directory=os.environ[SNAPSHOTS_DIRECTORY_ENVIRON_VAR],
                                     snapshot_count=int(os.environ.get(SNAPSHOT_NUMBER_ENVIRON_VAR, 0)))
    generic_io = GenericIO()

    logger.info("The surface transformation process has began")
    surface_io = IOUtils.surface_io_factory(surface_path)
    surface = surface_io.read(surface_path, False)

    if len(args.matrix_paths) != 0:
        transformation_matrices = []

        for transform_matrix_path in args.matrix_paths:
            transformation_matrices.append(
                numpy.array(generic_io.read_transformation_matrix(os.path.expandvars(transform_matrix_path))))

        for i in range(len(surface.vertices)):
            for j in range(len(transformation_matrices)):
                if len(transformation_matrices[j]) > 3:
                    vertex_coords = numpy.array(
                        [surface.vertices[i][0], surface.vertices[i][1], surface.vertices[i][2], 1])
                else:
Example #30
    def annot_to_lut(self,
                     annot_path,
                     lut_path=None,
                     subject=None,
                     prefix=''):
        """
        This function creates a new lut file from an annotation, or adds new
        entries to an existing lut file. In the latter case, new entries get
        labels greater than the maximum label already present in the lut
        file.

        Parameters
        ----------
        annot_path : str, os.PathLike
            path to annotation.
        lut_path : str, os.PathLike
            path to existing or new LUT file.
        subject : str, optional
            subject name if provided, otherwise env var $SUBJECT is used
        prefix : str, optional
            prefix for region names (e.g., "ctx-lh-")

        """
        annotation = IOUtils.read_annotation(annot_path)
        subject = subject or os.environ['SUBJECT']
        # If this is an already existing lut file:
        lut_path = lut_path or default_lut_path()
        if os.path.isfile(lut_path):
            # ...find the maximum label in it and add 1
            add_lbl = 1 + \
                      numpy.max(self.read_lut(
                          lut_path=lut_path, key_mode='label')[0])
        else:
            # ...else, set it to 0
            add_lbl = 0
        with open(lut_path, 'a') as fd:
            if add_lbl == 0:
                # TODO: we should include an environment variable for
                # freesurfer version, and print it here
                fd.write("#$Id: %s %s\n\n" % (lut_path, datetime.now()))
                fd.write('#No.\tLabel Name: \tR   G   B   A   \n')
            else:
                fd.write('\n')
            fd.write("""
#Patient: {subject}
#User: {user}
#Annotation path: {annot_path}
#Time: {time}

""".format(
                subject=subject,
                user=os.path.split(os.path.expanduser('~'))[-1],
                annot_path=annot_path,
                time=datetime.now())
            )
            # TODO: align columns
            # NOTE!!! that the fourth and fifth columns of color_table are not
            # used in the lut file!!!
            for name, (r, g, b, dummy1, dummy2), lbl in \
                    zip(annotation.region_names, annotation.regions_color_table,
                        list(range(len(annotation.region_names)))):
                fd.write('%d\t%s\t%d %d %d %d\n' %
                         (lbl + add_lbl, prefix + name, r, g, b, 0))
Example #31
    def sample_vol_on_surf(self, surf_path: str, vol_path: str, annot_path: str, out_surf_path: str,
                           cras_path: str, add_string: str='', vertex_neighbourhood: int=1,
                           add_lbl: list=[], lut_path: Optional[str]=None) -> (Surface, Annotation):
        """
        Sample a volume of a specific label on a surface, keeping only those surface vertices whose nearest voxel
        has the given label (or, optionally, one of the additional target labels, such as white matter).
        Optionally also keep vertices within a given voxel neighbourhood (vertex_neighbourhood) of the target voxels.
        """

        lut_path = lut_path or default_lut_path()

        # Read the inputs
        surface = IOUtils.read_surface(surf_path, False)

        annotation = IOUtils.read_annotation(annot_path)
        labels = self.annotation_service.annot_names_to_labels(annotation.region_names,
                                                               add_string=add_string, lut_path=lut_path)
        region_mapping_indexes = numpy.unique(annotation.region_mapping)

        volume_parser = VolumeIO()
        volume = volume_parser.read(vol_path)
        ras2vox_affine_matrix = numpy.linalg.inv(volume.affine_matrix)

        cras = numpy.loadtxt(cras_path)

        grid, n_grid = self.__prepare_grid(vertex_neighbourhood)

        # Initialize the output mask:
        verts_out_mask = numpy.repeat([False], surface.vertices.shape[0])
        for label_index in range(len(region_mapping_indexes)):

            self.logger.info("%s", add_string +
                             annotation.region_names[label_index])

            # Get the indexes of the vertices corresponding to this label:
            verts_indices_of_label, = numpy.where(
                annotation.region_mapping[:] == region_mapping_indexes[label_index])
            verts_indices_of_label_size = verts_indices_of_label.size
            if verts_indices_of_label_size == 0:
                continue

            # Add any additional labels
            all_labels = [labels[label_index]] + add_lbl

            # get the vertices for current label and add cras to take them to
            # scanner ras
            verts_of_label = surface.vertices[verts_indices_of_label, :]
            verts_of_label += numpy.repeat(numpy.expand_dims(
                cras, 1).T, verts_indices_of_label_size, axis=0)

            # Compute the nearest voxel coordinates using the affine transform
            ijk = numpy.round(
                ras2vox_affine_matrix.dot(numpy.c_[verts_of_label, numpy.ones(verts_indices_of_label_size)].T)[:3].T) \
                .astype('i')

            # Get the labels of these voxels:
            surf_vxls = volume.data[ijk[:, 0], ijk[:, 1], ijk[:, 2]]

            # Vertex mask to keep: those that correspond to voxels of one of
            # the target labels
            # surf_vxls==lbl if only one target label
            verts_keep, = numpy.where(numpy.in1d(surf_vxls, all_labels))
            verts_out_mask[verts_indices_of_label[verts_keep]] = True

            if vertex_neighbourhood > 0:
                # These are now the remaining indexes to be checked for
                # neighboring voxels
                verts_indices_of_label = numpy.delete(
                    verts_indices_of_label, verts_keep)
                ijk = numpy.delete(ijk, verts_keep, axis=0)

                for vertex_index in range(verts_indices_of_label.size):
                    # Generate the specific grid centered at the voxel ijk
                    ijk_grid = grid + \
                               numpy.tile(ijk[vertex_index, :], (n_grid, 1))

                    # Remove voxels outside the volume
                    indexes_within_limits = numpy.all([(ijk_grid[:, 0] >= 0), (ijk_grid[:, 0] < volume.dimensions[0]),
                                                       (ijk_grid[:, 1] >= 0), (ijk_grid[
                                                                               :, 1] < volume.dimensions[1]),
                                                       (ijk_grid[:, 2] >= 0), (ijk_grid[:, 2] < volume.dimensions[2])],
                                                      axis=0)
                    ijk_grid = ijk_grid[indexes_within_limits, :]
                    surf_vxls = volume.data[
                        ijk_grid[:, 0], ijk_grid[:, 1], ijk_grid[:, 2]]

                    # If any of the neighbors is of the target labels include
                    # the current vertex
                    # surf_vxls==lbl if only one target label
                    if numpy.any(numpy.in1d(surf_vxls, all_labels)):
                        verts_out_mask[
                            verts_indices_of_label[vertex_index]] = True

        # Vertex indexes and vertices to keep:
        verts_out_indices, = numpy.where(verts_out_mask)
        verts_out = surface.vertices[verts_out_indices]

        # TODO maybe: make sure that all voxels of this label correspond to at least one vertex.
        # Create a similar mask for faces by picking only triangles of which
        # all 3 vertices are included
        face_out_mask = numpy.c_[
            verts_out_mask[surface.triangles[:, 0]], verts_out_mask[surface.triangles[:, 1]], verts_out_mask[
                surface.triangles[:, 2]]].all(axis=1)
        faces_out = surface.triangles[face_out_mask]

        # The old vertices' indexes of faces have to be transformed to the new
        # vrtx_out_inds:
        for iF in range(faces_out.shape[0]):
            for vertex_index in range(3):
                faces_out[iF, vertex_index], = numpy.where(
                    faces_out[iF, vertex_index] == verts_out_indices)

        surface.vertices = verts_out
        surface.triangles = faces_out

        # Write the output surfaces and annotations to files. Also write files
        # with the indexes of vertices to keep.
        IOUtils.write_surface(out_surf_path, surface)

        annotation.set_region_mapping(
            annotation.get_region_mapping_by_indices([verts_out_indices]))
        IOUtils.write_annotation(out_surf_path + ".annot", annotation)

        numpy.save(out_surf_path + "-idx.npy", verts_out_indices)
        numpy.savetxt(out_surf_path + "-idx.txt", verts_out_indices, fmt='%d')

        return surface, annotation
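
The final nested loop re-indexes faces_out from old vertex indices to positions in verts_out_indices; since numpy.where returns indices in ascending order, an equivalent vectorized form would use numpy.searchsorted. A self-contained sketch with toy data:

import numpy

verts_out_indices = numpy.array([2, 5, 7, 9])        # kept vertex ids (sorted, from numpy.where)
faces_out = numpy.array([[2, 5, 9], [5, 7, 9]])      # triangles still using the old ids
faces_out = numpy.searchsorted(verts_out_indices, faces_out)
print(faces_out)   # [[0 1 3]
                   #  [1 2 3]]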
Example #32
 def convert_fs_to_brain_visa(self, in_surf_path: str, out_surf_path: Optional[str]=None):
     surface = IOUtils.read_surface(in_surf_path, False)
     if out_surf_path is None:
         out_surf_path = in_surf_path + '.tri'
     IOUtils.write_surface(out_surf_path, surface)
Example #33
def create_tvb_dataset(atlas_suffix: AtlasSuffix,
                       mri_direc: os.PathLike,
                       region_details_direc: os.PathLike,
                       weights_file: os.PathLike,
                       tracts_file: os.PathLike,
                       out_dir: os.PathLike,
                       bring_t1=False):
    weights_matrix = numpy.loadtxt(str(weights_file), dtype='i', delimiter=' ')
    weights_matrix += weights_matrix.T

    tracts_matrix = numpy.loadtxt(str(tracts_file), dtype='f', delimiter=' ')
    tracts_matrix += tracts_matrix.T

    is_cortical_rm = numpy.genfromtxt(os.path.join(
        region_details_direc,
        AsegFiles.CORTICAL_TXT.value.replace("%s", atlas_suffix)),
                                      usecols=[0],
                                      dtype='i')
    region_names = numpy.genfromtxt(os.path.join(
        region_details_direc,
        AsegFiles.CENTERS_TXT.value.replace("%s", atlas_suffix)),
                                    usecols=[0],
                                    dtype="str")
    region_centers = numpy.genfromtxt(os.path.join(
        region_details_direc,
        AsegFiles.CENTERS_TXT.value.replace("%s", atlas_suffix)),
                                      usecols=[1, 2, 3])
    region_areas = numpy.genfromtxt(os.path.join(
        region_details_direc,
        AsegFiles.AREAS_TXT.value.replace("%s", atlas_suffix)),
                                    usecols=[0])
    region_orientations = numpy.genfromtxt(os.path.join(
        region_details_direc,
        AsegFiles.ORIENTATIONS_TXT.value.replace("%s", atlas_suffix)),
                                           usecols=[0, 1, 2])
    rm_idx = numpy.genfromtxt(os.path.join(
        region_details_direc,
        AsegFiles.RM_TO_APARC_ASEG_TXT.value.replace("%s", atlas_suffix)),
                              usecols=[0, 1],
                              dtype='i')
    rm_index_dict = dict(zip(rm_idx[:, 0], rm_idx[:, 1]))
    print(rm_index_dict)

    genericIO = GenericIO()
    genericIO.write_connectivity_zip(out_dir, weights_matrix, tracts_matrix,
                                     is_cortical_rm, region_names,
                                     region_centers, region_areas,
                                     region_orientations, atlas_suffix)

    aparc_aseg_file = os.path.join(
        mri_direc, T1Files.APARC_ASEG_NII_GZ.value.replace("%s", atlas_suffix))
    aparc_aseg_volume = IOUtils.read_volume(aparc_aseg_file)

    volume_service = VolumeService()
    aparc_aseg_cor_volume = volume_service.change_labels_of_aparc_aseg(
        atlas_suffix, aparc_aseg_volume, rm_index_dict,
        weights_matrix.shape[0])
    IOUtils.write_volume(
        os.path.join(
            out_dir,
            OutputConvFiles.APARC_ASEG_COR_NII_GZ.value.replace(
                "%s", atlas_suffix)), aparc_aseg_cor_volume)

    if bring_t1:
        shutil.copy2(os.path.join(mri_direc, "T1.nii.gz"), out_dir)