Example #1
def test_connectivity_matrix():
    label_volume = np.array([[[3, 0, 0], [0, 0, 0], [0, 0, 4]]])
    streamlines = [
        np.array([[0, 0, 0], [0, 0, 0], [0, 2, 2]], 'float'),
        np.array([[0, 0, 0], [0, 1, 1], [0, 2, 2]], 'float'),
        np.array([[0, 2, 2], [0, 1, 1], [0, 0, 0]], 'float')
    ]
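    # Streamlines 0 and 1 run from the label-3 voxel to the label-4 voxel,
    # while streamline 2 runs the opposite way, giving the asymmetric
    # counts below.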
    expected = np.zeros((5, 5), 'int')
    expected[3, 4] = 2
    expected[4, 3] = 1
    # Check basic case
    matrix = connectivity_matrix(streamlines,
                                 np.eye(4),
                                 label_volume,
                                 symmetric=False)
    npt.assert_array_equal(matrix, expected)
    # Test mapping
    matrix, mapping = connectivity_matrix(streamlines,
                                          np.eye(4),
                                          label_volume,
                                          symmetric=False,
                                          return_mapping=True)
    npt.assert_array_equal(matrix, expected)
    npt.assert_equal(mapping[3, 4], [0, 1])
    npt.assert_equal(mapping[4, 3], [2])
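    # Without mapping_as_streamlines, the mapping values are indices into
    # the input streamlines list.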
    npt.assert_equal(mapping.get((0, 0)), None)
    # Test mapping and symmetric
    matrix, mapping = connectivity_matrix(streamlines,
                                          np.eye(4),
                                          label_volume,
                                          symmetric=True,
                                          return_mapping=True)
    npt.assert_equal(mapping[3, 4], [0, 1, 2])
    # When symmetric only (3,4) is a key, not (4, 3)
    npt.assert_equal(mapping.get((4, 3)), None)
    # expected output matrix is symmetric version of expected
    expected = expected + expected.T
    npt.assert_array_equal(matrix, expected)
    # Test mapping_as_streamlines, mapping dict has lists of streamlines
    matrix, mapping = connectivity_matrix(streamlines,
                                          np.eye(4),
                                          label_volume,
                                          symmetric=False,
                                          return_mapping=True,
                                          mapping_as_streamlines=True)
    assert_true(mapping[3, 4][0] is streamlines[0])
    assert_true(mapping[3, 4][1] is streamlines[1])
    assert_true(mapping[4, 3][0] is streamlines[2])

    # Test passing affine to connectivity_matrix
    affine = np.diag([-1, -1, -1, 1.])
    streamlines = [-i for i in streamlines]
    matrix = connectivity_matrix(streamlines, affine, label_volume)
    # In the symmetrical case, the matrix should be, well, symmetric:
    npt.assert_equal(matrix[4, 3], matrix[3, 4])
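
A minimal sketch contrasting the two DIPY calling conventions that appear throughout these examples, assuming a streamlines list and an integer label_volume like the ones in the test above (check your installed DIPY version for the exact signature):

import numpy as np
from dipy.tracking.utils import connectivity_matrix

# DIPY >= 1.0: the affine is the second positional argument.
matrix = connectivity_matrix(streamlines, np.eye(4), label_volume)

# Older DIPY: label_volume comes second and the affine (or a voxel size)
# is passed as a keyword, e.g.:
# matrix = connectivity_matrix(streamlines, label_volume, affine=np.eye(4))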
Example #2
def comp_con_mat(n, fa, streamlines, lab_labels_index, affine, folder_name):
    import scipy.io as sio
    from dipy.tracking.streamline import values_from_volume

    num, grouping = utils.connectivity_matrix(streamlines, affine, lab_labels_index,
                                                    return_mapping=True,
                                                    mapping_as_streamlines=True)
    num_mat = num[1:, 1:]
    num_mat = np.asarray(num_mat, dtype='float64')
    vol_vec = fa.flatten()
    q = np.quantile(vol_vec[vol_vec > 0], 0.95)
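    # q is the 95th percentile of the non-zero FA values; values above it
    # are treated as outliers when averaging along each tract below.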
    fa_mat = np.zeros(np.shape(num_mat), dtype='float64')

    for pair, tracts in grouping.items():
        if pair[0] == 0 or pair[1] == 0:
            continue
        else:
            mean_vol_per_tract = []
            vol_per_tract = values_from_volume(fa, tracts, affine=affine)
            for s in vol_per_tract:
                s = np.asanyarray(s)
                non_out = s < q
                mean_vol_per_tract.append(np.nanmean(s[non_out]))

            mean_path_vol = np.nanmean(mean_vol_per_tract)

            fa_mat[pair[0]-1, pair[1]-1] = mean_path_vol
            fa_mat[pair[1]-1, pair[0]-1] = mean_path_vol

    mat_file_name = rf'{folder_name}\{n}_con_mat.mat'
    sio.savemat(mat_file_name, {'number_of_tracts': num_mat,'fa':fa_mat})
Example #3
def non_weighted_con_mat_mega(streamlines,
                              lab_labels_index,
                              affine,
                              idx,
                              folder_name,
                              fig_type=""):
    from dipy.tracking import utils
    import numpy as np

    if len(fig_type) > 0:
        fig_type = "_" + fig_type

    m, grouping = utils.connectivity_matrix(
        streamlines,
        affine,
        lab_labels_index,
        return_mapping=True,
        mapping_as_streamlines=True,
    )
    mm = m[1:]
    mm = mm[:, 1:]
    mm = mm[idx]
    mm = mm[:, idx]
    new_data = (
        1 / mm
    )  # values distribute between 0 and 1, 1 represents distant nodes (only 1 tract)
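    # Note: pairs with zero tracts leave mm == 0 here, so 1 / mm yields inf;
    # downstream code is expected to mask or ignore those entries.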
    # new_data[new_data > 1] = 2
    np.save(f"{folder_name}/non-weighted_mega{fig_type}", new_data)
    np.save(f"{folder_name}/non-weighted_mega{fig_type}_nonnorm", mm)

    return new_data, m, grouping
Example #4
def streamlines2graph(streamlines, affine, parcellation, output_file):
    # Load Images
    parcellation_loaded = nib.load(parcellation)
    parcellation_data = parcellation_loaded.get_data()

    uniq = np.unique(parcellation_data)
    parcellation_data = parcellation_data.astype(int)
    if list(uniq) != list(np.unique(parcellation_data)):
        raise TypeError("Parcellation labels should be integers.")

    # Perform tracing
    graph, _ = utils.connectivity_matrix(streamlines, parcellation_data,
                                         affine=affine,
                                         return_mapping=True,
                                         mapping_as_streamlines=True)
    # Deleting edges with the background
    graph = np.delete(graph, (0), axis=0)
    graph = np.delete(graph, (0), axis=1)

    np.savetxt(output_file + ".mat", graph)
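    # Note: np.savetxt writes a plain-text matrix; the .mat suffix here does
    # not produce a MATLAB binary file.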
    plt.imshow(np.log1p(graph), interpolation='nearest')
    try:
        plt.savefig(output_file + ".png")
    except ValueError:
        pass
Example #5
    def __init__(self, atlas, subj_folder, cm_name=None, index_to_text_file=None, tract_name='HCP_tracts.tck'):
        from dipy.tracking import utils
        from Tractography.files_loading import load_ft

        self.subj_folder = subj_folder
        self.atlas = atlas
        cm_nodes = ConMatNodes(self.atlas, index_to_text_file)
        self.index_to_text_file = cm_nodes.index_to_text_file
        self.labels = cm_nodes.labels_headers
        self.idx = cm_nodes.idx
        self.affine, self.lab_labels_index = cm_nodes.nodes_by_idx(self.subj_folder)
        nii_ref = os.path.join(subj_folder,'data.nii')
        if not cm_name:
            tract_path = os.path.join(self.subj_folder, 'streamlines', tract_name)
            streamlines = load_ft(tract_path, nii_ref)
            m, self.grouping = utils.connectivity_matrix(streamlines, self.affine, self.lab_labels_index,
                                                         return_mapping=True,
                                                         mapping_as_streamlines=True)
            self.fix_cm(m)
            self.ord_cm()
            self.norm_cm = 1 / self.ord_cm

        else:
            cm_file_name = f'{subj_folder}cm{os.sep}{cm_name}.npy'
            self.cm = np.load(cm_file_name)
Example #6
def non_weighted_con_mat_mega(streamlines,
                              lab_labels_index,
                              affine,
                              idx,
                              folder_name,
                              fig_type=''):
    from dipy.tracking import utils

    if len(fig_type) > 0:
        fig_type = '_' + fig_type

    m, grouping = utils.connectivity_matrix(streamlines,
                                            affine,
                                            lab_labels_index,
                                            return_mapping=True,
                                            mapping_as_streamlines=True)
    mm = m[1:]
    mm = mm[:, 1:]
    if 'aal3' in fig_type:
        mm = np.delete(mm, [34, 35, 80, 81], 0)
        mm = np.delete(mm, [34, 35, 80, 81], 1)

    mm = mm[idx]
    mm = mm[:, idx]
    new_data = 1 / mm  # values distribute between 0 and 1, 1 represents distant nodes (only 1 tract)
    #new_data[new_data > 1] = 2
    #np.save(folder_name + r'\non-weighted_mega'+fig_type, new_data)
    np.save(folder_name + r'\non-weighted' + fig_type + '_nonnorm', mm)

    return new_data, mm, grouping
Example #7
def test_connectivity_matrix():
    label_volume = np.array([[[3, 0, 0],
                              [0, 0, 0],
                              [0, 0, 4]]])
    streamlines = [np.array([[0, 0, 0], [0, 0, 0], [0, 2, 2]], 'float'),
                   np.array([[0, 0, 0], [0, 1, 1], [0, 2, 2]], 'float'),
                   np.array([[0, 2, 2], [0, 1, 1], [0, 0, 0]], 'float')]
    expected = np.zeros((5, 5), 'int')
    expected[3, 4] = 2
    expected[4, 3] = 1
    # Check basic case
    matrix = connectivity_matrix(streamlines, label_volume, (1, 1, 1),
                                 symmetric=False)
    assert_array_equal(matrix, expected)
    # Test mapping
    matrix, mapping = connectivity_matrix(streamlines, label_volume, (1, 1, 1),
                                          symmetric=False, return_mapping=True)
    assert_array_equal(matrix, expected)
    assert_equal(mapping[3, 4], [0, 1])
    assert_equal(mapping[4, 3], [2])
    assert_equal(mapping.get((0, 0)), None)
    # Test mapping and symmetric
    matrix, mapping = connectivity_matrix(streamlines, label_volume, (1, 1, 1),
                                          symmetric=True, return_mapping=True)
    assert_equal(mapping[3, 4], [0, 1, 2])
    # When symmetric only (3,4) is a key, not (4, 3)
    assert_equal(mapping.get((4, 3)), None)
    # expected output matrix is symmetric version of expected
    expected = expected + expected.T
    assert_array_equal(matrix, expected)
    # Test mapping_as_streamlines, mapping dict has lists of streamlines
    matrix, mapping = connectivity_matrix(streamlines, label_volume, (1, 1, 1),
                                          symmetric=False,
                                          return_mapping=True,
                                          mapping_as_streamlines=True)
    assert_true(mapping[3, 4][0] is streamlines[0])
    assert_true(mapping[3, 4][1] is streamlines[1])
    assert_true(mapping[4, 3][0] is streamlines[2])

    # Test passing affine to connectivity_matrix
    expected = matrix
    affine = np.diag([-1, -1, -1, 1.])
    streamlines = [-i for i in streamlines]
    matrix = connectivity_matrix(streamlines, label_volume, affine=affine)
    # In the symmetrical case, the matrix should be, well, symmetric:
    assert_equal(matrix[4, 3], matrix[3, 4])
Example #8
def create_streamline_dict(streamlines, lab_labels_index, affine):
    from dipy.tracking import utils

    m, grouping = utils.connectivity_matrix(streamlines,
                                            affine,
                                            lab_labels_index,
                                            return_mapping=True,
                                            mapping_as_streamlines=True)

    return grouping
Example #9
def make_sub_cnxn_mappings(sub,dpy_file,parc_file,outdir,overwrite=True):

  import os,h5py,numpy as np,nibabel as nib
  from dipy.io import Dpy
  from dipy.tracking.utils import connectivity_matrix,length
    
  if not os.path.isdir(outdir): os.makedirs(outdir)

  cnxn_inds_outfile = '%s/G_%s.h5' %(outdir,sub)
  cnxn_lens_outfile = '%s/L_%s.h5' %(outdir,sub)
  cnxn_mat_outfile = '%s/M_%s.txt' %(outdir,sub)

  if overwrite:
    for f in [cnxn_inds_outfile, cnxn_lens_outfile, cnxn_mat_outfile]:
      if os.path.isfile(f):
        print('file exists (%s). Removing...' % f)
        os.system('rm %s' % f)
        

  print('...loading streamlines')
  D = Dpy(dpy_file, 'r')
  dpy_streams = D.read_tracks()
  D.close()


  print('...loading parcellation')
  parc_img = nib.load(parc_file)
  parc_dat = parc_img.get_data().astype(int)

  affine = np.eye(4)

  print('...computing connectivity')
  M,G = connectivity_matrix(dpy_streams,parc_dat,affine=affine,
                            return_mapping=True,mapping_as_streamlines=False)

  print('...computing lengths')
  L = {k: list(length([dpy_streams[vv] for vv in v])) for k,v in G.items()}
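  # Each entry of L holds the per-streamline lengths for one region pair,
  # in the same units as the streamline point coordinates.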


  print('...writing cnxn inds to file')
  F = h5py.File(cnxn_inds_outfile, 'a')
  for k,v in G.items():  F['%s-%s' %(k[0],k[1])] = v
  F.close()

  print('...writing cnxn lens to file')
  F = h5py.File(cnxn_lens_outfile, 'a')
  for k,v in L.items():  F['%s-%s' %(k[0],k[1])] = v
  F.close()

  print('...writing connectivity matrix to file')
  np.savetxt(cnxn_mat_outfile,M)


  return cnxn_inds_outfile,cnxn_lens_outfile,cnxn_mat_outfile
Example #10
    def _run_interface(self, runtime):
        # Loading the ROI file
        import nibabel as nib
        import numpy as np
        from dipy.tracking import utils

        img = nib.load(self.inputs.ROI_file)
        data = img.get_data()
        affine = img.get_affine()

        # Getting the FA file
        img = nib.load(self.inputs.FA_file)
        FA_data = img.get_data()
        FA_affine = img.get_affine()

        # Loading the streamlines
        from nibabel import trackvis
        streams, hdr = trackvis.read(self.inputs.trackfile, points_space='rasmm')
        streamlines = [s[0] for s in streams]
        streamlines_affine = trackvis.aff_from_hdr(hdr, atleast_v2=True)

        # Checking for negative values
        from dipy.tracking._utils import _mapping_to_voxel, _to_voxel_coordinates
        endpoints = [sl[0::len(sl) - 1] for sl in streamlines]
        lin_T, offset = _mapping_to_voxel(affine, (1., 1., 1.))
        inds = np.dot(endpoints, lin_T)
        inds += offset
        negative_values = np.where(inds < 0)[0]
        for negative_value in sorted(negative_values, reverse=True):
            del streamlines[negative_value]

        # Constructing the streamlines matrix
        matrix, mapping = utils.connectivity_matrix(streamlines=streamlines,
                                                    label_volume=data,
                                                    affine=streamlines_affine,
                                                    symmetric=True,
                                                    return_mapping=True,
                                                    mapping_as_streamlines=True)
        matrix[matrix < 10] = 0

        # Constructing the FA matrix
        dimensions = matrix.shape
        FA_matrix = np.empty(shape=dimensions)

        for i in range(0, dimensions[0]):
            for j in range(0, dimensions[1]):
                if matrix[i, j]:
                    dm = utils.density_map(mapping[i, j], FA_data.shape,
                                           affine=streamlines_affine)
                    FA_matrix[i, j] = np.mean(FA_data[dm > 0])
                else:
                    FA_matrix[i, j] = 0

        FA_matrix[np.tril_indices(n=len(FA_matrix))] = 0
        FA_matrix = FA_matrix.T + FA_matrix - np.diagonal(FA_matrix)

        from nipype.utils.filemanip import split_filename
        _, base, _ = split_filename(self.inputs.trackfile)
        np.savetxt(base + '_FA_matrix.txt', FA_matrix, delimiter='\t')
        return runtime
Example #11
def test_connectivity_matrix_shape():
    # Labels: z-planes have labels 0,1,2
    labels = np.zeros((3, 3, 3), dtype=int)
    labels[:, :, 1] = 1
    labels[:, :, 2] = 2
    # Streamline set, only moves between first two z-planes.
    streamlines = [
        np.array([[0., 0., 0.], [0., 0., 0.5], [0., 0., 1.]]),
        np.array([[0., 1., 1.], [0., 1., 0.5], [0., 1., 0.]])
    ]
    matrix = connectivity_matrix(streamlines, np.eye(4), labels)
    npt.assert_equal(matrix.shape, (3, 3))
Example #12
def test_connectivity_matrix():
    label_volume = np.array([[[3, 0, 0], [0, 0, 0], [0, 0, 4]]])
    streamlines = [
        np.array([[0, 0, 0], [0, 0, 0], [0, 2, 2]], 'float'),
        np.array([[0, 0, 0], [0, 1, 1], [0, 2, 2]], 'float'),
        np.array([[0, 2, 2], [0, 1, 1], [0, 0, 0]], 'float')
    ]
    expected = np.zeros((5, 5), 'int')
    expected[3, 4] = 2
    expected[4, 3] = 1
    # Check basic case
    matrix = connectivity_matrix(streamlines, label_volume, (1, 1, 1))
    assert_array_equal(matrix, expected)
    # Test mapping
    matrix, mapping = connectivity_matrix(streamlines,
                                          label_volume, (1, 1, 1),
                                          return_mapping=True)
    assert_array_equal(matrix, expected)
    assert_equal(mapping[3, 4], [0, 1])
    assert_equal(mapping[4, 3], [2])
    assert_raises(KeyError, mapping.__getitem__, (0, 0))
    # Test mapping and symmetric
    matrix, mapping = connectivity_matrix(streamlines,
                                          label_volume, (1, 1, 1),
                                          True,
                                          return_mapping=True)
    assert_equal(mapping[3, 4], [0, 1, 2])
    # When symmetric only (3,4) is a key, not (4, 3)
    assert_raises(KeyError, mapping.__getitem__, (4, 3))
    # expected output matrix is symmetric version of expected
    expected = expected + expected.T
    assert_array_equal(matrix, expected)
    # Test mapping_as_streamlines, mapping dict has lists of streamlines
    matrix, mapping = connectivity_matrix(streamlines,
                                          label_volume, (1, 1, 1),
                                          return_mapping=True,
                                          mapping_as_streamlines=True)
    assert_true(mapping[3, 4][0] is streamlines[0])
    assert_true(mapping[3, 4][1] is streamlines[1])
    assert_true(mapping[4, 3][0] is streamlines[2])
Example #13
def track_gen_net_work(tracks, img_template):
    labels = img_template.get_data()
    labels = labels.astype(int)
    M, grouping = utils.connectivity_matrix(tracks,
                                            labels,
                                            affine=img_template.get_affine(),
                                            return_mapping=True,
                                            mapping_as_streamlines=True)
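    # Struct is assumed to be a simple attribute container defined elsewhere
    # in this project.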

    p = Struct()
    p.M = M
    p.grouping = grouping
    return p
Example #14
def test_connectivity_matrix_shape():
    # Labels: z-planes have labels 0,1,2
    labels = np.zeros((3, 3, 3), dtype=int)
    labels[:, :, 1] = 1
    labels[:, :, 2] = 2
    # Streamline set, only moves between first two z-planes.
    streamlines = [np.array([[0., 0., 0.],
                             [0., 0., 0.5],
                             [0., 0., 1.]]),
                   np.array([[0., 1., 1.],
                             [0., 1., 0.5],
                             [0., 1., 0.]])]
    matrix = connectivity_matrix(streamlines, labels, affine=np.eye(4))
    assert_equal(matrix.shape, (3, 3))
Example #15
    def connectivity_matrix_compute(self, streamlines, affine, plot_file_name):
        '''
        Use ``connectivity_matrix`` to find out which regions of the brain
        are connected by these streamlines. The function takes a set of
        streamlines and an array of labels as arguments. It returns the
        number of streamlines that start and end at each pair of labels,
        and it can also return the streamlines grouped by their endpoints.
        '''
        M, grouping = utils.connectivity_matrix(streamlines,
                                                affine,
                                                self.labels.astype(np.uint8),
                                                return_mapping=True,
                                                mapping_as_streamlines=True)
        self.save_plot(np.log1p(M), plot_file_name)
        return grouping
Example #16
def test_connectivity_matrix():
    label_volume = np.array([[[3, 0, 0],
                              [0, 0, 0],
                              [0, 0, 4]]])
    streamlines = [np.array([[0, 0, 0], [0, 0, 0], [0, 2, 2]], 'float'),
                   np.array([[0, 0, 0], [0, 1, 1], [0, 2, 2]], 'float'),
                   np.array([[0, 2, 2], [0, 1, 1], [0, 0, 0]], 'float')]
    expected = np.zeros((5, 5), 'int')
    expected[3, 4] = 2
    expected[4, 3] = 1
    # Check basic case
    matrix = connectivity_matrix(streamlines, label_volume, (1, 1, 1))
    assert_array_equal(matrix, expected)
    # Test mapping
    matrix, mapping = connectivity_matrix(streamlines, label_volume, (1, 1, 1),
                                          return_mapping=True)
    assert_array_equal(matrix, expected)
    assert_equal(mapping[3, 4], [0, 1])
    assert_equal(mapping[4, 3], [2])
    assert_raises(KeyError, mapping.__getitem__, (0, 0))
    # Test mapping and symmetric
    matrix, mapping = connectivity_matrix(streamlines, label_volume, (1, 1, 1),
                                          True, return_mapping=True)
    assert_equal(mapping[3, 4], [0, 1, 2])
    # When symmetric only (3,4) is a key, not (4, 3)
    assert_raises(KeyError, mapping.__getitem__, (4, 3))
    # expected output matrix is symmetric version of expected
    expected = expected + expected.T
    assert_array_equal(matrix, expected)
    # Test mapping_as_streamlines, mapping dict has lists of streamlines
    matrix, mapping = connectivity_matrix(streamlines, label_volume, (1, 1, 1),
                                          return_mapping=True,
                                          mapping_as_streamlines=True)
    assert_true(mapping[3, 4][0] is streamlines[0])
    assert_true(mapping[3, 4][1] is streamlines[1])
    assert_true(mapping[4, 3][0] is streamlines[2])
Example #17
def non_weighted_con_mat(streamlines,
                         lab_labels_index,
                         affine,
                         folder_name,
                         fig_type=''):
    from dipy.tracking import utils
    m, grouping = utils.connectivity_matrix(streamlines,
                                            affine,
                                            lab_labels_index,
                                            return_mapping=True,
                                            mapping_as_streamlines=True)
    #remove '0' label from mat:
    mm = m[1:]
    mm = mm[:, 1:]

    np.save(f'{folder_name}{os.sep}{fig_type}', mm)

    return mm, grouping
Example #18
def track_gen_net_work(atlas_path=None,
                       atlas=None,
                       data_path=None,
                       affine=None,
                       track_path=None,
                       streamlines=None,
                       return_matrix=False,
                       save_matrix=True,
                       output_path=None):

    import scipy.io as scio

    if atlas_path is not None:
        img_atlas = nib.load(atlas_path)
        labels = img_atlas.get_data()
        labels = labels.astype(int)
    else:
        labels = atlas

    if data_path is not None:
        data, affine, hardi_img = load_nifti(data_path, return_img=True)

    if track_path is None:
        tracks = streamlines
    else:
        tracks = dwi.load_streamlines_from_trk(track_path)
        tracks = reduct(tracks)

    M, grouping = utils.connectivity_matrix(tracks,
                                            affine=affine,
                                            label_volume=labels,
                                            return_mapping=True,
                                            mapping_as_streamlines=True)
    if save_matrix:
        scio.savemat(output_path, {'matrix': M})

    if return_matrix:
        return M
Example #19
def connective_label(streamlines, labels, affine, hdr, f_name, data_path):
    """
    Once we've targeted the corpus callosum ROI, we might want to find out which
    regions of the brain are connected by these streamlines. To do this we can use
    the ``connectivity_matrix`` function. This function takes a set of streamlines
    and an array of labels as arguments. It returns the number of streamlines that
    start and end at each pair of labels and it can return the streamlines grouped
    by their endpoints. Notice that this function only considers the endpoints of
    each streamline.
    """
    
    M, grouping = utils.connectivity_matrix(streamlines, labels, affine=affine,
                                            return_mapping=True,
                                            mapping_as_streamlines=True)
#    M[:3, :] = 0
#    M[:, :3] = 0
    
    print(M)

    
    """
    We've set ``return_mapping`` and ``mapping_as_streamlines`` to ``True`` so that
    ``connectivity_matrix`` returns all the streamlines in ``cc_streamlines``
    grouped by their endpoint.
    
    Because we're typically only interested in connections between gray matter
    regions, and because the label 0 represents background and the labels 1 and 2
    represent white matter, we discard the first three rows and columns of the
    connectivity matrix.
    
    We can now display this matrix using matplotlib; we display it using a log
    scale to make small values in the matrix easier to see.
    """
    
    import matplotlib.pyplot as plt
    plt.imshow(np.log1p(M), interpolation='nearest')
    plt.savefig("connectivity.png")
    return M,grouping
Example #20
def connective_label(streamlines, labels, affine, hdr, f_name, data_path):
    """
    Once we've targeted the corpus callosum ROI, we might want to find out which
    regions of the brain are connected by these streamlines. To do this we can use
    the ``connectivity_matrix`` function. This function takes a set of streamlines
    and an array of labels as arguments. It returns the number of streamlines that
    start and end at each pair of labels and it can return the streamlines grouped
    by their endpoints. Notice that this function only considers the endpoints of
    each streamline.
    """

    M, grouping = utils.connectivity_matrix(streamlines,
                                            labels,
                                            affine=affine,
                                            return_mapping=True,
                                            mapping_as_streamlines=True)
    #    M[:3, :] = 0
    #    M[:, :3] = 0

    print(M)
    """
    We've set ``return_mapping`` and ``mapping_as_streamlines`` to ``True`` so that
    ``connectivity_matrix`` returns all the streamlines in ``cc_streamlines``
    grouped by their endpoint.
    
    Because we're typically only interested in connections between gray matter
    regions, and because the label 0 represents background and the labels 1 and 2
    represent white matter, we discard the first three rows and columns of the
    connectivity matrix.
    
    We can now display this matrix using matplotlib; we display it using a log
    scale to make small values in the matrix easier to see.
    """

    import matplotlib.pyplot as plt
    plt.imshow(np.log1p(M), interpolation='nearest')
    plt.savefig("connectivity.png")
    return M, grouping
Example #21
def compute_conmats(dir_res_out):

    # Connectivity matrices are computed for all tractograms
    # and all .nii.gz parcellation files prefixed with 'parc_'

    dpys = glob.glob(dir_res_out + '/*.dpy')
    parcs = glob.glob(dir_res_out + '/parc_*.nii.gz')

    for parc_file in parcs:

      parc_name = os.path.split(parc_file)[1].replace('.nii.gz', '')
      parc_img = nib.load(parc_file)
      parc_dat = parc_img.get_data().astype('int32')

      affine = np.eye(4)

      for dpy_file in dpys:

        dpy_name = os.path.split(dpy_file)[1].replace('.dpy', '')
        cm_name = 'conmat__' + parc_name.replace('parc__', '') \
                             + dpy_name.replace('tractogram', '')
        mapping_name = cm_name.replace('Conmat', 'conmat_mapping')

        cm_file = dir_res_out + '/' + cm_name + '.h5'

        dpy = Dpy(dpy_file, 'r')
        dpy_streams = dpy.read_tracks()
        dpy.close()

        M,G = connectivity_matrix(dpy_streams,parc_dat,affine=affine,
                                  return_mapping=True,mapping_as_streamlines=False)

        F = h5py.File(cm_file, 'w')
        F.create_dataset('M', data=M)
        g = F.create_group('G')  
        for k,v in G.items(): g[str(k)] = v
        F.close()
Example #22
def streamlines2graph(streamlines, affine, parcellation, output_file):
    # Load Images
    parcellation_loaded = nib.load(parcellation)
    parcellation_data = parcellation_loaded.get_fdata()

    uniq = np.unique(parcellation_data)
    parcellation_data = parcellation_data.astype(int)
    if list(uniq) != list(np.unique(parcellation_data)):
        raise TypeError("Parcellation labels should be integers.")

    # Perform tracing
    graph, mapping = utils.connectivity_matrix(streamlines,
                                               affine,
                                               parcellation_data,
                                               symmetric=True,
                                               return_mapping=True)
    # Deleting edges with the background
    graph = np.delete(graph, (0), axis=0)
    graph = np.delete(graph, (0), axis=1)
    map_keys = sorted(mapping.keys())

    np.savetxt(output_file + ".mat", graph)
    with open(output_file + "_mapping.json", "w") as fhandle:
        for k in map_keys:
            # ignore background fibers
            if 0 in k:
                continue
            v = mapping[k]
            fhandle.write("{0}\t{1}\t{2}\n".format(
                k[0], k[1], ",".join([str(_) for _ in v])))

    plt.imshow(np.log1p(graph), interpolation='nearest')
    try:
        plt.savefig(output_file + ".png")
    except ValueError:
        pass
Example #23
def streamlins_len_connectivity_mat(folder_name,
                                    streamlines,
                                    lab_labels_index,
                                    affine,
                                    idx,
                                    fig_type='lengths'):
    m, grouping = utils.connectivity_matrix(streamlines,
                                            affine,
                                            lab_labels_index,
                                            return_mapping=True,
                                            mapping_as_streamlines=True)
    new_m = np.zeros(m.shape)
    new_grouping = grouping.copy()
    for k, v in new_grouping.items():
        if k[0] == 0 or k[1] == 0:
            continue
        lengths = []
        for stream in v:
            lengths.append(stream.shape[0])
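            # stream.shape[0] counts points per streamline, so these are
            # point counts rather than physical lengths.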
        new_m[k[0] - 1, k[1] - 1] = np.mean(lengths)
        new_m[k[1] - 1, k[0] - 1] = np.mean(lengths)

    new_mm = new_m[idx]
    new_mm = new_mm[:, idx]
    np.save(folder_name + r'\weighted_' + fig_type + '_nonnorm', new_mm)
Example #24
if __name__ == '__main__':
    main_folder = subj_folder

    for n, s in zip(all_subj_names[1::], all_subj_folders[1::]):
        folder_name = main_folder+s
        dir_name = folder_name + r'\streamlines'
        sft_target = load_trk(f'{dir_name}{n}_wholebrain_3d.trk', "same", bbox_valid_check=False)
        streamlines = sft_target.streamlines
        bvec_file = load_dwi_files(folder_name)[6]

        index_to_text_file = r'C:\Users\hila\data\megaatlas\megaatlas2nii.txt'
        idx = nodes_labels_mega(index_to_text_file)[1]
        lab_labels_index, affine = nodes_by_index_mega(folder_name)
        m, grouping = utils.connectivity_matrix(streamlines, affine, lab_labels_index,
                                                return_mapping=True,
                                                mapping_as_streamlines=True)
        mat_file = rf'{folder_name}\weighted_mega_wholebrain_4d_labmask.npy'
        con_mat = np.load(mat_file)
        id = np.argsort(idx)
        con_mat = con_mat[id]
        con_mat = con_mat[:, id]

        vec_vols = []
        s_list = []
        for pair, tracts in grouping.items():
            if pair[0] == 0 or pair[1] == 0:
                continue
            else:
                th = 5
                max_clus = 5
Example #25
def main():
    parser = buildArgsParser()
    args = parser.parse_args()

    if not os.path.isfile(args.tracts):
        parser.error("Tracts file: {0} does not exist.".format(args.tracts))

    # TODO check scilpy supports

    if not os.path.isfile(args.aparc):
        parser.error("Label file: {0} does not exist.".format(args.aparc))

    if not os.path.isfile(args.labels):
        parser.error("Requested region file: {0} does not exist.".format(
            args.labels))

    if not os.path.isfile(args.lut):
        parser.error("Freesurfer LUT file: {0} does not exist.".format(
            args.lut))

    if os.path.isfile(args.out_matrix) and not args.force_overwrite:
        parser.error(
            "Output: {0} already exists. To overwrite, use -f.".format(
                args.out_matrix))

    if os.path.isfile(args.out_row_map) and not args.force_overwrite:
        parser.error(
            "Output: {0} already exists. To overwrite, use -f.".format(
                args.out_row_map))

    if os.path.splitext(args.out_matrix)[1] != ".npy":
        parser.error("Connectivity matrix must be saved in a .npy file.")

    if os.path.splitext(args.out_row_map)[1] != ".pkl":
        parser.error("Mapping must be saved in a .pkl file.")

    # Validate that tracts can be processed
    if not validate_coordinates(args.aparc, args.tracts, nifti_compliant=True):
        parser.error("The tracts file contains points that are invalid.\n" +
                     "Use the remove_invalid_coordinates.py script to clean.")

    # Load labels
    labels_img = nib.load(args.aparc)
    full_labels = labels_img.get_data().astype('int')

    # Compute the mapping from label name to label id
    label_id_mapping = compute_labels_map(args.lut)

    # Find which labels were requested by the user.
    requested_labels_mapping = compute_requested_labels(
        args.labels, label_id_mapping)

    # Filter to keep only needed ones
    filtered_labels = np.zeros(full_labels.shape, dtype='int')
    for label_val in requested_labels_mapping:
        filtered_labels[full_labels == label_val] = label_val

    # Reduce the range of labels to avoid a sparse matrix,
    # because the ids of labels can range from 0 to the 12000's.
    reduced_labels, labels_lut = dpu.reduce_labels(filtered_labels)

    # Load tracts
    tract_format = tc.detect_format(args.tracts)
    tract = tract_format(args.tracts, args.aparc)

    streamlines = [t for t in tract]
    f_streamlines = []
    for sl in streamlines:
        # Avoid streamlines having only one point, as they crash the
        # Dipy connectivity matrix function.
        if sl.shape[0] > 1:
            f_streamlines.append(sl)

    # Compute affine
    affine = compute_affine_for_dipy_functions(args.aparc, args.tracts)

    # Compute matrix
    M = dpu.connectivity_matrix(f_streamlines,
                                reduced_labels,
                                affine=affine,
                                symmetric=True,
                                return_mapping=False,
                                mapping_as_streamlines=False)
    # Remove background connectivity
    M = M[1:, 1:]

    # Save needed files
    np.save(args.out_matrix, np.array(M))

    # Compute the mapping between row numbers, labels and ids.
    sorted_lut = sorted(labels_lut)
    row_name_map = {}
    # Skip first for BG
    for id, lab_val in enumerate(sorted_lut[1:]):
        # Find the associated Freesurfer id
        free_name = requested_labels_mapping[lab_val]['free_name']
        lut_name = requested_labels_mapping[lab_val]['lut_name']

        # Find the mean y position of the label to be able to spatially sort.
        positions = np.where(full_labels == lab_val)
        mean_y = np.mean(positions[1])

        row_name_map[id] = {
            'free_name': free_name,
            'lut_name': lut_name,
            'free_label': lab_val,
            'mean_y_pos': mean_y
        }

    with open(args.out_row_map, 'wb') as f:
        pickle.dump(row_name_map, f)
Example #26
                                seeds=condition_seeds)
    affine = streamline_generator.affine
    streamlines = list(streamline_generator)

else:
    print('\tTracking already complete')

#=============================================================================
# Create two connectivity matrices - symmetric and directional
#=============================================================================
if not os.path.exists(Msym_file) and not os.path.exists(Mdir_file):

    print('\tCreating Connectivity Matrix')
    Msym, grouping = utils.connectivity_matrix(streamlines,
                                               parcellation_wm_data,
                                               affine=affine,
                                               return_mapping=True,
                                               symmetric=True,
                                               mapping_as_streamlines=True)

    Mdir, grouping = utils.connectivity_matrix(streamlines,
                                               parcellation_wm_data,
                                               affine=affine,
                                               return_mapping=True,
                                               symmetric=False,
                                               mapping_as_streamlines=True)

else:
    Msym = np.loadtxt(Msym_file)
    Mdir = np.loadtxt(Mdir_file)

# Calculate the difference between the two directions
Example #27
   :align: center

   **Corpus Callosum Sagittal**
"""
"""
Once we've targeted the corpus callosum ROI, we might want to find out which
regions of the brain are connected by these streamlines. To do this we can use
the ``connectivity_matrix`` function. This function takes a set of streamlines
and an array of labels as arguments. It returns the number of streamlines that
start and end at each pair of labels and it can return the streamlines grouped
by their endpoints. Notice that this function only considers the endpoints of
each streamline.
"""

M, grouping = utils.connectivity_matrix(cc_streamlines, labels, affine=affine,
                                        return_mapping=True,
                                        mapping_as_streamlines=True)
M[:3, :] = 0
M[:, :3] = 0

"""
We've set ``return_mapping`` and ``mapping_as_streamlines`` to ``True`` so that
``connectivity_matrix`` returns all the streamlines in ``cc_streamlines``
grouped by their endpoint.

Because we're typically only interested in connections between gray matter
regions, and because the label 0 represents background and the labels 1 and 2
represent white matter, we discard the first three rows and columns of the
connectivity matrix.

We can now display this matrix using matplotlib; we display it using a log
Example #28
   **Corpus Callosum Sagittal**
"""
"""
Once we've targeted the corpus callosum ROI, we might want to find out which
regions of the brain are connected by these streamlines. To do this we can use
the ``connectivity_matrix`` function. This function takes a set of streamlines
and an array of labels as arguments. It returns the number of streamlines that
start and end at each pair of labels and it can return the streamlines grouped
by their endpoints. Notice that this function only considers the endpoints of
each streamline.
"""

M, grouping = utils.connectivity_matrix(cc_streamlines,
                                        labels,
                                        affine=affine,
                                        return_mapping=True,
                                        mapping_as_streamlines=True)
M[:3, :] = 0
M[:, :3] = 0
"""
We've set ``return_mapping`` and ``mapping_as_streamlines`` to ``True`` so that
``connectivity_matrix`` returns all the streamlines in ``cc_streamlines``
grouped by their endpoint.

Because we're typically only interested in connections between gray matter
regions, and because the label 0 represents background and the labels 1 and 2
represent white matter, we discard the first three rows and columns of the
connectivity matrix.

We can now display this matrix using matplotlib; we display it using a log
Example #29

img_labels = nib.load(label_filename)
labels = img_labels.get_data().astype('int')


streamlines = [s[0] for s in nib.trackvis.read(streamlines_filename)[0]]
f_streamlines = []
for sl in streamlines:
    if sl.shape[0] > 1 and np.all(sl >= 0):
        # Streamline points must be in voxel coordinates
        f_streamlines.append(sl)  # divided by voxel size

M, mapping = utils.connectivity_matrix(f_streamlines,
                                       labels,
                                       affine=np.identity(4),
                                       symmetric=True,
                                       return_mapping=True,
                                       mapping_as_streamlines=False)
M = M[1:, 1:]
M = np.tril(M, -1)
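# np.tril(M, -1) keeps only the strictly lower triangle, so each region pair
# is counted once and self-connections are dropped.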

# find the region pair with the most streamlines and record it
selected = []
max_len = 0
for i, j in mapping.keys():
    if i == 0 or j == 0 or i == j:
        continue
    if len(mapping[i, j]) > max_len:
        max_len = len(mapping[i, j])
        tuple = (i, j)
Example #30
def save_results(table, area_pairs, medium, subject_name):
    with open('saved/{}_track_result'.format(subject_name), 'wb') as f:
        pickle.dump(table, f)

    streamlines = table['streamlines']
    sft = table['sft']
    sub_affine = table['affine']
    data = table['data']
    labels = table["labels"]

    affine = np.eye(4)

    # extract only the streamlines that pass through the ROIs
    cc_slice = (labels == medium[0])  # ROIs
    if len(medium) > 1:
        for m in medium:
            cc_slice = cc_slice | (labels == m)

    cc_streamlines = utils.target(streamlines, affine, cc_slice)
    cc_streamlines = Streamlines(cc_streamlines)

    other_streamlines = utils.target(streamlines,
                                     affine,
                                     cc_slice,
                                     include=False)
    other_streamlines = Streamlines(other_streamlines)
    assert len(other_streamlines) + len(cc_streamlines) == len(streamlines)

    M, grouping = utils.connectivity_matrix(streamlines,
                                            affine,
                                            labels,
                                            return_mapping=True,
                                            mapping_as_streamlines=True)
    M[:3, :] = 0
    M[:, :3] = 0
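    # Zero out connections involving labels 0-2 (background and white
    # matter, as in the DIPY tutorial convention).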

    for pair in area_pairs:
        track = grouping[pair[0], pair[1]]
        shape = labels.shape
        dm = utils.density_map(track, affine, shape)

        import nibabel as nib

        # Save density map
        dm_img = nib.Nifti1Image(dm.astype("int16"), sub_affine)
        dm_img.to_filename("saved/{}_{}.nii.gz".format(subject_name, pair))


#     save_trk(sft, "tractogram_probabilistic_dg.trk")

#     # visualize
#     # Enables/disables interactive visualization
#     interactive = False

#     if has_fury:
#         # Prepare the display objects.
#         color = colormap.line_colors(streamlines)

#         streamlines_actor = actor.line(streamlines, color)

#         # Create the 3D display.
#         r = window.Scene()
#         r.add(streamlines_actor)

#         # Save still images for this static example. Or for interactivity use
#         window.record(r, out_path='fiber_tracking_Caudate_l_r_result.png', size=(800, 800))

#         if interactive:
#             window.show(r)
Example #31
def run(context):

    ####################################################
    # Get the path to input files  and other parameter #
    ####################################################
    analysis_data = context.fetch_analysis_data()
    settings = analysis_data['settings']
    postprocessing = settings['postprocessing']
    dataset = settings['dataset']

    if dataset == "HCPL":
        dwi_file_handle = context.get_files('input', modality='HARDI')[0]
        dwi_file_path = dwi_file_handle.download('/root/')

        bvalues_file_handle = context.get_files(
            'input', reg_expression='.*prep.bvalues.hcpl.txt')[0]
        bvalues_file_path = bvalues_file_handle.download('/root/')
        bvecs_file_handle = context.get_files(
            'input', reg_expression='.*prep.gradients.hcpl.txt')[0]
        bvecs_file_path = bvecs_file_handle.download('/root/')
    elif dataset == "DSI":
        dwi_file_handle = context.get_files('input', modality='DSI')[0]
        dwi_file_path = dwi_file_handle.download('/root/')
        bvalues_file_handle = context.get_files(
            'input', reg_expression='.*prep.bvalues.txt')[0]
        bvalues_file_path = bvalues_file_handle.download('/root/')
        bvecs_file_handle = context.get_files(
            'input', reg_expression='.*prep.gradients.txt')[0]
        bvecs_file_path = bvecs_file_handle.download('/root/')
    else:
        context.set_progress(message='Wrong dataset parameter')

    inject_file_handle = context.get_files(
        'input', reg_expression='.*prep.inject.nii.gz')[0]
    inject_file_path = inject_file_handle.download('/root/')

    VUMC_ROIs_file_handle = context.get_files(
        'input', reg_expression='.*VUMC_ROIs.nii.gz')[0]
    VUMC_ROIs_file_path = VUMC_ROIs_file_handle.download('/root/')

    ###############################
    # _____ _____ _______     __  #
    # |  __ \_   _|  __ \ \   / / #
    # | |  | || | | |__) \ \_/ /  #
    # | |  | || | |  ___/ \   /   #
    # | |__| || |_| |      | |    #
    # |_____/_____|_|      |_|    #
    #                             #
    ###############################

    ########################################################################################
    #  _______             _          __  __   _______             _     __                #
    # |__   __|           | |        |  \/  | |__   __|           | |   / _|               #
    #    | |_ __ __ _  ___| | ___   _| \  / | ___| |_ __ __ _  ___| | _| |_ __ _  ___ ___  #
    #    | | '__/ _` |/ __| |/ / | | | |\/| |/ __| | '__/ _` |/ __| |/ /  _/ _` |/ __/ _ \ #
    #    | | | | (_| | (__|   <| |_| | |  | | (__| | | | (_| | (__|   <| || (_| | (_|  __/ #
    #    |_|_|  \__,_|\___|_|\_\\__, |_|  |_|\___|_|_|  \__,_|\___|_|\_\_| \__,_|\___\___| #
    #                            __/ |                                                     #
    #                           |___/                                                      #
    #                                                                                      #
    #                                                                                      #
    #                               IronTract Team                                         #
    ########################################################################################

    #################
    # Load the data #
    #################
    dwi_img = nib.load(dwi_file_path)
    bvals, bvecs = read_bvals_bvecs(bvalues_file_path,
                                    bvecs_file_path)
    gtab = gradient_table(bvals, bvecs)

    ############################################
    # Extract the brain mask from the b0 image #
    ############################################
    _, brain_mask = median_otsu(dwi_img.get_data()[:, :, :, 0],
                                median_radius=2, numpass=1)

    ##################################################################
    # Fit the tensor model and compute the fractional anisotropy map #
    ##################################################################
    context.set_progress(message='Processing voxel-wise DTI metrics.')
    tenmodel = TensorModel(gtab)
    tenfit = tenmodel.fit(dwi_img.get_data(), mask=brain_mask)
    FA = fractional_anisotropy(tenfit.evals)
    stopping_criterion = ThresholdStoppingCriterion(FA, 0.2)

    sphere = get_sphere("repulsion724")
    seed_mask_img = nib.load(inject_file_path)
    affine = seed_mask_img.affine
    seeds = utils.random_seeds_from_mask(seed_mask_img.get_data(),
                                         affine,
                                         seed_count_per_voxel=True,
                                         seeds_count=5000)

    if dataset == "HCPL":
        ################################################
        # Compute Fiber Orientation Distribution (CSD) #
        ################################################
        context.set_progress(message='Processing voxel-wise FOD estimation.')

        response, _ = auto_response_ssst(gtab, dwi_img.get_data(),
                                         roi_radii=10, fa_thr=0.7)
        csd_model = ConstrainedSphericalDeconvModel(gtab, response, sh_order=8)
        csd_fit = csd_model.fit(dwi_img.get_data(), mask=brain_mask)
        shm = csd_fit.shm_coeff

        prob_dg = ProbabilisticDirectionGetter.from_shcoeff(shm,
                                                            max_angle=20.,
                                                            sphere=sphere,
                                                            pmf_threshold=0.1)
    elif dataset == "DSI":
        context.set_progress(message='Processing voxel-wise DSI estimation.')
        dsmodel = DiffusionSpectrumModel(gtab)
        dsfit = dsmodel.fit(dwi_img.get_data())
        ODFs = dsfit.odf(sphere)
        prob_dg = ProbabilisticDirectionGetter.from_pmf(ODFs,
                                                        max_angle=20.,
                                                        sphere=sphere,
                                                        pmf_threshold=0.01)

    ###########################################
    # Compute DIPY Probabilistic Tractography #
    ###########################################
    context.set_progress(message='Processing tractography.')
    streamline_generator = LocalTracking(prob_dg, stopping_criterion, seeds,
                                         affine, step_size=.2, max_cross=1)
    streamlines = Streamlines(streamline_generator)
    # sft = StatefulTractogram(streamlines, seed_mask_img, Space.RASMM)
    # streamlines_file_path = "/root/streamlines.trk"
    # save_trk(sft, streamlines_file_path)

    ###########################################################################
    # Compute 3D volumes for the IronTract Challenge. For 'EPFL', we only     #
    # keep streamlines with length > 1mm. We compute the visitation count    #
    # image and apply a small gaussian smoothing. The gaussian smoothing      #
    # is especially useful to increase voxel coverage of deterministic        #
    # algorithms. The log of the smoothed visitation count map is then        #
    # iteratively thresholded producing 200 volumes/operation points.         #
    # For VUMC, additional streamline filtering is done using anatomical      #
    # priors (keeping only streamlines that intersect with at least one ROI). #
    ###########################################################################
    if postprocessing in ["EPFL", "ALL"]:
        context.set_progress(message='Processing density map (EPFL)')
        volume_folder = "/root/vol_epfl"
        output_epfl_zip_file_path = "/root/TrackyMcTrackface_EPFL_example.zip"
        os.mkdir(volume_folder)
        lengths = length(streamlines)
        streamlines = streamlines[lengths > 1]
        density = utils.density_map(streamlines, affine, seed_mask_img.shape)
        density = scipy.ndimage.gaussian_filter(density.astype("float32"), 0.5)

        log_density = np.log10(density + 1)
        max_density = np.max(log_density)
        for i, t in enumerate(np.arange(0, max_density, max_density / 200)):
            nbr = str(i)
            nbr = nbr.zfill(3)
            mask = log_density >= t
            vol_filename = os.path.join(volume_folder,
                                        "vol" + nbr + "_t" + str(t) + ".nii.gz")
            nib.Nifti1Image(mask.astype("int32"), affine,
                            seed_mask_img.header).to_filename(vol_filename)
        shutil.make_archive(output_epfl_zip_file_path[:-4], 'zip', volume_folder)

    if postprocessing in ["VUMC", "ALL"]:
        context.set_progress(message='Processing density map (VUMC)')
        ROIs_img = nib.load(VUMC_ROIs_file_path)
        volume_folder = "/root/vol_vumc"
        output_vumc_zip_file_path = "/root/TrackyMcTrackface_VUMC_example.zip"
        os.mkdir(volume_folder)
        lengths = length(streamlines)
        streamlines = streamlines[lengths > 1]

        rois = ROIs_img.get_fdata().astype(int)
        _, grouping = utils.connectivity_matrix(streamlines, affine, rois,
                                                inclusive=True,
                                                return_mapping=True,
                                                mapping_as_streamlines=False)
        streamlines = streamlines[grouping[(0, 1)]]
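        # With mapping_as_streamlines=False, grouping[(0, 1)] holds the
        # indices of the streamlines whose endpoints land in ROIs 0 and 1.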

        density = utils.density_map(streamlines, affine, seed_mask_img.shape)
        density = scipy.ndimage.gaussian_filter(density.astype("float32"), 0.5)

        log_density = np.log10(density + 1)
        max_density = np.max(log_density)
        for i, t in enumerate(np.arange(0, max_density, max_density / 200)):
            nbr = str(i)
            nbr = nbr.zfill(3)
            mask = log_density >= t
            vol_filename = os.path.join(volume_folder,
                                        "vol" + nbr + "_t" + str(t) + ".nii.gz")
            nib.Nifti1Image(mask.astype("int32"), affine,
                            seed_mask_img.header).to_filename(vol_filename)
        shutil.make_archive(output_vumc_zip_file_path[:-4], 'zip', volume_folder)

    ###################
    # Upload the data #
    ###################
    context.set_progress(message='Uploading results...')
    #context.upload_file(fa_file_path, 'fa.nii.gz')
    # context.upload_file(fod_file_path, 'fod.nii.gz')
    # context.upload_file(streamlines_file_path, 'streamlines.trk')
    if postprocessing in ["EPFL", "ALL"]:
        context.upload_file(output_epfl_zip_file_path,
                            'TrackyMcTrackface_' + dataset +'_EPFL.zip')
    if postprocessing in ["VUMC", "ALL"]:
        context.upload_file(output_vumc_zip_file_path,
                            'TrackyMcTrackface_' + dataset +'_VUMC.zip')
Example #32
hdr['dim'] = fa.shape
tensor_streamlines_trk = ((sl, None, None) for sl in csd_streamlines)
ten_sl_fname = 'tensor_streamlines.trk'
nib.trackvis.write(ten_sl_fname,
                   tensor_streamlines_trk,
                   hdr,
                   points_space='voxel')

print(np.shape(csd_streamlines))
atlas = nib.load('atlas_reg.nii.gz')
labels = atlas.get_data()
labelsint = labels.astype(int)

#M, grouping = utils.connectivity_matrix(csd_streamlines, labelsint, affine=affine,    return_mapping=True,  mapping_as_streamlines=True)
M = utils.connectivity_matrix(csd_streamlines, labelsint, affine=affine)

#Remove background
M = M[1:, 1:]
#Remove the last rows and columns since they are cerebellum and brainstem
M = M[:90, :90]
''' 
#Reshuffle making all left areas first right areas
odd_odd = M[::2, ::2]
odd_even = M[::2, 1::2]
first = np.vstack((odd_odd,odd_even))
even_odd = M[1::2, ::2]
even_even= M[1::2, 1::2]
second = np.vstack((even_odd,even_even))
M = np.hstack((first,second))
'''
Example #33
# #pair labels target control animals
# streams_fix_control = lambda : (sl for sl in streams_control if len(sl)>1)
# streamlines_control = Streamlines(streams_fix_control())
# M_control, grouping_control = utils.connectivity_matrix(streamlines_control, labels_pair_control,
#                                                         affine=affine_labels_control, return_mapping=True,
#                                                         mapping_as_streamlines=True)

# target_streamlines_control = grouping_control[target_l, target_r]

# %%
#target control animals
streams_fix_control = lambda: (sl for sl in streams_control if len(sl) > 1)
streamlines_control = Streamlines(streams_fix_control())
M_control, grouping_control = utils.connectivity_matrix(
    streamlines_control,
    labels_control,
    affine=affine_labels_control,
    return_mapping=True,
    mapping_as_streamlines=True)

target_streamlines_control = grouping_control[target_l, target_r]

# %%
#cluster control animals
target_qb_control = QuickBundles(threshold=distance1, metric=metric1)
target_clusters_control = target_qb_control.cluster(target_streamlines_control)
print("Control Nb. clusters:", len(target_clusters_control))

# %%
#group calculation
for k in range(2):
    agegroup = age[k]
Example #34
    def _run_interface(self, runtime):
        from dipy.reconst.shm import CsaOdfModel
        from dipy.direction import peaks_from_model
        from dipy.data import default_sphere
        from dipy.core.gradients import gradient_table
        from dipy.tracking import utils
        import nibabel as nib
        import dipy.reconst.dti as dti
        from dipy.reconst.dti import color_fa, fractional_anisotropy, quantize_evecs
        from dipy.data import get_sphere
        from dipy.tracking.eudx import EuDX
        from dipy.tracking.utils import connectivity_matrix
        import numpy as np
        from dipy.tracking.streamline import Streamlines
        from nilearn import plotting
        import matplotlib.pyplot

        dwi_img = nib.load(self.inputs.in_file)
        dwi_data = dwi_img.get_data()
        dwi_affine = dwi_img.affine

        mask_img = nib.load(self.inputs.mask_file)
        mask_data = mask_img.get_data().astype(bool)
        mask_affine = mask_img.affine

        step_size = 0.5
        max_angle = 30
        density = 2
        length_threshold = 10
        sh_order = 6
        min_separation_angle = 30
        relative_peak_threshold = .5
        threshold_tissue_classifier = .05

        gtab = gradient_table(self.inputs.bval_path, self.inputs.bvec_path, b0_threshold=50)

        # seeds = utils.seeds_from_mask(mask_data, density=density, affine=np.eye(4))

        '''
        #DTI reconstruction and EuDx tracking
        tensor_model = dti.TensorModel(gtab)
        tensor_fitted = tensor_model.fit(data, mask)
        FA = fractional_anisotropy(tensor_fitted.evals)

        peak_indices = quantize_evecs(tensor_fitted.evecs, default_sphere.vertices)

        streamlines_generator = EuDX(FA.astype('f8'), peak_indices, 
                                    odf_vertices=default_sphere.vertices, 
                                    a_low=threshold_tissue_classifier,
                                    step_sz=step_size,
                                    seeds=1000000)
        '''

        # QBall reconstruction and EuDX tracking
        csa_model = CsaOdfModel(gtab, sh_order=sh_order)

        csa_peaks = peaks_from_model(model=csa_model,
                                     data=dwi_data,
                                     sphere=default_sphere,
                                     relative_peak_threshold=relative_peak_threshold,
                                     min_separation_angle=min_separation_angle,
                                     mask=mask_data)

        streamlines_generator = EuDX(csa_peaks.peak_values, csa_peaks.peak_indices,
                                     odf_vertices=default_sphere.vertices,
                                     a_low=threshold_tissue_classifier, step_sz=step_size,
                                     seeds=1000000)

        self.save(streamlines_generator, streamlines_generator.affine, mask_data.shape, 'tractography2.trk',
                  length_threshold)

        strem = Streamlines(streamlines_generator, buffer_size=512)
        labels = nib.load(self.inputs.image_parcellation_path).get_data().astype(int)

        M, grouping = connectivity_matrix(strem, labels, affine=streamlines_generator.affine,
                                          return_mapping=True,
                                          mapping_as_streamlines=True)

        M = M[1:, 1:]  # Remove the first row and column (index 0 is the background)
        M[range(M.shape[0]), range(M.shape[0])] = 0  # Zero out the diagonal (self-connections)

        np.savetxt('Jij.csv', M, delimiter=',', fmt='%d')

        fig, ax = matplotlib.pyplot.subplots()
        plotting.plot_matrix(M, colorbar=True, figure=fig)
        fig.savefig('Jij.png', dpi=1200)

        return runtime
Example #35
                                a_low=.05, step_sz=.5, seeds=condition_seeds)
    affine = streamline_generator.affine
    streamlines = list(streamline_generator)
    
else:
    print('\tTracking already complete')

#=============================================================================
# Create two connectivity matrices - symmetric and directional
#=============================================================================
if not os.path.exists(Msym_file) and not os.path.exists(Mdir_file):
 
    print('\tCreating Connectivity Matrix')
    Msym, grouping = utils.connectivity_matrix(streamlines, parcellation_wm_data,
                                                    affine=affine,
                                                    return_mapping=True,
                                                    symmetric=True,
                                                    mapping_as_streamlines=True)
                                            
    Mdir, grouping = utils.connectivity_matrix(streamlines, parcellation_wm_data,
                                                    affine=affine,
                                                    return_mapping=True,
                                                    symmetric=False,
                                                    mapping_as_streamlines=True)
 
else:
    Msym = np.loadtxt(Msym_file)
    Mdir = np.loadtxt(Mdir_file)
    
# Calculate the difference between the two directions
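A minimal sketch of that comparison (hypothetical; Msym and Mdir as computed above): with symmetric=False, Mdir[i, j] counts streamlines running from label i to label j, so comparing Mdir with its transpose measures each connection's directional imbalance.

# Hypothetical asymmetry measure: streamlines counted i -> j minus j -> i.
direction_difference = Mdir - Mdir.T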
Example #36
streamsObjIN = nib.streamlines.load(smallTractogramPath)

#because of how dipy does connectivity matrices, we have to relabel the atlas
remappingFrame=currentParcellationEntries.reset_index(drop=True)
#establish a copy
relabeledAtlas=atlasData.copy()
#iterate across unique label entries
for iLabels in range(len(remappingFrame)):
    #replace the current uniqueAtlasEntries value with the iLabels value
    #constitutes a simple renumbering schema
    relabeledAtlas[relabeledAtlas==uniqueAtlasEntries[iLabels]]=iLabels
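The same renumbering can be done without the loop; a minimal sketch (hypothetical name, assuming numpy is imported as np as elsewhere), relying only on uniqueAtlasEntries being sorted, which np.unique guarantees:

# Vectorized equivalent of the loop above: np.searchsorted maps each voxel's
# original label to its position in the sorted unique-label array.
relabeledAtlasAlt = np.searchsorted(uniqueAtlasEntries, atlasData)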

from dipy.tracking import utils
#segment tractome into connectivity matrix from parcellation
M, grouping=utils.connectivity_matrix(streamsObjIN.tractogram.streamlines, atlasImg.affine, label_volume=relabeledAtlas.astype(int),
                        return_mapping=True,
                        mapping_as_streamlines=False)


# Now that we have performed that lengthy segmentation, let's take a quantitative look at how many streamlines connect each region.  We'll first do this using the standard tool of [connectomics](https://en.wikipedia.org/wiki/Connectomics), a connectivity matrix.  In a brain connectivity matrix plot, each row/column corresponds to a brain area in a given parcellation.  Each entry (i.e. row X, column Y) is represented by a scaled color corresponding to the measure of connectivity between those two brain areas.  In the matrix we will plot below, that color value will correspond to the number of streamlines connecting those areas.
# 
# As a warning, these plots are somewhat difficult to comprehend, in that it is hard to associate particular areas with a trend or insight.  Typically they are used to depict and infer general patterns in the overall connectivity arrangement of the brain.

# In[3]:


import seaborn as sns
sns.heatmap(np.log1p(M))
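# Note: np.log1p (i.e. log(1 + x)) is applied because streamline counts are heavily right-skewed; without log scaling, a few very strong connections would dominate the color scale and wash out the rest of the matrix.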


# Here we'll also present the dataframe containing the remapped numberings (in the index column) to reference against the figure above.  Remember, each row (and column) corresponds to a specific label in the parcellation.  Due to the renumbering we performed, the index column of the table below indicates which label is associated with which row of the matrix figure above.
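# A minimal sketch of that display (assuming remappingFrame is the pandas DataFrame built during the relabeling step earlier):

# The DataFrame's integer index (0, 1, 2, ...) gives the row/column position
# in M for each original atlas label after the renumbering.
remappingFrame.head(10)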
Example #37
ref_nifti = nib.load(
    '/Users/plab/Documents/JupyerData/proj-5c50b6f12daf2e0032f880eb/sub-100206/dt-neuro-parcellation-volume.id-5c50c3f7ecd2f200ccfe9fae/parc.nii.gz'
)

from ipywidgets import interact, interactive, fixed, interact_manual
from ipywidgets import FloatSlider

niftiData = ref_nifti.get_data()
niftiDataInt = niftiData.astype(np.uint16)

lengthsArray = np.array(lengths)

from dipy.tracking import utils

M, grouping = utils.connectivity_matrix(
    streamlines=streamsObjIN.tractogram.streamlines,
    affine=ref_nifti.affine,
    label_volume=niftiDataInt.astype(np.int16),
    return_mapping=True,
    mapping_as_streamlines=False)

uniqueKeys = np.unique(np.ndarray.flatten(niftiDataInt))


roiTotal = np.max(uniqueKeys) + 1

countMatrix = np.zeros([roiTotal, roiTotal])

import itertools
keyList = list(itertools.combinations(range(roiTotal), 2))
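A hypothetical sketch of how countMatrix might then be filled from the pairwise mapping (assuming, as in the call above, mapping_as_streamlines=False, so grouping maps label pairs to lists of streamline indices):

# Hypothetical continuation: tally the number of streamlines for each ROI pair.
for pair in keyList:
    if pair in grouping:
        countMatrix[pair[0], pair[1]] = len(grouping[pair])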
Example #38
    def _run_interface(self, runtime):
        # Loading the ROI file
        from dipy.tracking import utils
        import nibabel as nib
        import numpy as np
        import os
        import pandas as pd

        img = nib.load(self.inputs.ROI_file)
        data = img.get_data()
        affine = img.get_affine()

        # Getting ROI volumes if they haven't been generated
        if not os.path.isfile('/imaging/jb07/CALM/DWI/FA_connectome/Atlas_volumes.csv'):
            import subprocess

            atlas_file = self.inputs.ROI_file
            volumes = pd.DataFrame()

            atlas_labels = np.unique(data)

            for atlas_label in atlas_labels:
                # Binarise each label in a separate array so the label volume
                # in 'data' is not overwritten
                label_data = nib.load(atlas_file).get_data()
                label_data[label_data != atlas_label] = 0
                label_data[label_data == atlas_label] = 1
                nib.save(nib.Nifti1Image(label_data, affine), 'temp.nii.gz')
                volumes.set_value(atlas_label, 'volume', subprocess.check_output(os.environ['FSLDIR'] + '/bin/fslstats temp.nii.gz -V', shell=True).split(' ')[0])

            os.remove('temp.nii.gz')
            volumes.to_csv('/imaging/jb07/CALM/DWI/FA_connectome/Atlas_volumes.csv')

        ROI_volumes = pd.read_csv('/imaging/jb07/CALM/DWI/FA_connectome/Atlas_volumes.csv')

        # Getting the FA file
        img = nib.load(self.inputs.FA_file)
        FA_data = img.get_data()
        FA_affine = img.get_affine()

        # Loading the streamlines
        from nibabel import trackvis
        streams, hdr = trackvis.read(self.inputs.trackfile, points_space='rasmm')
        streamlines = [s[0] for s in streams]
        streamlines_affine = trackvis.aff_from_hdr(hdr, atleast_v2=True)

        # Checking for negative values
        from dipy.tracking._utils import _mapping_to_voxel, _to_voxel_coordinates
        endpoints = [sl[0::len(sl) - 1] for sl in streamlines]
        lin_T, offset = _mapping_to_voxel(affine, (1., 1., 1.))
        inds = np.dot(endpoints, lin_T)
        inds += offset
        negative_values = np.where(inds < 0)[0]
        for negative_value in sorted(negative_values, reverse=True):
            del streamlines[negative_value]

        # Constructing the streamlines matrix
        matrix, mapping = utils.connectivity_matrix(streamlines=streamlines,
                                                    label_volume=data,
                                                    affine=streamlines_affine,
                                                    symmetric=True,
                                                    return_mapping=True,
                                                    mapping_as_streamlines=True)
        matrix[matrix < 10] = 0

        # Constructing the FA matrix
        dimensions = matrix.shape
        FA_matrix = np.empty(shape=dimensions)
        density_matrix = np.empty(shape=dimensions)
        density_corrected_matrix = np.empty(shape=dimensions)

        for i in range(0, dimensions[0]):
            for j in range(0, dimensions[1]):
                if matrix[i, j]:
                    dm = utils.density_map(mapping[i, j], FA_data.shape, affine=streamlines_affine)
                    FA_matrix[i, j] = np.mean(FA_data[dm > 0])
                    if np.sum(dm > 0) > 0:
                        density_matrix[i, j] = np.sum(dm[dm > 0])
                        density_corrected_matrix[i, j] = np.sum(dm[dm > 0]) / np.sum([ROI_volumes.iloc[i].values.astype('int'), ROI_volumes.iloc[j].values.astype('int')])
                    else:
                        density_matrix[i, j] = 0
                        density_corrected_matrix[i, j] = 0
                else:
                    FA_matrix[i, j] = 0
                    density_matrix[i, j] = 0
                    density_corrected_matrix[i, j] = 0

        # Keep the upper triangle and mirror it below the diagonal
        # (the diagonal itself is zero after the tril_indices step)
        FA_matrix[np.tril_indices(n=len(FA_matrix))] = 0
        FA_matrix = FA_matrix.T + FA_matrix - np.diagonal(FA_matrix)

        density_matrix[np.tril_indices(n=len(density_matrix))] = 0
        density_matrix = density_matrix.T + density_matrix - np.diagonal(density_matrix)

        density_corrected_matrix[np.tril_indices(n=len(density_corrected_matrix))] = 0
        density_corrected_matrix = density_corrected_matrix.T + density_corrected_matrix - np.diagonal(density_corrected_matrix)

        from nipype.utils.filemanip import split_filename
        _, base, _ = split_filename(self.inputs.trackfile)
        np.savetxt(base + '_FA_matrix.txt', FA_matrix, delimiter='\t')
        np.savetxt(base + '_density_matrix.txt', density_matrix, delimiter='\t')
        np.savetxt(base + '_volume_corrected_density_matrix.txt', density_corrected_matrix, delimiter='\t')
Example #39
    #Cut streamlines
    streamlines = [sl for sl in streamlines if int(length(sl)) > 3]
    print('we get {} streamlines'.format(len(streamlines)))
    print('cutting short streamlines finished')

    save_trk(outpath + 'connectivity_csv/' + runno + '_streamlines.trk',
             streamlines=streamlines,
             affine=np.eye(4))
    print('streamlines saved')

    print('building connectivity matrix begins')
    st5 = time.time()
    M = utils.connectivity_matrix(streamlines,
                                  labels_,
                                  affine=affine,
                                  return_mapping=False,
                                  mapping_as_streamlines=False)
    del streamlines
    gc.collect()
    print('streamlines deleted to save memory')

    M = M[1:, 1:]
    et5 = time.time() - st5
    np.savetxt(outpath + 'connectivity_csv/' + runno + '_connectivitybm.csv',
               M,
               delimiter=',')
    print(runno +
          ' connectivity matrix csv saved, the running time is {}'.format(et5))

    del M