Example #1
0
def cc_seg_mp(vol_path, streamlines_path, labels):
    """
    Make a probabilistic map (pm) of streamline labels per voxel.

    Parameters
    ----------
    vol_path: volume (T1w) data path
    streamlines_path: streamlines path
    labels: label of each streamline

    Return
    ------
    pm: probabilistic map; array of shape img.shape + (n_labels,), where
        each voxel holds, per label, the fraction of its points carrying
        that label
    """
    img = nib.load(vol_path)
    dim4 = (len(set(labels)),)
    cc_mp = np.zeros(img.shape + dim4, dtype=float)
    fasciculus = Fasciculus(streamlines_path)
    streamlines = fasciculus.get_data()
    # Map the x-min nodes from world coordinates into voxel indices.
    fibs_points = apply_affine(npl.inv(img.affine), fasciculus.xmin_nodes()).astype(int)
    # NOTE(review): iterates over streamlines but indexes fibs_points[i];
    # assumes len(fibs_points) == len(streamlines) -- confirm upstream.
    for i in range(len(streamlines)):
        # Points sharing the same voxel as point i: a row matches when all
        # three coordinates are equal (row-wise match count == 3).
        same_coord = fibs_points == np.array([fibs_points[i][0], fibs_points[i][1], fibs_points[i][2]])
        match_count = same_coord.sum(axis=1)
        in_voxel = match_count == 3
        voxel_counts = np.sum(in_voxel)
        dim4_value = []
        for label in set(labels):
            index_lab = labels == label
            # arr == 2 where a point is both in this voxel and has this label.
            arr = np.vstack((in_voxel, index_lab)).sum(axis=0)
            # float() avoids integer truncation under Python 2, which would
            # silently zero out every probability.
            dim4_value.append(np.sum(arr == 2) / float(voxel_counts))
        cc_mp[fibs_points[i][0], fibs_points[i][1], fibs_points[i][2]] = dim4_value

    return cc_mp
Example #2
0
def muti_bundle_registration(paths_file, pts=12, subject_id_index=9):
    """
    Multi-bundle registration and consolidation.

    Parameters
    ----------
    paths_file: list; multi-bundle file paths, one per subject
    pts: int; number of sections each streamline is divided into
        for registration
    subject_id_index: int; position of the subject id within the
        '/'-separated file path (default 9 matches the project layout)

    Return
    ------
    fas: Fasciculus whose header's 'fasciculus_id' lists, per streamline,
        the id of the subject it came from, and whose data is the merged,
        registered bundle
    """
    def _subject_id(path):
        # The subject id is encoded as one '/'-separated path component.
        return int(path.split('/')[subject_id_index])

    fas = Fasciculus(paths_file[0])
    bundle_header = {'fasciculus_id': None}
    sub1 = fas.get_data()
    bundle_header['fasciculus_id'] = len(sub1) * [_subject_id(paths_file[0])]
    # Align the second subject onto the first and merge.
    sub2 = Fasciculus(paths_file[1]).get_data()
    subj2_aligned = bundle_registration(sub1, sub2, pts=pts)
    bundle = fas.fib_merge(sub1, subj2_aligned)
    bundle_header['fasciculus_id'] += (
        len(bundle) - len(sub1)) * [_subject_id(paths_file[1])]
    # Remaining subjects: each is aligned to the first subject's bundle,
    # then merged into the growing consolidated bundle.
    for index in range(2, len(paths_file)):
        sub = Fasciculus(paths_file[index]).get_data()
        sub_aligned = bundle_registration(sub1, sub, pts=pts)
        prev_len = len(bundle)
        bundle = fas.fib_merge(bundle, sub_aligned)
        # fib_merge may drop duplicates, so count only the newly-added ones.
        bundle_header['fasciculus_id'] += (
            len(bundle) - prev_len) * [_subject_id(paths_file[index])]

    fas.update_header(bundle_header)
    fas.set_data(nibas.ArraySequence(bundle))

    return fas
Example #3
0
from dipy.tracking.utils import length
from pyfat.algorithm.fiber_clustering import FibClustering
from pyfat.viz.colormap import create_random_colormap
from pyfat.viz.fiber_simple_viz_advanced import fiber_simple_3d_show_advanced


# Input tract file (.tck): right-hemisphere occipital streamlines.
fib = '/home/brain/workingdir/data/dwi/hcp/preprocessed/' \
      'response_dhollander/101107/Diffusion/1M_20_01_20dynamic250_SD_Stream_rhemi_occipital5.tck'
# fib = '/home/brain/workingdir/data/dwi/hcp/preprocessed/' \
#            'response_dhollander/101107/Diffusion/1M_20_01_20dynamic250_SD_Stream_occipital8_lr5.tck'
# Structural (T1w) volume for the same subject.
img_path = '/home/brain/workingdir/data/dwi/hcp/preprocessed/' \
           'response_dhollander/101107/Structure/T1w_acpc_dc_restore_brain1.25.nii.gz'

img = nib.load(img_path)
fa = Fasciculus(fib)
streamlines = fa.get_data()
# Filter: keep only streamlines with length > 10 (units as reported by
# get_lengths -- presumably mm; verify against Fasciculus).
length_t = fa.get_lengths()
ind = length_t > 10
streamlines = streamlines[ind]
fa.set_data(streamlines)
fibcluster = FibClustering(fa)
print len(streamlines)  # Python 2 print statement

# 1
# QuickBundles clustering with threshold 2; collect the indices of
# streamlines belonging to clusters of at least 400 members.
qb = QuickBundles(streamlines, 2)
clusters = qb.clusters()
print qb.clusters_sizes()
indexs = []
for i in range(len(clusters)):
    if clusters[i]['N'] >= 400:
        indexs += clusters[i]['indices']
Example #4
0
           'response_dhollander/100408/result/result20vs45/cc_20fib_1.5lr_new_hierarchical_single_cc_splenium.tck'
# tck_path = '/home/brain/workingdir/data/dwi/hcp/preprocessed/' \
#            'response_dhollander/100408/result/result20vs45/cc_20fib_only_node.tck'
# tck_path = '/home/brain/workingdir/data/dwi/hcp/preprocessed/' \
#            'response_dhollander/100408/result/result20vs45/cc_20fib_step20_new_sample5000.tck'
# load data
# Structural (T1w) volume for subject 100408.
data_path = '/home/brain/workingdir/data/dwi/hcp/preprocessed/' \
             'response_dhollander/100408/Structure/T1w_acpc_dc_restore_brain.nii.gz'
img = nib.load(data_path)

# Cluster the streamlines loaded from tck_path (defined above).
fasciculus = Fasciculus(tck_path)
fibcluster = FibClustering(fasciculus)
# length_clusters = fibcluster.length_seg()
# streamlines = fasciculus.sort_streamlines()
# fasciculus.set_data(streamlines)
streamlines = fasciculus.get_data()
print len(streamlines)  # Python 2 print statement

# d = fibcluster.bundle_seg(streamlines, dist_thre=15)
# print len(d[1])
# Threshold-based bundle segmentation; index_streams[1] presumably holds
# per-bundle streamline indices -- verify against FibClustering.
index_streams = fibcluster.bundle_thre_seg(streamlines)
print len(index_streams[1])
centroids = fibcluster.bundle_centroids(streamlines,
                                        cluster_thre=10,
                                        dist_thre=10)
# Surface-visualization parameters (PySurfer-style naming).
subject_id = "100408"
subjects_dir = "/home/brain/workingdir/data/dwi/hcp/preprocessed/response_dhollander/100408"
hemi = 'both'
surf = 'inflated'
alpha = 1
# vertex = clusters_terminus2surface_mpm(index_streams[1], geo_path)
Example #5
0
import matplotlib.pyplot as plt

# load data
# Structural (T1w) volume for subject 100408.
data_path = '/home/brain/workingdir/data/dwi/hcp/preprocessed/' \
             'response_dhollander/100408/Structure/T1w_acpc_dc_restore_brain.nii.gz'
img = nib.load(data_path)
img_data = img.get_data()
# tck_path = '/home/brain/workingdir/data/dwi/hcp/preprocessed/' \
#        'response_dhollander/100408/result/result20vs45/cc_20fib_step20_new_sample5000.tck'
# Corpus-callosum streamlines (.tck) for the same subject.
tck_path = '/home/brain/workingdir/data/dwi/hcp/preprocessed/' \
           'response_dhollander/100408/result/result20vs45/cc_20fib_lr1.5_01_new_correct.tck'

imgtck = load_tck(tck_path)
fasciculus = Fasciculus(tck_path)
streamstck = fasciculus.get_data()
# print streamstck

# extract node according to x-value
Ls_temp = fasciculus.xmin_nodes()
print len(Ls_temp)  # Python 2 print statement
# show node or density
# show_2d_node(img, Ls_temp)
# show_slice_density(img, Ls_temp)

# knn_graph = kneighbors_graph(Ls_temp, 10, include_self=False)
# K-means the x-min nodes, then run single-linkage hierarchical
# clustering on the pairwise distances between the k-means centers.
Ls_temp_labels, Ls_temp_centers = NodeClustering(Ls_temp).k_means()
sdist = pdist(Ls_temp_centers)
knn_graph = linkage(sdist, method='single', metric='euclidean')
print knn_graph