    if count1 == 0:
        sub_data1 = sp.zeros((d1.shape[0], d1.shape[1], len(lst)))
        sub_data2 = sp.zeros((d2.shape[0], d2.shape[1], len(lst)))

    sub_data1[:, :, count1] = d1
    sub_data2[:, :, count1] = d2

    count1 += 1
    print(count1, end=' ')

nSub = sub_data1.shape[2]
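# sub_data1 / sub_data2: per-subject (vertices x time) session matrices stacked
# along the third axis; nSub is the number of subjects (one slice per entry of lst).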

cat_data = sp.zeros((2 * nSub * sub_data1.shape[0], sub_data1.shape[1]))

for ind in range(nSub):
    sub_data1[:, :, ind], _ = rot_sub_data(ref=sub_data1[:, :, 0],
                                           sub=sub_data1[:, :, ind])
    sub_data2[:, :, ind], _ = rot_sub_data(ref=sub_data1[:, :, 0],
                                           sub=sub_data2[:, :, ind])

    cat_data[sub_data1.shape[0] * ind:sub_data1.shape[0] *
             (ind + 1), :] = sub_data1[:, :, ind]
    ind1 = nSub + ind
    cat_data[sub_data1.shape[0] * ind1:sub_data1.shape[0] *
             (ind1 + 1), :] = sub_data2[:, :, ind]

    print(ind, sub_data1.shape, cat_data.shape)
#sp.savez_compressed('data_bothsessions', cat_data=cat_data, lst=lst, nClusters=nClusters)
######

a = sp.load('data_bothsessions.npz')
cat_data = a['cat_data']
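
# rot_sub_data itself is not defined in these excerpts. From how it is called
# (ref/sub arguments, a returned rotation R applied as data @ R.T, norms and
# correlations compared before and after), it appears to be a BrainSync-style
# orthogonal Procrustes fit over the time dimension. A minimal sketch under
# that assumption (the name, signature, and implementation are hypothetical):
import numpy as np


def rot_sub_data_sketch(ref, sub):
    """Rotate sub (n_vertices x n_timepoints) in time to best match ref."""
    C = sub.T @ ref                  # (T x T) cross-covariance over vertices
    U, _, Vt = np.linalg.svd(C)
    R = Vt.T @ U.T                   # orthogonal R minimizing ||ref - sub @ R.T||_F
    return sub @ R.T, R

# Note that the excerpts unpack one, two, or three return values from
# rot_sub_data in different places, so several versions of the function are
# evidently in use; the sketch mirrors only the two-value form.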
# Example 2
sub_data1[:, :, count1] = d1
sub_data2[:, :, count1] = d2

count1 += 1
print(count1, end=' ')

nSub = sub_data1.shape[2]
dist_all_orig = sp.zeros(len(dfs_left_sm.vertices))
dist_all_rot = dist_all_orig.copy()
sub_data_orig1 = sub_data1.copy()
sub_data_orig2 = sub_data2.copy()

for ind in range(nSub):
    dist_all_orig += sp.mean(
        (sub_data_orig1[:, :, ind] - sub_data_orig2[:, :, ind])**2.0, axis=1)
    sub_data2[:, :, ind], R = rot_sub_data(ref=sub_data1[:, :, ind],
                                           sub=sub_data2[:, :, ind])
    dist_all_rot += sp.mean(
        (sub_data1[:, :, ind] - sub_data2[:, :, ind])**2.0, axis=1)
    print(ind, end=' ')

dist_all_rot /= nSub
dist_all_orig /= nSub
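# dist_all_orig / dist_all_rot: per-vertex mean squared difference between the
# two sessions, averaged over time and subjects, before and after rotation.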

var_all = sp.zeros((sub_data1.shape[0], sub_data2.shape[1]))

avg_sub_data = sp.mean(sub_data1, axis=2)

#dfs_left_sm = patch_color_attrib(dfs_left_sm, dist_all_orig, clim=[0, 1])
#view_patch_vtk(dfs_left_sm, azimuth=-90, elevation=-180,
#               roll=-90, outfile='dist_sess_orig_view1_1sub_left_permuted.png', show=0)
#view_patch_vtk(dfs_left_sm, azimuth=90, elevation=180, roll=90,
# Example 3
#vlang=nib.load('/big_disk/ajoshi/HCP5/' + '100307' + '/MNINonLinear/Results/tfMRI_LANGUAGE_LR/tfMRI_LANGUAGE_LR_Atlas.dtseries.nii')
#LR_flag = msk['LR_flag']
#LR_flag = np.squeeze(LR_flag) > 0
#data = sp.squeeze(vlang.get_data()).T

# Length of window
WinT = 100

dist = sp.zeros(data.shape[1] + 1 - WinT)
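# Slide a WinT-sample window over the series: each window is z-scored per
# vertex, rotated to the previous window with rot_sub_data, and dist[t] records
# the residual Frobenius norm (dist[0] compares the first window with itself).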
for t in sp.arange(0, data.shape[1] + 1 - WinT):
    temp = data[LR_flag, t:(t + WinT)]
    m = np.mean(temp, 1)
    temp = temp - m[:, None]
    s = np.std(temp, 1) + 1e-16
    temp = temp / s[:, None]
    d = temp
    if t == 0:
        d_ref = d

    drot, _ = rot_sub_data(ref=d_ref, sub=d)
    dist[t] = sp.linalg.norm(drot - d_ref)
    d_ref = d
    print(t, dist[t])

plt.plot(dist)
plt.ylabel('$L^2$ dist')
plt.xlabel('time samples')
plt.savefig('Rest_L1_windowed.png', dpi=200)
plt.show()
plt.draw()
LR_flag = np.squeeze(LR_flag) > 0
data = vrest['ftdata_NLM']
#data = sp.squeeze(vrest.get_data()).T
vrest = data[LR_flag]
vrest = vrest[ind_rois, ]
vrest = vrest[:, :vmotor1.shape[1]]  # make the lengths the same
m = np.mean(vrest, 1)
vrest = vrest - m[:, None]
s = sp.std(vrest, axis=1) + 1e-116
vmotor2 = vrest / s[:, None]
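# The per-row z-scoring above (subtract each vertex's temporal mean, divide by
# its standard deviation plus a small epsilon) recurs in nearly every excerpt;
# a hypothetical helper capturing the same steps (the normdata() call used
# further below presumably does something similar):
def zscore_rows(x, eps=1e-16):
    x = x - np.mean(x, axis=1, keepdims=True)
    return x / (np.std(x, axis=1, keepdims=True) + eps)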

rho1 = sp.sum(vmotor2 * vmotor1, axis=1) / vmotor2.shape[1]
diffbefore = vmotor2 - vmotor1

vmotor1orig = vmotor1.copy()
vmotor1, Rot = rot_sub_data(
    ref=vmotor2, sub=vmotor1)  #, area_weight=sp.sqrt(surf_weight[ind_rois]))
rho1rot = sp.sum(vmotor2 * vmotor1, axis=1) / vmotor2.shape[1]

diffafter = vmotor2 - vmotor1
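# rho1 / rho1rot: per-vertex normalized inner products (~ correlations) between
# the two series before and after rotation; diffbefore / diffafter are the
# corresponding (vertices x time) residuals imaged below.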

#diffbefore = gaussian_filter(diffbefore,[0,5])

plt.imshow(sp.absolute(diffbefore), aspect='auto', clim=(0, 2.0))
plt.colorbar()
plt.savefig('dist_motor_before.png', dpi=300)
plt.show()

diffafter = gaussian_filter(diffafter, [0, 5])

plt.imshow(sp.absolute(diffafter), aspect='auto', clim=(0, 2.0))
plt.colorbar()
face_run3 = face_annot[tst:tend]
faceseg1 = face_run3[:fseg1.shape[1]]
faceseg2 = face_run3[fseg1.shape[1]:]
faceseg1 = normdata(faceseg1[None, :]).squeeze()
faceseg2 = normdata(faceseg2[None, :]).squeeze()

fseg1 = normdata(fseg1)
fseg2 = normdata(fseg2)

#fn = sp.load('movie_corr.npz')
#rho_direct22 = fn['rho_direct22']
#ind = rho_direct22 > 0.4
#fseg1 = fseg1[ind, :]
#fseg2 = fseg2[ind, :]

fseg1_2, R = rot_sub_data(ref=fseg2, sub=fseg1)

faceseg1_2 = sp.dot(faceseg1, R.T)

faceseg1_2 = gaussian_filter(faceseg1_2, 4)

faceseg1 = gaussian_filter(faceseg1, 6) / 1.4
faceseg2 = gaussian_filter(faceseg2, 6) / 1.4
faceseg1 = faceseg1[:180]
faceseg2 = faceseg2[:180]
faceseg1_2 = faceseg1_2[:180]

print(sp.linalg.norm(fseg1 - fseg2), sp.linalg.norm(fseg1_2 - fseg2),
      sp.linalg.norm(fseg1_2 - fseg1))
print(
    sp.dot(faceseg1, faceseg2) / len(faceseg2),
# Example 6
plot_roi(mask_img, mean_func_img, display_mode='y', cut_coords=4, title="Mask")
plt.show()
# Load 70 time points per volume (indices 5-74)
nii_img = index_img(func_filenames[0], slice(5, 75))
ref_mskd = nifti_masker.transform(nii_img).T
all_data = sp.zeros(
    (ref_mskd.shape[0], ref_mskd.shape[1], len(func_filenames)))
all_data[:, :, 0] = ref_mskd
all_data_orig = all_data.copy()
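# all_data: masked (voxels x timepoints x subjects) array; subject 0 is kept
# unrotated and serves as the reference for rot_sub_data in the loop below.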
# Load all the data and rotate it
adhd_flag = sp.zeros(len(func_filenames))
adhd_flag[0] = adhd_dataset.phenotypic[0][22]
for ind in range(1, len(func_filenames)):
    nii_img = index_img(func_filenames[ind], slice(5, 75))
    sub_mskd = nifti_masker.transform(nii_img).T  ##check fwhm
    temp = rot_sub_data(ref_mskd, sub_mskd)
    all_data[:, :, ind] = temp
    all_data_orig[:, :, ind] = sub_mskd
    adhd_flag[ind] = adhd_dataset.phenotypic[ind][22]
    print(ind, end=' ')

# Variance before rotation
var_before = sp.average(sp.var(all_data_orig, axis=2), axis=1)
var_before = nifti_masker.inverse_transform(var_before)
plot_stat_map(var_before, title='Variance before rotation')

# variance after rotation
var_after = sp.average(sp.var(all_data, axis=2), axis=1)
var_after = nifti_masker.inverse_transform(var_after)
plot_stat_map(var_after, title='Variance after rotation')
    
    if count1 == 0:
        sub_data = sp.zeros((d.shape[0], d.shape[1], len(lst)))

    sub_data[:, :, count1] = d

    count1 += 1
    print(count1, end=' ')

nSub = sub_data.shape[2]

cat_data1 = sp.zeros(((nSub // 2) * sub_data.shape[0], sub_data.shape[1]))
cat_data2 = sp.zeros(((nSub // 2) * sub_data.shape[0], sub_data.shape[1]))

for ind in range(nSub):
    sub_data[:, :, ind] = rot_sub_data(ref=sub_data[:, :, 0],
                                       sub=sub_data[:, :, ind])
    if ind < nSub // 2:
        cat_data1[sub_data.shape[0] * ind:sub_data.shape[0] *
                  (ind + 1), :] = sub_data[:, :, ind]
    else:
        ind2 = ind - nSub // 2
        cat_data2[sub_data.shape[0] * ind2:sub_data.shape[0] *
                  (ind2 + 1), :] = sub_data[:, :, ind]
        

 
SC = KMeans(n_clusters=nClusters, random_state=5324)
labs_all1 = SC.fit_predict(cat_data1)
labs_all2 = SC.fit_predict(cat_data2)

lab_sub1 = labs_all1.reshape((sub_data.shape[0], nSub // 2), order='F')
lab_sub2 = labs_all2.reshape((sub_data.shape[0], nSub // 2), order='F')
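
# lab_sub1 / lab_sub2: per-vertex cluster labels, one column per subject in
# each half of the group.
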
# Example 8
sub1 = sub1 / (s[:, None] * sp.sqrt(1200))

data = datasub['ftdata_NLM']
sub2 = data[LR_flag, :]
m = np.mean(sub2, 1)
sub2 = sub2 - m[:, None]
s = np.std(sub2, 1) + 1e-16
sub2 = sub2 / (s[:, None] * sp.sqrt(1200))

msk_small_region = np.in1d(dfs_left.labels, roilist)
sub = sp.concatenate((sub1[msk_small_region, :], sub2[msk_small_region, :]),
                     axis=0)
pca = PCA(n_components=3)
pca.fit(sub)

sub2_rot, _ = rot_sub_data(sub1, sub2)

sub1_3d = pca.transform(sub1)
sub2_3d = pca.transform(sub2)
sub2_rot_3d = pca.transform(sub2_rot)
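# Project the reference subject, the second subject, and its rotated copy into
# the same 3-component PCA space (fit on the ROI rows of both subjects) for
# visualization.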

print(sub1.shape)
sub1 = sub1_3d
sub2 = sub2_3d
sub2_rot = sub2_rot_3d
#sub1=sp.random.rand(sub1.shape[0],sub1.shape[1])-.5
#sub2=sp.random.rand(sub2.shape[0],sub2.shape[1])-.5
print(sub1.shape)
#
m = np.mean(sub1, 1)
#sub1 = sub1 - m[:, None]
# Example 9
temp = temp - m[:, None]
s = np.std(temp, 1) + 1e-16
temp = temp / s[:, None]
d2 = temp

sub_data1 = d1
sub_data2 = d2

ind1 = s > 1e-10

dist_all_orig = sp.zeros(len(dfs_left_sm.vertices))
dist_all_rot = dist_all_orig.copy()
sub_data_orig1 = sub_data1.copy()
sub_data_orig2 = sub_data2.copy()

dist_all_orig = sub_data_orig1 - sub_data_orig2
sub_data2, _ = rot_sub_data(ref=sub_data1, sub=sub_data2)
dist_all_rot = sub_data1 - sub_data2
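# Here sub_data1 / sub_data2 are single-subject (vertices x time) matrices, so
# dist_all_orig and dist_all_rot are full residual matrices (imaged below)
# rather than the per-vertex averages computed in the earlier excerpts.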

plt.figure()
plt.imshow(sp.absolute(dist_all_orig[ind1, :]), aspect='auto', clim=(0, 5.0))
plt.colorbar()
plt.savefig('dist_before.pdf', dpi=300)
plt.show()
plt.figure()

plt.imshow(sp.absolute(dist_all_rot[ind1, :]), aspect='auto', clim=(0, 5.0))
plt.colorbar()
plt.savefig('dist_after.pdf', dpi=300)
plt.show()
# Example 10
    vrest2 = scipy.io.loadmat(sub + '/fmri_tnlm_5_reduce3_v2.mat')
    data = vrest2['func_' + hemi + '']
    indx = sp.isnan(data)
    data[indx] = 0
    vrest = data
    vrest = vrest[:, :vrest1.shape[1]]
    m = np.mean(vrest, 1)
    vrest = vrest - m[:, None]
    s = np.std(vrest, 1) + 1e-116
    vrest2 = vrest / s[:, None]

    rho1 += sp.sum(vrest1 * vrest2, axis=1) / vrest1.shape[1]
    diffbefore += vrest1 - vrest2

    vrest2, Rot, _ = rot_sub_data(ref=vrest1,
                                  sub=vrest2,
                                  area_weight=sp.sqrt(surf_weight))
    t = sp.sum(vrest1 * vrest2, axis=1) / vrest1.shape[1]

    rho_all = sp.append(rho_all, t[:, None], axis=1)
    rho1rot += t

    diffafter += vrest1 - vrest2
    nsub += 1
    print(sub)

rho1rot /= nsub
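# rho1rot is now the group-mean per-vertex correlation after rotation; rho_all
# keeps each subject's post-rotation correlations as a separate column.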

#
# import seaborn as sns
#
    count1 += 1
    print(count1, end=' ')

nSub = sub_data.shape[2]
rperm = sp.random.permutation(dfs_right_sm.vertices.shape[0])
#rperm=range(dfs_right_sm.vertices.shape[0])
dist_all_orig = sp.zeros([nSub, nSub])
dist_all_rot = dist_all_orig.copy()
#sub_data[:,:,1]=sub_data[rperm,:,1]
sub_data_orig = sub_data.copy()

for ind1 in range(nSub):
    for ind2 in range(nSub):
        dist_all_orig[ind1, ind2] = sp.linalg.norm(sub_data_orig[:, :, ind1] -
                                                   sub_data_orig[:, :, ind2])
        sub_data_rot, _ = rot_sub_data(ref=sub_data[:, :, ind1],
                                       sub=sub_data[:, :, ind2])
        dist_all_rot[ind1, ind2] = sp.linalg.norm(sub_data[:, :, ind1] -
                                                  sub_data_rot)
        print(ind1, ind2)


sp.savez('rot_pairwise_dist_all_sub_by_sub.npz', dist_all_rot=dist_all_rot,
         dist_all_orig=dist_all_orig, lst=lst)
######

a = sp.load('rot_pairwise_dist_all_sub_by_sub.npz')
q = sp.argmin(a['dist_all_rot'].sum(1))
m = MDS(n_components=3, dissimilarity='precomputed')
e = m.fit_transform(a['dist_all_rot'])
print(e)
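# e: 3-D MDS embedding of the post-rotation pairwise distance matrix;
# q indexes the subject with the smallest total distance to all others.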
fig, ax = plt.subplots()
# Example 12
    vrest = data[LR_flag]
    vrest = vrest[:, 400:(400 + vlang1.shape[1])]
    m = np.mean(vrest, 1)
    vrest = vrest - m[:, None]
    s = np.std(vrest, axis=1) + 1e-116
    vrest2 = vrest / s[:, None]

    # This step makes sure that the length of language task and resting state
    # are the same
    #    vrest1 = vrest1[:, 400:(400+vlang1.shape[1])]
    #    vrest2 = vrest2[:, 400:(400+vlang1.shape[1])]

    rho1 += sp.sum(vrest1 * vrest2, axis=1) / vrest1.shape[1]
    rho1lang += sp.sum(vlang1 * vlang2, axis=1) / vlang1.shape[1]

    vrest2, _ = rot_sub_data(ref=vrest1, sub=vrest2)
    vlang2, _ = rot_sub_data(ref=vlang1, sub=vlang2)

    rho2 += sp.sum(vrest1 * vrest2, axis=1) / vrest1.shape[1]
    rho2lang += sp.sum(vlang1 * vlang2, axis=1) / vlang1.shape[1]

rho1 = smooth_surf_function(dfs_left_sm, rho1, a1=0, a2=1)
rho2 = smooth_surf_function(dfs_left_sm, rho2, a1=0, a2=1)
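# rho1 / rho2 accumulate the per-vertex correlations before and after rotation
# across subjects; they are divided by len(lst) in the view_patch calls below.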

view_patch(dfs_left_sm,
           rho1 / len(lst),
           clim=[0, 1],
           outfile='rest_before_rot.png',
           show=0)
view_patch(dfs_left_sm,
           rho2 / len(lst),
    if count1 == 0:
        sub_data1 = sp.zeros((d1.shape[0], d1.shape[1], len(lst)))
        sub_data2 = sp.zeros((d2.shape[0], d2.shape[1], len(lst)))

    sub_data1[:, :, count1] = d1
    sub_data2[:, :, count1] = d2

    count1 += 1
    print(count1, end=' ')

nSub = sub_data1.shape[2]

npts = sp.sum(msk_small_region)
cat_data = sp.zeros((2 * nSub * npts, sub_data1.shape[1]))
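# cat_data stacks the ROI vertices (npts rows per subject): the first nSub
# blocks hold session 1 and the next nSub blocks hold session 2.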

for ind in range(nSub):
    d1 = rot_sub_data(ref=sub_data1[:, :, 0], sub=sub_data1[:, :, ind])
    #    sub_data1[:,:,ind] =d1[msk_small_region,:]
    d2 = rot_sub_data(ref=sub_data1[:, :, 0], sub=sub_data2[:, :, ind])
    #   sub_data2[:,:,ind] =d[msk_small_region,:]

    cat_data[npts * ind:npts * (ind + 1), :] = d1[msk_small_region, :]
    ind1 = nSub + ind
    cat_data[npts * ind1:npts * (ind1 + 1), :] = d2[msk_small_region, :]

    print(ind, sub_data1.shape, cat_data.shape)
sp.savez_compressed('data_bothsessions_precuneus',
                    cat_data=cat_data,
                    msk_small_region=msk_small_region)

SC = KMeans(n_clusters=nClusters, random_state=5324)
labs_all = SC.fit_predict(cat_data)
temp = temp - m[:, None]
s = np.std(temp, 1) + 1e-16
temp = temp / s[:, None]
d2 = temp

sub_data1 = d1
sub_data2 = d2


dist_all_orig = sp.zeros(len(dfs_left_sm.vertices))
dist_all_rot = dist_all_orig.copy()
sub_data_orig1 = sub_data1.copy()
sub_data_orig2 = sub_data2.copy()

dist_all_orig = sub_data_orig1 - sub_data_orig2
sub_data2, Rot = rot_sub_data(ref=sub_data1, sub=sub_data2)
dist_all_rot = sub_data1 - sub_data2

plt.imshow(sp.absolute(dist_all_orig), aspect='auto', clim=(0.0, 5.0))
plt.colorbar()
plt.savefig('dist_before.pdf', dpi=300)
plt.show()

plt.imshow(sp.absolute(dist_all_rot), aspect='auto', clim=(0.0, 5.0))
plt.colorbar()
plt.savefig('dist_after.pdf', dpi=300)
plt.show()
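# Rot is the (time x time) rotation returned by rot_sub_data; it is displayed
# as an image below.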
plt.imshow(Rot, aspect='auto')
plt.colorbar()
plt.savefig('Rot.pdf', dpi=300)
plt.show()
# Example 15
LR_flag = np.squeeze(LR_flag) > 0
data = vrest['ftdata_NLM']
#data = sp.squeeze(vrest.get_data()).T
vrest = data[LR_flag]
vrest = vrest[ind_rois, ]
vrest = vrest[:, :vtongue1.shape[1]]  # make the lengths the same
m = np.mean(vrest, 1)
vrest = vrest - m[:, None]
s = sp.std(vrest, axis=1) + 1e-116
vrest1 = vrest / s[:, None]

rho1 = sp.sum(vrest1 * vtongue1, axis=1) / vrest1.shape[1]
diffbefore = vrest1 - vtongue1

vtongue1, Rot = rot_sub_data(ref=vrest1,
                             sub=vtongue1,
                             area_weight=sp.sqrt(surf_weight[ind_rois]))
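# area_weight presumably lets rot_sub_data weight vertices by (the square root
# of) their surface area when estimating the rotation; surf_weight is assumed
# to hold per-vertex surface areas.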
#vrest1 = gaussian_filter(vrest1,[0,2])
#vtongue1 = gaussian_filter(vtongue1,[0,2])
#vrest1=vrest1[:,78:95]
#vtongue1=vtongue1[:,78:95]
#vrest1=vrest1[:,57:74]
#vtongue1=vtongue1[:,57:74]

vrest1 = vrest1[:, 568:884]
vtongue1 = vtongue1[:, 568:884]  # Language task

rho1rot = sp.sum(vrest1 * vtongue1, axis=1) / vrest1.shape[1]

diffafter = vrest1 - vtongue1
rho_before = sp.sum(sub1seg1 * sub1seg2, axis=1) / sub1seg1.shape[1]

dfs_ref = patch_color_attrib(dfs_ref, rho_before, clim=[0, .7])
view_patch_vtk(dfs_ref,
               azimuth=90,
               elevation=180,
               roll=90,
               outfile='before2_seg1to2_1.png')
view_patch_vtk(dfs_ref,
               azimuth=-90,
               elevation=180,
               roll=-90,
               outfile='before2_seg1to2_2.png')

_, Rot12 = rot_sub_data(ref=sub2seg2, sub=sub1seg1)

sub1seg1rot = sp.dot(sub1seg1, Rot12.T)
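# Rot12 was estimated with sub2seg2 as the reference and sub1seg1 as the moving
# data; it is applied to sub1seg1 above and the result is correlated with
# sub1seg2 below.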

rho_after = sp.sum(sub1seg1rot * sub1seg2, axis=1) / sub2seg2.shape[1]
dfs_ref = patch_color_attrib(dfs_ref, rho_after, clim=[0, .7])
writedfs('temp.dfs', dfs_ref)
view_patch_vtk(dfs_ref,
               azimuth=90,
               elevation=180,
               roll=90,
               outfile='after2_seg1to2_1.png')
view_patch_vtk(dfs_ref,
               azimuth=-90,
               elevation=180,
               roll=-90,
# Example 17
# for nsboot in :
# for WinT in win_lengths:
temp = data[LR_flag, :]
m = np.mean(temp, 1)
temp = temp - m[:, None]
s = sp.std(temp, axis=1) + 1e-116
temp = temp / s[:, None]
d1 = temp
temp = data2[LR_flag, :]
m = np.mean(temp, 1)
temp = temp - m[:, None]
s = sp.std(temp, axis=1) + 1e-116
temp = temp / s[:, None]
d2 = temp

drot, _ = rot_sub_data(ref=d2, sub=d1)
full_corr = sp.sum(drot * d2, axis=1) / d2.shape[1]
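# full_corr: per-vertex correlation between d2 and the rotated full-length d1;
# the loop below repeats the computation on random, circularly wrapped windows
# of each length in win_lengths.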

for nb, iWinL in itertools.product(nbootiter, sp.arange(len(win_lengths))):

    WinL = win_lengths[iWinL]
    startpt = randint(0, data.shape[1])
    t = sp.arange(startpt, startpt + WinL)
    t = sp.mod(t, data.shape[1])
    temp = data[LR_flag, :]
    temp = temp[:, t]
    m = np.mean(temp, 1)
    temp = temp - m[:, None]
    s = sp.std(temp, axis=1) + 1e-116
    temp = temp / s[:, None]
    d1 = temp
vrest1 = vrest / s[:, None]

#vrest=nib.load('/home/ajoshi/HCP5/110411/MNINonLinear/Results/rfMRI_REST1_LR/rfMRI_REST1_LR_Atlas_hp2000_clean.dtseries.nii')
data = scipy.io.loadmat(
    os.path.join(p_dir, sub2,
                 sub2 + '.rfMRI_REST1_RL.reduce3.ftdata.NLM_11N_hvar_25.mat'))

LR_flag = msk['LR_flag']
LR_flag = np.squeeze(LR_flag) > 0
data = data['ftdata_NLM']
#data = sp.squeeze(vrest.get_data()).T
vrest = data[LR_flag, :]
m = np.mean(vrest, 1)
vrest = vrest - m[:, None]
s = np.std(vrest, 1) + 1e-16
vrest2 = vrest / s[:, None]

vrest2 = rot_sub_data(ref=vrest1, sub=vrest2)

rho_rot = sp.dot(vrest1, vrest2.T) / vrest1.shape[1]
dist_mat = sp.absolute(sp.arccos(rho_rot))

ind = sp.argmin(dist_mat, axis=0)
#ind = linear_assignment(dist_mat)
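# ind[j] is the vertex of vrest1 whose rotated time series has the smallest
# angular distance to vertex j of vrest2; remapping the mesh faces through ind
# below visualizes this vertex-to-vertex correspondence.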

view_patch(dfs_left_sm, outfile='before_registered_surf.png', show=1)

dfs_left_sm.faces = ind[dfs_left_sm.faces]
view_patch(dfs_left_sm, outfile='registered_surf1.png', show=1)

#rho1=smooth_surf_function(dfs_left_sm,rho1)
#rho2=smooth_surf_function(dfs_left_sm,rho2)