Example #1
    def _parallel_nbs(self, FreqBand, thresh):
        """
		Calculates nbs for one Frequency band. Is called in calc_nbs. 
		:return dataframe with pvals of 
		"""
        print(f'Processing {FreqBand}, Thresh: {thresh}')
        ResultDict = {
            'Freq': [],
            'Threshold': [],
            'P-Val': [],
            'Component-File': [],
            'Index': []
        }
        GroupFCList = []
        for Group in self.GroupIDs.keys():
            # Find File and load file
            GroupFCs = np.load(
                self.find(suffix='stacked-FCs',
                          filetype='.npy',
                          Group=Group,
                          Freq=FreqBand))
            # Transform matrix for nbs (NxNxP with P being the number of subjects per group)
            tGroupFCs = np.moveaxis(GroupFCs, 0, -1)
            GroupFCList.append(tGroupFCs)

        # Set Component File Path
        CompFileName = self.createFileName(suffix='Component-Adj',
                                           filetype='.npy',
                                           Freq=FreqBand,
                                           Thresh=thresh)
        CompFilePath = self.createFilePath(self.EdgeStatsDir, 'NetBasedStats',
                                           'Components', CompFileName)

        pval, adj, null = nbs_bct(GroupFCList[0],
                                  GroupFCList[1],
                                  thresh=thresh,
                                  k=1000)
        print('Adjacency Shape: ', adj.shape)

        for idx, p in enumerate(pval):
            ResultDict['Freq'].append(FreqBand)
            ResultDict['Threshold'].append(thresh)
            ResultDict['P-Val'].append(p)
            ResultDict['Component-File'].append(CompFilePath)
            ResultDict['Index'].append(idx + 1)

        # Save Null-Sample and Component to File
        NullFileName = self.createFileName(suffix='Null-Sample',
                                           filetype='.npy',
                                           Freq=FreqBand,
                                           Thresh=thresh)
        NullFilePath = self.createFilePath(self.EdgeStatsDir, 'NetBasedStats',
                                           'Null-Sample', NullFileName)

        np.save(NullFilePath, null)
        np.save(CompFilePath, adj)

        # Return dataframe
        df = pd.DataFrame(ResultDict, index=ResultDict['Index'])
        return df
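For reference, nbs_bct expects each group as an N x N x P stack (nodes x nodes x subjects), which is why the loaded P x N x N arrays are passed through np.moveaxis above. A minimal, self-contained sketch with synthetic data (array sizes and the threshold are illustrative only):

import numpy as np
from bct import nbs

rng = np.random.default_rng(0)
n_nodes, n_subjects = 10, 15                       # illustrative sizes
group_a = rng.standard_normal((n_subjects, n_nodes, n_nodes))
group_b = rng.standard_normal((n_subjects, n_nodes, n_nodes))
# symmetrize each subject's matrix, as real connectivity matrices are symmetric
group_a = (group_a + group_a.transpose(0, 2, 1)) / 2
group_b = (group_b + group_b.transpose(0, 2, 1)) / 2

# move the subject axis to the back: (P, N, N) -> (N, N, P)
a_stack = np.moveaxis(group_a, 0, -1)
b_stack = np.moveaxis(group_b, 0, -1)

# pval holds one p-value per connected component found at this threshold,
# adj labels the edges of each component, null is the permutation null distribution
pval, adj, null = nbs.nbs_bct(a_stack, b_stack, thresh=2.0, k=100)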
Example #2
import numpy as np
from bct import nbs


def apply_nbs(X, y, th, iterations=1000, verbose=False):
    """
    Application of NBS. Computation of max component size, its null 
    distribution and p-value.
    
    Parameters
    ----------
    X: shape = ((N+M), n, n), where N and M are the sizes of the two groups, and
       n is the number of nodes in the graphs.
        Data from both groups. (N+M) connectivity matrices.
    y: shape=(N+M,)
        Class labels for the data
    th: float
        Minimum t-value used as threshold
    iterations: int
        Number of permutations used to estimate the empirical null distribution
    verbose: bool
        Verbosity
    
    Returns
    -------
    max_comp_size: float
        The size of the biggest connected component.
    max_comp_null: array
        Null distribution of the size of biggest connected component.
    p_value: float
         p-value   
    """
    #taking the two groups
    XX = X[y == 0].T
    YY = X[y == 1].T

    #applying nbs
    pval, adj, max_comp_null = nbs.nbs_bct(XX,
                                           YY,
                                           th,
                                           iterations,
                                           verbose=verbose)

    #Computing connected component sizes
    _, comp_sizes = np.unique(adj[adj > 0], return_counts=True)
    max_comp_size = np.max(
        comp_sizes) / 2  #div by 2 because edges are counted twice

    return max_comp_size, max_comp_null, np.min(pval)
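A hypothetical usage sketch for apply_nbs, with synthetic data standing in for real connectivity matrices (group sizes, node count, threshold, and iteration count are placeholders):

import numpy as np

rng = np.random.default_rng(42)
n_nodes, n_a, n_b = 20, 12, 14                     # illustrative sizes
X = rng.standard_normal((n_a + n_b, n_nodes, n_nodes))
X = (X + X.transpose(0, 2, 1)) / 2                 # symmetrize each subject's matrix
y = np.concatenate([np.zeros(n_a), np.ones(n_b)])  # group labels

max_comp_size, max_comp_null, p_value = apply_nbs(X, y, th=2.0,
                                                  iterations=100,
                                                  verbose=False)
print(max_comp_size, p_value)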
Example #4
reshape_Ses2_trauma = np.moveaxis(np.array(cor_z_array_2),0,-1)
#%% create a symmetric matrix for CPM
sym_mat = []
for i in range(len(ses_1_trauma_corr_z)):
    x = nilearn.connectome.sym_matrix_to_vec(ses_1_trauma_corr_z[i], discard_diagonal=False)
    x_mat = nilearn.connectome.vec_to_sym_matrix(x)
    sym_mat.append(x_mat)

sym_mat = np.array(sym_mat)
sym_mat = np.moveaxis(sym_mat,0,-1)
scipy.io.savemat('ses2_trauma.mat', dict(x=sym_mat))

#%% run NBS
from bct import nbs
# compare trauma session 1 and session 2
pval, adj, _ = nbs.nbs_bct(reshape_ses1_trauma, reshape_Ses2_trauma, thresh=2.5, tail='both', k=500, paired=True, verbose=True)


# there is a difference. Let's plot the network
# first create diff
diffMat_2_1 = contFuncs(ses_1_trauma_corr, ses_2_trauma_corr)

# reshape and save as .mat for CPM
diffMat_21_CPM = np.moveaxis(np.array(diffMat_2_1),0, -1)
scipy.io.savemat('diffMat_2_1.mat', dict(x=diffMat_21_CPM))
diffMat_2_1_thr = np.array(diffMat_2_1) * adj
diffMat_2_1_thr_average = np.mean(diffMat_2_1_thr, axis=0)
# how many edges?
np.sum(adj)
# 1310 edges survived
np.savetxt('diffMat_2_1_thr.csv', diffMat_2_1_thr_average)
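Note that the adjacency matrix returned by nbs_bct labels each suprathreshold component by index and, being symmetric, stores every undirected edge twice (the same double-counting handled in apply_nbs above). A sketch of a cross-check on the surviving edge count, using the adj from this comparison:

# number of unique undirected edges across all suprathreshold components
n_edges = np.count_nonzero(adj) / 2
# edges per component (component labels start at 1)
comp_labels, counts = np.unique(adj[adj > 0], return_counts=True)
edges_per_component = dict(zip(comp_labels.astype(int), (counts // 2).astype(int)))
print(n_edges, edges_per_component)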
Example #6
# %%
# let's run NBS
ketDeltaReshape = np.moveaxis(np.array(ketDelta), 0, -1)
midDeltaReshape = np.moveaxis(np.array(midDelta), 0, -1)

ketSes2_reshape = np.moveaxis(np.array(ketSes2), 0, -1)
midSes2_reshape = np.moveaxis(np.array(midSes2), 0, -1)
print(ketDeltaReshape.shape)
print(midDeltaReshape.shape)
from bct import nbs

# compare the ketamine and midazolam delta matrices
pval, adj, _ = nbs.nbs_bct(ketDeltaReshape,
                           midDeltaReshape,
                           thresh=2.3,
                           tail='both',
                           k=1000,
                           paired=False,
                           verbose=False)
print(pval)

# %%
# now threshold the t-matrix using the NBS adjacency
#tTresh = t[np.tril(adj)]
tTresh = t * adj
#tTresh[np.triu(tTresh)] = t
sns.heatmap(tTresh,
            xticklabels=labels,
            yticklabels=labels,
            cmap='coolwarm',
            annot=True)
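The matrix t used above is not defined in this snippet; presumably it holds the edge-wise t-statistics for the ketamine-vs-midazolam contrast. A minimal sketch of how such a matrix could be computed (unpaired t-test, matching paired=False above; this is an assumption, not the original code):

from scipy import stats
import numpy as np

# ketDeltaReshape and midDeltaReshape are (N, N, subjects) stacks, as above;
# an element-wise two-sample t-test yields one t-value per edge
t, _p = stats.ttest_ind(ketDeltaReshape, midDeltaReshape, axis=-1)
t = np.nan_to_num(t)  # guard against zero-variance edges (e.g. the diagonal)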
Example #7
ket4Reshape = np.moveaxis(np.array(ket4_corr), 0,-1)
mid1Reshape = np.moveaxis(np.array(mid1_corr),0,-1)
mid2Reshape = np.moveaxis(np.array(mid2_corr),0,-1)
mid3Reshape = np.moveaxis(np.array(mid3_corr),0,-1)

#print(mid3Reshape.shape)


# In[ ]:


# now we can run NBS
# NBS is taken from: https://github.com/aestrivex/bctpy, can be installed using pip (pip install bctpy)
from bct import nbs
# we compare ket1 and ket3
pval, adj, _ = nbs.nbs_bct(ket1Reshape, ket3Reshape, thresh=3, tail='both', k=1000, paired=True, verbose=True)
# check mean p value
#np.mean(checkNBS[0])


# In[ ]:
# look at the p-values and the number of components.
print(pval.shape)
print (pval)
len(pval)
print(adj.shape)

print(adj[0:10])
ad = np.array(adj)
print(ad[:,0:10])
#bct.adjacency_plot_und(adj, coords, tube=False)
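To relate the printed p-values to the adjacency matrix: in bctpy, pval[i] belongs to the component labeled i + 1 in adj, so the edges of any significant component can be pulled out as in this sketch (using pval and adj from above):

alpha = 0.05
for idx, p in enumerate(pval):
    if p < alpha:
        comp_label = idx + 1
        rows, cols = np.where(np.triu(adj == comp_label))
        print(f'Component {comp_label}: p = {p:.3f}, {len(rows)} edges')
        # rows/cols give the node indices of each suprathreshold edge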
#%%
    
#%% create time series (with confounders)
session1 = timeSeries(func_files=fileList(subject_list_1,'1'), confound_files=confList(subject_list_1, '1'))
session2 = timeSeries(func_files=fileList(subject_list_1,'2'), confound_files=confList(subject_list_1, '2'))
session3 = timeSeries(func_files=fileList(subject_list3,'3'), confound_files=confList(subject_list3, '3'))

#%%
os.chdir('/home/or/kpe_conn/ShenParc')
np.save("session_1Timeseries_ShenRS",session1) # saving array
np.save("session_2TimeseriesShenRS",session2)
np.save("session_3TimeseriesShenRS", session3)


#%% Correlations
   
cor1 = createCorMat(session1)
cor2 = createCorMat(session2)
cor3 = createCorMat(session3)

#%% NBS
cor1Reshape = np.moveaxis(np.array(cor1),0,-1)
cor2Reshape = np.moveaxis(np.array(cor2),0,-1)
cor3Reshape = np.moveaxis(np.array(cor3),0,-1)
from bct import nbs
# compare resting-state session 1 and session 2
pval, adj, _ = nbs.nbs_bct(cor1Reshape, cor2Reshape, thresh=2.5, tail='both', k=500, paired=True, verbose=True)
# no difference in RS across groups

# Compare first and 3rd
pval, adj, _ = nbs.nbs_bct(cor1Reshape, cor3Reshape, thresh=2.5, tail='both', k=500, paired=True, verbose=True)
Example #9
plotting.plot_connectome(empty, coords, edge_threshold='95%', colorbar=True, black_bg=False, annotate=True, node_color=color_node)

# plot in browser
view = plotting.view_connectome(empty, coords, threshold='90%') 
view.open_in_browser() 
view_color.open_in_browser()


#%% Run Network Based Analysis
# first reshape the matrix dimensions (right now its [subs,x,y]) to [x,y,subs]
trt1Reshape = np.moveaxis(np.array(trauma_1st_ses),0,-1)
trt2Reshape = np.moveaxis(np.array(trauma_2_1st_ses),0,-1)

from bct import nbs
# compare the two trauma conditions (first session)
pval, adj, _ = nbs.nbs_bct(trt1Reshape, trt2Reshape, thresh=2.5, tail='both', k=500, paired=True, verbose=True)
# one network is different

#%% compare sad to trauma 1
sad1Reshape = np.moveaxis(np.array(sad_corr_subs),0,-1)
pvalSadTr, adjSadTr, _ = nbs.nbs_bct(trt1Reshape, sad1Reshape, thresh=2.5, tail='both', k=500, paired=True, verbose=True)
# significant difference between these two networks.
# now contrast between the mean matrices of each condition
contMat = np.mean(trauma_1st_ses, axis = 0) - np.mean(sad_corr_subs, axis = 0)  
# then multiply by the adjacency matrix created by NBS.
adjCor = contMat * adjSadTr
np.max(adjCor)
# now we can split this into positive and negative matrices
pos_cor = np.array(adjCor)
pos_cor[pos_cor <= 0] = 0  # zero out everything at or below zero
np.max(pos_cor)
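The negative counterpart can be built the same way; a sketch mirroring the positive matrix above:

neg_cor = np.array(adjCor)
neg_cor[neg_cor >= 0] = 0  # keep only edges where trauma < sad
np.min(neg_cor)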
Example #10
# using the condition labels to separate midazolam and ketamine
ketamine_mat = []
midazolam_mat = []
for i,x in enumerate(condition_label):
    print(i)
    print(x)
    if x==1:
        # ketamine
        ketamine_mat.append(mat_2[i])
    else:
        midazolam_mat.append(mat_2[i])
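An equivalent, loop-free way to split the two groups, assuming condition_label and mat_2 can be cast to NumPy arrays (a sketch, not the original code):

condition_label = np.asarray(condition_label)
mat_2_arr = np.asarray(mat_2)
ketamine_mat = mat_2_arr[condition_label == 1]   # ketamine subjects
midazolam_mat = mat_2_arr[condition_label != 1]  # midazolam subjects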

#%% now reshape and NBS
ketamine = np.moveaxis(np.array(ketamine_mat),0,-1)
midazolam = np.moveaxis(np.array(midazolam_mat),0,-1)
#%%
from bct import nbs
# compare the ketamine and midazolam groups
pval, adj, _ = nbs.nbs_bct(ketamine, midazolam, thresh=2.5, tail='both', k=500, paired=False, verbose=True)
# no difference in RS across groups

# %%
nilearn.plotting.plot_matrix(mat_2[0], labels=np.array(labels.Yeo_networks7), colorbar=True)

#%%
np.array(labels.Difumo_names)