Example No. 1
    def create_feature_matrix(self):
        # Feature matrix with each element containing an NxN array
        feature_matrix = []

        # EDGE WEIGHT (Depth 0)
        structural_connectivity_array = self.get_structure_and_function()
        feature_matrix.append(structural_connectivity_array)

        # DEGREE (Depth 1 & 2)
        deg = bct.degrees_und(structural_connectivity_array)
        self.fill_array_2D(feature_matrix, deg)

        # Convert connection weights to connection lengths
        connection_length_matrix = bct.weight_conversion(structural_connectivity_array, 'lengths')

        # SHORTEST PATH LENGTH (Depth 3 & 4)
        shortest_path = bct.distance_wei(connection_length_matrix)
        feature_matrix.append(shortest_path[0])  # distance (shortest weighted path) matrix
        feature_matrix.append(shortest_path[1])  # number of edges in each shortest weighted path

        # BETWEENNESS CENTRALITY (Depth 5 & 6)
        bc = bct.betweenness_wei(connection_length_matrix)
        self.fill_array_2D(feature_matrix, bc)

        # CLUSTERING COEFFICIENTS (Depth 7 & 8)
        cl = bct.clustering_coef_wu(connection_length_matrix)
        self.fill_array_2D(feature_matrix, cl)

        return feature_matrix
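Examples No. 1 and No. 6 rely on a fill_array_2D helper that is not shown. Since each nodal measure occupies two consecutive depths, a minimal sketch of what it might look like (an assumption, not the original implementation) tiles a length-N nodal vector into two NxN planes:

import numpy as np

def fill_array_2D(feature_matrix, nodal_vector):
    # Hypothetical helper: broadcast a length-N nodal measure into two
    # NxN arrays, one repeating the vector along rows, one along columns.
    n = len(nodal_vector)
    tiled = np.tile(nodal_vector, (n, 1))
    feature_matrix.append(tiled)    # row-wise copies (Depth k)
    feature_matrix.append(tiled.T)  # column-wise copies (Depth k + 1)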
Example No. 2
import numpy as np
import pandas as pd
import bct


def get_network_measures(fname_connectivity):

    C = pd.read_table(fname_connectivity, header=None, dtype=object)
    #cleaning up structural data
    C = C.drop([0, 1], axis=1)
    C = C.drop([0], axis=0)
    C = C.iloc[:, :-1]
    #C_electrode_names = np.array([e[-4:] for e in np.array(C.iloc[0])])
    C = np.array(C.iloc[1:, :]).astype('float64')  # finally turn into a numpy array

    #binarize connectivity matrix
    C_binarize = bct.weight_conversion(C, "binarize")

    #Calculate Network Measures:

    # 1. Density
    density = bct.density_und(C_binarize)[0]

    # 2. Degree
    degree_mean = np.mean(bct.degrees_und(C_binarize))

    # 3. Clustering Coefficient
    clustering_coefficient = bct.clustering_coef_bu(C_binarize)
    clustering_coefficient_mean = np.mean(clustering_coefficient)

    # 4. Characteristic path length (i.e. average shortest path length)
    # Get the distance matrix
    C_dist = bct.distance_bin(C_binarize)
    # If there are any disconnected nodes, set their distances to the largest non-Inf length
    C_dist_max = np.nanmax(C_dist[C_dist != np.inf])  # max length that is not infinity
    C_dist[C_dist == np.inf] = C_dist_max  # replace the infinities with the max
    characteristic_path_length = bct.charpath(C_dist)[0]

    # 5. Small-worldness
    # Analytic estimates for an equivalent random network:
    # clustering Cr ~ k/n, path length Lr ~ log(n)/log(k)
    Cr = degree_mean / len(C_binarize)
    Lr = np.log10(len(C_binarize)) / np.log10(degree_mean)

    gamma = clustering_coefficient_mean / Cr
    lamb = characteristic_path_length / Lr

    sigma = gamma / lamb
    small_worldness = sigma

    network_measures = np.zeros(shape=(1, 5))
    network_measures[0, :] = [
        density, degree_mean, clustering_coefficient_mean,
        characteristic_path_length, small_worldness
    ]
    colLabels = [
        "Density", "degree_mean", "clustering_coefficient_mean",
        "characteristic_path_length", "small_worldness"
    ]
    network_measures_df = pd.DataFrame(network_measures, columns=colLabels)
    return network_measures_df
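A minimal usage sketch; the file name is hypothetical and stands in for a real DSI Studio connectivity table:

measures_df = get_network_measures("sub-01_connectivity.txt")
print(measures_df)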
Example No. 3
import numpy as np
import pandas as pd
import bct
from scipy.io import loadmat


def get_network_measures(ifname_connectivity):

    C = np.array(pd.DataFrame(loadmat(ifname_connectivity)['connectivity']))
    #binarize connectivity matrix
    C_binarize = bct.weight_conversion(C, "binarize")

    #Calculate Network Measures:

    # 1. Density
    density = bct.density_und(C_binarize)[0]

    # 2. Degree
    degree_mean = np.mean(bct.degrees_und(C_binarize))

    # 3. Clustering Coefficient
    clustering_coefficient = bct.clustering_coef_bu(C_binarize)
    clustering_coefficient_mean = np.mean(clustering_coefficient)

    # 4. Characteristic path length (i.e. average shortest path length)
    # Get the distance matrix
    C_dist = bct.distance_bin(C_binarize)
    # If there are any disconnected nodes, set their distances to the largest non-Inf length
    C_dist_max = np.nanmax(C_dist[C_dist != np.inf])  # max length that is not infinity
    C_dist[C_dist == np.inf] = C_dist_max  # replace the infinities with the max
    characteristic_path_length = bct.charpath(C_dist)[0]

    # 5. Small-worldness
    # Analytic estimates for an equivalent random network:
    # clustering Cr ~ k/n, path length Lr ~ log(n)/log(k)
    Cr = degree_mean / len(C_binarize)
    Lr = np.log10(len(C_binarize)) / np.log10(degree_mean)

    gamma = clustering_coefficient_mean / Cr
    lamb = characteristic_path_length / Lr

    sigma = gamma / lamb
    small_worldness = sigma

    network_measures = np.zeros(shape=(1, 5))
    network_measures[0, :] = [
        density, degree_mean, clustering_coefficient_mean,
        characteristic_path_length, small_worldness
    ]
    colLabels = [
        "Density", "degree_mean", "clustering_coefficient_mean",
        "characteristic_path_length", "small_worldness"
    ]
    network_measures_df = pd.DataFrame(network_measures, columns=colLabels)
    return network_measures_df
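Usage mirrors Example No. 2, except the input is a .mat file containing a 'connectivity' variable (the path below is hypothetical):

df = get_network_measures("sub-01_connectivity.mat")
print(df)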
Example No. 4
import numpy as np


def extract_epoch_graph_features(W):
    import bct

    # Convert weights to lengths; mark absent edges as unreachable
    L = bct.weight_conversion(W, "lengths")
    L[W == 0] = np.inf
    D, _ = bct.distance_wei(L)

    # characteristic path length, global efficiency, eccentricity, radius, diameter
    l, eff, ecc, radius, diameter = bct.charpath(D, include_infinite=False)

    return [
        bct.clustering_coef_wu(W),
        bct.efficiency_wei(W, local=True),
        bct.betweenness_wei(L),
        ecc,
        [l, eff, radius, diameter],
    ]
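A self-contained usage sketch, with a random symmetric matrix standing in for a real epoch connectivity matrix (purely illustrative input):

import numpy as np

rng = np.random.default_rng(0)
W = rng.random((10, 10))
W = (W + W.T) / 2        # symmetrize: weighted undirected network
np.fill_diagonal(W, 0)   # no self-connections
features = extract_epoch_graph_features(W)
print(len(features))     # 5 feature groups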
Example No. 5
import numpy as np
import pandas as pd
import bct
from scipy.io import loadmat


def make_structural_connectivity_array(structure_file_path_array):
    # Get structural connectivity data in .mat file format (output from DSI Studio)
    structural_connectivity_array = []
    for counter, file_path in enumerate(structure_file_path_array, start=1):
        structural_connectivity_array.append(
            np.array(pd.DataFrame(loadmat(file_path)['connectivity'])))
        print("I'm trying...{0}".format(counter))

    # *** Conversion of connection weights to connection lengths ***
    connection_length_matrix = []
    for s in structural_connectivity_array:
        connection_length_matrix.append(bct.weight_conversion(s, 'lengths'))

    # Betweenness Centrality
    btwn_cent_arr = []
    for structural_matrix in connection_length_matrix:
        btwn_cent_arr.append(bct.betweenness_wei(structural_matrix))

    return btwn_cent_arr
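A usage sketch, assuming a list of DSI Studio .mat exports (the paths are hypothetical):

structure_files = ["sub-01_connectivity.mat", "sub-02_connectivity.mat"]
betweenness_per_subject = make_structural_connectivity_array(structure_files)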
Example No. 6
import statistics

import numpy as np
import pandas as pd
import bct
from scipy.io import loadmat


def create_feature_matrix(structure_matrix_file):
    # Feature matrix with each element containing an NxN array
    feature_matrix = []

    # EDGE WEIGHT (Depth 0)
    # weighted & undirected network
    structural_connectivity_array = np.array(
        pd.DataFrame(loadmat(structure_matrix_file)['connectivity']))
    feature_matrix.append(structural_connectivity_array)

    # DEGREE (Depth 1 & 2)
    # Node degree is the number of links connected to the node.
    # (fill_array_2D is an external helper; see the sketch after Example No. 1.)
    deg = bct.degrees_und(structural_connectivity_array)
    fill_array_2D(feature_matrix, deg)

    # *** Conversion of connection weights to connection lengths ***
    connection_length_matrix = bct.weight_conversion(
        structural_connectivity_array, 'lengths')
    # print(connection_length_matrix)

    # SHORTEST PATH LENGTH (Depth 3 & 4)
    '''
    The distance matrix contains lengths of shortest paths between all pairs of nodes.
    An entry (u,v) represents the length of shortest path from node u to node v.
    The average shortest path length is the characteristic path length of the network.
    '''
    shortest_path = bct.distance_wei(connection_length_matrix)
    feature_matrix.append(shortest_path[0])  # distance (shortest weighted path) matrix
    feature_matrix.append(shortest_path[1])  # number of edges in each shortest weighted path

    # BETWEENNESS CENTRALITY (Depth 5 & 6)
    '''
    Node betweenness centrality is the fraction of all shortest paths in
    the network that contain a given node. Nodes with high values of
    betweenness centrality participate in a large number of shortest paths.
    '''
    bc = bct.betweenness_wei(connection_length_matrix)
    fill_array_2D(feature_matrix, bc)

    # CLUSTERING COEFFICIENTS (Depth 7 & 8)
    '''
    The weighted clustering coefficient is the average "intensity" of
    triangles around a node.
    '''
    cl = bct.clustering_coef_wu(connection_length_matrix)
    fill_array_2D(feature_matrix, cl)

    # Find disconnected nodes (nodes outside the largest connected component)
    W_bin = bct.weight_conversion(structural_connectivity_array, 'binarize')
    comps, comp_sizes = bct.get_components(W_bin)
    print('comp: ', comps)
    print('sizes: ', comp_sizes)
    # Drop the rows and columns of nodes that fall outside the main component
    main_component = statistics.mode(comps)
    disconnected = np.where(comps != main_component)[0]
    new_array = np.delete(structural_connectivity_array, disconnected, axis=0)
    new_array = np.delete(new_array, disconnected, axis=1)

    return feature_matrix
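Since each of the nine depths is an NxN array, the returned list can be stacked into a single (9, N, N) tensor for downstream use (a usage sketch; the path is hypothetical):

import numpy as np

features = create_feature_matrix("sub-01_connectivity.mat")
feature_tensor = np.stack(features)  # shape (9, N, N): depths 0-8
print(feature_tensor.shape)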
Example No. 7
# create static arrays (no time series); fMRI and the *_EEG arrays are
# presumably NxNxT connectivity time series from earlier in the script,
# so averaging over axis 2 collapses the time dimension
static_fMRI = np.average(fMRI, 2)
static_broad_EEG = np.average(broad_EEG, 2)
static_delta_EEG = np.average(delta_EEG, 2)
static_theta_EEG = np.average(theta_EEG, 2)
static_alpha_EEG = np.average(alpha_EEG, 2)
static_beta_EEG = np.average(beta_EEG, 2)
static_gamma_EEG = np.average(gamma_EEG, 2)

# threshold fMRI
#plots.plot_connectivity_matrix(static_fMRI, "Connectivity", "No threshold", "static_fMRI", False)
bct.threshold_absolute(static_fMRI, thr=0, copy=False)
#plots.plot_connectivity_matrix_thresholded(static_fMRI, "Connectivity", "Threshold > 0", "static_fMRI_threshold0", False)
static_fMRI_unweighted = bct.weight_conversion(static_fMRI,
                                               'binarize',
                                               copy=True)
#plots.plot_connectivity_matrix_binarized(static_fMRI_unweighted, "Unweighted (binarized)", "static_fMRI_unweighted", False)

# threshold EEGs
names = ("static_broad_EEG", "static_delta_EEG", "static_theta_EEG",
         "static_alpha_EEG", "static_beta_EEG", "static_gamma_EEG")
matrices = [
    static_broad_EEG, static_delta_EEG, static_theta_EEG, static_alpha_EEG,
    static_beta_EEG, static_gamma_EEG
]
matrices_unweighted = []

for i in range(len(names)):
    #plots.plot_connectivity_matrix(matrices[i], "Connectivity", "No threshold", names[i], False)
    mean = np.mean(matrices[i])
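    # The excerpt ends here; a plausible continuation (an assumption,
    # mirroring the fMRI treatment above) thresholds each EEG matrix at
    # its mean and keeps a binarized copy:
    bct.threshold_absolute(matrices[i], thr=mean, copy=False)
    matrices_unweighted.append(
        bct.weight_conversion(matrices[i], 'binarize', copy=True))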
Example No. 8
import numpy as np
import bct


def cal_thalamus_and_cortical_ROIs_nodal_properties(
        Thalamocortical_corrmat, Cortical_adj, Cortical_plus_thalamus_CI,
        Thalamus_CIs, Cortical_CI, Cortical_ROIs_positions,
        Thalamus_voxel_positions, cost_thresholds):
    '''Calculate voxel-wise nodal properties of the thalamus and nodal properties of cortical ROIs.

    Metrics calculated:

    Participation coefficient (PC)
    Between-network connectivity weight ratio (BNWR)
        Ratio of connection weight devoted to between-network interactions
    Number of networks/modules/components connected (NNC)
    Within-module degree z-score (WMD)
        For WMD, matrices are binarized and normalized to the mean and SD of
        corticocortical connections

    Usage:
    PCs, BNWRs, NNCs, WMDs, bPCs, mean_NNC, mean_BNWR, mean_PC, mean_bPC, mean_WMD = \
        cal_thalamus_and_cortical_ROIs_nodal_properties(
            Thalamocortical_corrmat,
            Cortical_adj,
            Cortical_plus_thalamus_CI,
            Thalamus_CIs,
            Cortical_CI,
            Cortical_ROIs_positions,
            Thalamus_voxel_positions,
            cost_thresholds)

    Parameters
    ----------
    Thalamocortical_corrmat: thalamocortical adjacency matrix
    Cortical_adj: corticocortical adjacency matrix
    Cortical_plus_thalamus_CI: vector of community/module/network assignments for all nodes (cortical ROIs + thalamic voxels)
    Thalamus_CIs: vector of network assignments for thalamic voxels
    Cortical_CI: vector of network assignments for cortical ROIs
    Cortical_ROIs_positions: position vector indicating which rows/columns of the thalamocortical adj matrix are cortical ROIs
    Thalamus_voxel_positions: position vector indicating which rows/columns of the thalamocortical adj matrix are thalamic voxels
    cost_thresholds: thresholds that threshold the thalamocortical edges at densities .01 to .15

    Returns
    -------
    Graph metrics across thresholds (suffix "s") or averaged across thresholds (prefix "mean_").
    '''

    ##Thalamus nodal roles
    Thalamocortical_corrmat[np.isnan(Thalamocortical_corrmat)] = 0

    #PC: participation coefficient (weighted and binary variants)
    PCs = []
    bPCs = []
    #BNWR: between-network connectivity weight ratio
    BNWRs = []
    #NNC: number of networks/communities connected
    NNCs = []

    #loop through costs
    for c in cost_thresholds:
        #copy adj matrix and then threshold
        Par_adj = Thalamocortical_corrmat.copy()
        #remove weights connected to low SNR communities (CI==0, orbital frontal, inferior temporal)
        Par_adj[Cortical_ROIs_positions[Cortical_CI == 0], :] = 0
        Par_adj[:, Cortical_ROIs_positions[Cortical_CI == 0]] = 0
        Par_adj[Par_adj < c] = 0

        #binary
        bPar_adj = Par_adj.copy()
        bPar_adj = bPar_adj > c

        #PC
        PCs += [bct.participation_coef(Par_adj, Cortical_plus_thalamus_CI)]
        bPCs += [bct.participation_coef(bPar_adj, Cortical_plus_thalamus_CI)]
        #aPCs += [bct.participation_coef(Par_adj, Cortical_plus_thalamus_CI)]

        #BNWR and NNCs
        Tha_BNWR = np.zeros(Cortical_plus_thalamus_CI.size)
        Tha_NNCs = np.zeros(Cortical_plus_thalamus_CI.size)
        for ix, i in enumerate(Thalamus_voxel_positions):
            sum_between_weight = np.nansum(
                Par_adj[i, Cortical_plus_thalamus_CI != Thalamus_CIs[ix]])
            sum_total = np.nansum(Par_adj[i, :])
            Tha_BNWR[i] = sum_between_weight / sum_total
            Tha_BNWR[i] = np.nan_to_num(Tha_BNWR[i])

            Tha_NNCs[i] = len(
                np.unique(Cortical_plus_thalamus_CI[Par_adj[i, ] != 0]))
        BNWRs += [Tha_BNWR]
        NNCs += [Tha_NNCs]

    ##Cortical nodal roles
    Cortical_adj[np.isnan(Cortical_adj)] = 0

    Cortical_PCs = []
    Cortical_bPCs = []
    Cortical_BNWR = []
    Cortical_NNCs = []

    for ix, c in enumerate(np.arange(0.01, 0.16, 0.01)):
        M = bct.threshold_proportional(Cortical_adj, c, copy=True)
        bM = bct.weight_conversion(M, 'binarize', copy=True)

        #PC
        Cortical_PCs += [bct.participation_coef(M, Cortical_CI)]
        Cortical_bPCs += [bct.participation_coef(bM, Cortical_CI)]

        #BNWR and NNC
        BNWR = np.zeros(Cortical_CI.size)
        Cor_NNCs = np.zeros(Cortical_plus_thalamus_CI.size)
        for i in range(len(Cortical_CI)):
            sum_between_weight = np.nansum(M[i, Cortical_CI != Cortical_CI[i]])
            sum_total = np.nansum(M[i, :])
            BNWR[i] = sum_between_weight / sum_total
            BNWR[i] = np.nan_to_num(BNWR[i])

            Cor_NNCs[i] = len(np.unique(Cortical_CI[M[i, ] != 0]))
        Cortical_BNWR += [BNWR]
        Cortical_NNCs += [Cor_NNCs]

    #do WMD: first convert matrices to binary, then calculate z-scores using the mean and std of corticocortical degrees
    Cortical_wm_mean = {}
    Cortical_wm_std = {}
    Cortical_WMDs = []  #np.zeros(Cortical_CI.size)
    WMDs = []  #np.zeros(Cortical_plus_thalamus_CI.size)
    for ix, c in enumerate(np.arange(0.01, 0.16, 0.01)):

        #threshold by density
        bM = bct.weight_conversion(
            bct.threshold_proportional(Cortical_adj, c, copy=True), 'binarize')
        Cortical_WMDs += [bct.module_degree_zscore(bM, Cortical_CI)]

        #store the mean and std of within-module degree for each module
        for CI in np.unique(Cortical_CI):
            Cortical_wm_mean[ix + 1, CI] = np.nanmean(
                np.sum(bM[Cortical_CI == CI, :][:, Cortical_CI == CI], 1))
            Cortical_wm_std[ix + 1, CI] = np.nanstd(
                np.sum(bM[Cortical_CI == CI, :][:, Cortical_CI == CI], 1))

        #thalamic WMD, threshold by density
        M = bct.weight_conversion(
            bct.threshold_absolute(Thalamocortical_corrmat,
                                   cost_thresholds[ix],
                                   copy=True), 'binarize')

        tha_wmd = np.zeros(Cortical_plus_thalamus_CI.size)
        for i in np.unique(Cortical_CI):
            tha_wmd[Cortical_plus_thalamus_CI == i] = (
                np.sum(M[Cortical_plus_thalamus_CI == i][:, Cortical_plus_thalamus_CI == i], 1)
                - Cortical_wm_mean[ix + 1, i]) / Cortical_wm_std[ix + 1, i]
        tha_wmd = np.nan_to_num(tha_wmd)
        WMDs += [tha_wmd]

    # organize output
    NNCs = np.array(NNCs)
    BNWRs = np.array(BNWRs)
    PCs = np.array(PCs)
    bPCs = np.array(bPCs)
    WMDs = np.array(WMDs)

    NNCs[:, Cortical_ROIs_positions] = np.array(
        Cortical_NNCs)[:, Cortical_ROIs_positions]
    BNWRs[:, Cortical_ROIs_positions] = np.array(
        Cortical_BNWR)[:, Cortical_ROIs_positions]
    PCs[:, Cortical_ROIs_positions] = np.array(
        Cortical_PCs)[:, Cortical_ROIs_positions]
    bPCs[:, Cortical_ROIs_positions] = np.array(
        Cortical_bPCs)[:, Cortical_ROIs_positions]
    WMDs[:, Cortical_ROIs_positions] = np.array(
        Cortical_WMDs)[:, Cortical_ROIs_positions]

    # average across thresholds, convert into percentages
    mean_NNC = (np.sum(NNCs, axis=0) / 15.0) * 100
    mean_BNWR = (np.sum(BNWRs, axis=0) / 15.0) * 100
    # 13.5 is the theoretical upper bound of PC summed over the 15 thresholds
    # (presumably 15 * 0.9, since PC <= 1 - 1/m; 0.9 corresponds to m = 10 modules)
    mean_PC = (np.sum(PCs, axis=0) / 13.5) * 100
    mean_bPC = (np.sum(bPCs, axis=0) / 13.5) * 100
    mean_WMD = (np.sum(WMDs, axis=0) / 15.0) * 100

    return PCs, BNWRs, NNCs, WMDs, bPCs, mean_NNC, mean_BNWR, mean_PC, mean_bPC, mean_WMD
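A shape-level usage sketch with random toy inputs; every name and size below is an illustrative assumption (real inputs come from fMRI preprocessing):

import numpy as np

rng = np.random.default_rng(0)
n_cortex, n_thalamus = 300, 200
n_total = n_cortex + n_thalamus

Thalamocortical_corrmat = rng.random((n_total, n_total))
Cortical_adj = rng.random((n_cortex, n_cortex))
Cortical_CI = rng.integers(0, 11, n_cortex)        # 0 marks low-SNR ROIs
Thalamus_CIs = rng.integers(1, 11, n_thalamus)
Cortical_plus_thalamus_CI = np.concatenate([Cortical_CI, Thalamus_CIs])
Cortical_ROIs_positions = np.arange(n_cortex)
Thalamus_voxel_positions = np.arange(n_cortex, n_total)
# absolute thresholds that keep roughly the top 1% ... 15% of edges
cost_thresholds = [np.percentile(Thalamocortical_corrmat, 100 - d)
                   for d in range(1, 16)]

outputs = cal_thalamus_and_cortical_ROIs_nodal_properties(
    Thalamocortical_corrmat, Cortical_adj, Cortical_plus_thalamus_CI,
    Thalamus_CIs, Cortical_CI, Cortical_ROIs_positions,
    Thalamus_voxel_positions, cost_thresholds)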