Example #1
def density_thresholding(conn_matrix, thr):
    from pynets import thresholding
    import numpy as np
    import networkx as nx
    work_thr = 0.0
    conn_matrix = thresholding.normalize(conn_matrix)
    np.fill_diagonal(conn_matrix, 0)
    i = 1
    thr_max = 0.50
    G = nx.from_numpy_matrix(conn_matrix)
    density = nx.density(G)
    while float(work_thr) <= float(thr_max) and float(density) > float(thr):
        work_thr = float(work_thr) + float(0.01)
        conn_matrix = thresholding.threshold_proportional(conn_matrix, work_thr)
        G = nx.from_numpy_matrix(conn_matrix)
        density = nx.density(G)
        print("%s%d%s%.2f%s%.2f%s" % ('Iteratively thresholding -- Iteration ', i, ' -- with thresh: ', float(work_thr), ' and Density: ', float(density), '...'))
        i = i + 1
    return conn_matrix
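
A minimal usage sketch (hedged: assumes pynets is installed and a networkx version that still provides from_numpy_matrix, i.e. networkx < 3.0; the matrix below is synthetic):

import numpy as np

rng = np.random.default_rng(0)
W = rng.random((20, 20))
W = (W + W.T) / 2            # symmetrize into an undirected connectivity matrix
np.fill_diagonal(W, 0)

# iteratively raise the proportional threshold until density drops to ~0.3
W_thr = density_thresholding(W, thr=0.3)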
Example #2
def plot_connectogram(conn_matrix, conn_model, atlas_name, dir_path, ID,
                      NETWORK, label_names):
    import json
    import numpy as np
    import networkx as nx
    from pynets.thresholding import normalize
    from pathlib import Path
    from random import sample
    from string import ascii_uppercase, ascii_lowercase
    from scipy.cluster.hierarchy import linkage, fcluster
    from nipype.utils.filemanip import save_json
    link_comm = True

    conn_matrix = normalize(conn_matrix)
    G = nx.from_numpy_matrix(conn_matrix)

    def doClust(X, clust_levels):
        # get the linkage diagram
        Z = linkage(X, 'ward')
        # choose # cluster levels
        cluster_levels = range(1, int(clust_levels))
        # init array to store labels for each level
        clust_levels_tmp = int(clust_levels) - 1
        label_arr = np.zeros((int(clust_levels_tmp), int(X.shape[0])))
        # iterate through levels
        for c in cluster_levels:
            fl = fcluster(Z, c, criterion='maxclust')
            #print(fl)
            label_arr[c - 1, :] = fl
        return label_arr, clust_levels_tmp

    if NETWORK is not None:
        clust_levels = 3
        [label_arr, clust_levels_tmp] = doClust(conn_matrix, clust_levels)
    else:
        if link_comm:
            from pynets.netstats import link_communities
            #G_lin = nx.line_graph(G)
            # Plot link communities
            node_comm_aff_mat = link_communities(conn_matrix,
                                                 type_clustering='single')
            clust_levels = len(node_comm_aff_mat)
            clust_levels_tmp = int(clust_levels) - 1
            mask_mat = np.squeeze(
                np.array([node_comm_aff_mat == 0]).astype('int'))
            label_arr = node_comm_aff_mat * np.expand_dims(
                np.arange(1, clust_levels + 1), axis=1) + mask_mat
        #else:
        # Plot node communities
        #from pynets.netstats import community_louvain
        #[ci, q] = community_louvain(conn_matrix, gamma=0.75)
        #clust_levels = len(np.unique(ci))
        #clust_levels_tmp = int(clust_levels) - 1

    def get_node_label(node_idx, labels, clust_levels_tmp):
        def get_letters(n, random=False, uppercase=False):
            """Return n letters of the alphabet."""
            letters = (ascii_uppercase if uppercase else ascii_lowercase)
            return json.dumps(
                (sample(letters, n) if random else list(letters[:n])))

        abet = get_letters(clust_levels_tmp)
        node_labels = labels[:, node_idx]
        return ".".join([
            "{}{}".format(abet[i], int(l)) for i, l in enumerate(node_labels)
        ]) + ".{}".format(label_names[node_idx])

    output = []
    # Graph.adjacency_list() was removed in networkx 2.x; Graph.adjacency()
    # yields (node, neighbors) pairs carrying the same information
    for node_idx, (_, nbrs) in enumerate(G.adjacency()):
        connections = list(nbrs)
        weight_vec = []
        for i in connections:
            wei = G.get_edge_data(node_idx, int(i))['weight']
            #wei = G_lin.get_edge_data(node_idx,int(i))['weight']
            weight_vec.append(wei)
        entry = {}
        nodes_label = get_node_label(node_idx, label_arr, clust_levels_tmp)
        entry["name"] = nodes_label
        entry["size"] = len(connections)
        entry["imports"] = [
            get_node_label(int(d) - 1, label_arr, clust_levels_tmp)
            for d in connections
        ]
        entry["weights"] = weight_vec
        output.append(entry)

    if NETWORK is not None:
        json_file_name = str(
            ID
        ) + '_' + NETWORK + '_connectogram_' + conn_model + '_network.json'
        connectogram_plot = dir_path + '/' + json_file_name
        connectogram_js_sub = dir_path + '/' + str(
            ID) + '_' + NETWORK + '_connectogram_' + conn_model + '_network.js'
        connectogram_js_name = str(
            ID) + '_' + NETWORK + '_connectogram_' + conn_model + '_network.js'
    else:
        json_file_name = str(ID) + '_connectogram_' + conn_model + '.json'
        connectogram_plot = dir_path + '/' + json_file_name
        connectogram_js_sub = dir_path + '/' + str(
            ID) + '_connectogram_' + conn_model + '.js'
        connectogram_js_name = str(ID) + '_connectogram_' + conn_model + '.js'
    save_json(connectogram_plot, output)

    # Copy index.html and json to dir_path
    #conn_js_path = '/Users/PSYC-dap3463/Applications/PyNets/pynets/connectogram.js'
    #index_html_path = '/Users/PSYC-dap3463/Applications/PyNets/pynets/index.html'
    conn_js_path = Path(__file__).parent / "connectogram.js"
    index_html_path = Path(__file__).parent / "index.html"
    replacements_html = {'connectogram.js': str(connectogram_js_name)}
    with open(index_html_path) as infile, open(str(dir_path + '/index.html'),
                                               'w') as outfile:
        for line in infile:
            for src, target in replacements_html.items():
                line = line.replace(src, target)
            outfile.write(line)
    replacements_js = {'template.json': str(json_file_name)}
    with open(conn_js_path) as infile, open(connectogram_js_sub,
                                            'w') as outfile:
        for line in infile:
            for src, target in replacements_js.items():
                line = line.replace(src, target)
            outfile.write(line)
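
A hedged usage sketch for this variant: the subject ID and output directory are hypothetical, dir_path must already exist and be writable, and the connectogram.js/index.html templates are read from the installed pynets package. With NETWORK=None, the link-community branch above is taken.

import numpy as np

n = 50
rng = np.random.default_rng(1)
W = rng.random((n, n))
W = (W + W.T) / 2
label_names = ['roi_%i' % i for i in range(n)]   # hypothetical ROI names

plot_connectogram(W, 'corr', 'my_atlas', '/tmp/pynets_out', 'sub-01',
                  None, label_names)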
Example #3
def plot_connectogram(conn_matrix, conn_model, atlas_select, dir_path, ID,
                      network, label_names):
    import json
    import numpy as np
    import networkx as nx
    from pathlib import Path
    from networkx.readwrite import json_graph
    from pynets.thresholding import normalize
    from pynets.netstats import most_important
    from scipy.cluster.hierarchy import linkage, fcluster
    from nipype.utils.filemanip import save_json

    # Advanced Settings
    comm = 'nodes'
    pruned = False
    #color_scheme = 'interpolateCool'
    #color_scheme = 'interpolateGnBu'
    #color_scheme = 'interpolateOrRd'
    #color_scheme = 'interpolatePuRd'
    #color_scheme = 'interpolateYlOrRd'
    #color_scheme = 'interpolateReds'
    #color_scheme = 'interpolateGreens'
    color_scheme = 'interpolateBlues'
    # End Advanced Settings

    conn_matrix = normalize(conn_matrix)
    G = nx.from_numpy_matrix(conn_matrix)
    if pruned is True:
        [G, pruned_nodes] = most_important(G)
        conn_matrix = nx.to_numpy_array(G)

        pruned_nodes.sort(reverse=True)
        for j in pruned_nodes:
            del label_names[j]

    def doClust(X, clust_levels):
        # get the linkage diagram
        Z = linkage(X, 'ward')
        # choose # cluster levels
        cluster_levels = range(1, int(clust_levels))
        # init array to store labels for each level
        clust_levels_tmp = int(clust_levels) - 1
        label_arr = np.zeros((int(clust_levels_tmp), int(X.shape[0])))
        # iterate thru levels
        for c in cluster_levels:
            fl = fcluster(Z, c, criterion='maxclust')
            #print(fl)
            label_arr[c - 1, :] = fl
        return label_arr, clust_levels_tmp

    if comm == 'nodes' and len(conn_matrix) > 40:
        from pynets.netstats import modularity_louvain_und_sign

        gamma = nx.density(nx.from_numpy_array(conn_matrix))
        try:
            [node_comm_aff_mat,
             q] = modularity_louvain_und_sign(conn_matrix, gamma=float(gamma))
            print("%s%s%s%s%s" %
                  ('Found ', str(len(np.unique(node_comm_aff_mat))),
                   ' communities with γ=', str(gamma), '...'))
        except:
            print(
                'WARNING: Louvain community detection failed. Proceeding with single community affiliation vector...'
            )
            node_comm_aff_mat = np.ones(conn_matrix.shape[0]).astype('int')
        clust_levels = len(node_comm_aff_mat)
        clust_levels_tmp = int(clust_levels) - 1
        mask_mat = np.squeeze(np.array([node_comm_aff_mat == 0]).astype('int'))
        label_arr = node_comm_aff_mat * np.expand_dims(
            np.arange(1, clust_levels + 1), axis=1) + mask_mat
    elif comm == 'links' and len(conn_matrix) > 40:
        from pynets.netstats import link_communities
        # Plot link communities
        link_comm_aff_mat = link_communities(conn_matrix,
                                             type_clustering='single')
        print("%s%s%s" %
              ('Found ', str(len(link_comm_aff_mat)), ' communities...'))
        clust_levels = len(link_comm_aff_mat)
        clust_levels_tmp = int(clust_levels) - 1
        mask_mat = np.squeeze(np.array([link_comm_aff_mat == 0]).astype('int'))
        label_arr = link_comm_aff_mat * np.expand_dims(
            np.arange(1, clust_levels + 1), axis=1) + mask_mat
    else:
        # graphs with <= 40 nodes: community detection is unreliable, so fall
        # back to hierarchical fcluster labels (the original 'elif ... > 20'
        # left label_arr undefined for very small graphs)
        print(
            'Graph too small for reliable plotting of communities. Plotting by fcluster instead...'
        )
        if len(conn_matrix) >= 250:
            clust_levels = 7
        elif len(conn_matrix) >= 200:
            clust_levels = 6
        elif len(conn_matrix) >= 150:
            clust_levels = 5
        elif len(conn_matrix) >= 100:
            clust_levels = 4
        elif len(conn_matrix) >= 50:
            clust_levels = 3
        else:
            clust_levels = 2
        [label_arr, clust_levels_tmp] = doClust(conn_matrix, clust_levels)

    def get_node_label(node_idx, labels, clust_levels_tmp):
        from collections import OrderedDict

        def write_roman(num):
            roman = OrderedDict()
            roman[1000] = "M"
            roman[900] = "CM"
            roman[500] = "D"
            roman[400] = "CD"
            roman[100] = "C"
            roman[90] = "XC"
            roman[50] = "L"
            roman[40] = "XL"
            roman[10] = "X"
            roman[9] = "IX"
            roman[5] = "V"
            roman[4] = "IV"
            roman[1] = "I"

            def roman_num(num):
                # the loop itself walks every denomination, so the recursive
                # call in the original was dead code
                for r in roman.keys():
                    x, y = divmod(num, r)
                    yield roman[r] * x
                    num -= (r * x)
                    if num <= 0:
                        break

            return "".join([a for a in roman_num(num)])

        rn_list = []
        node_idx = node_idx - 1
        node_labels = labels[:, node_idx]
        for k in [int(l) for l in node_labels]:
            rn_list.append(json.dumps(write_roman(k)))
        abet = rn_list
        node_lab_alph = ".".join([
            "{}{}".format(abet[i], int(l)) for i, l in enumerate(node_labels)
        ]) + ".{}".format(label_names[node_idx])
        return node_lab_alph

    output = []

    adj_dict = {}
    for i in list(G.adjacency()):
        source = list(i)[0]
        target = list(list(i)[1])
        adj_dict[source] = target

    for node_idx, connections in adj_dict.items():
        weight_vec = []
        for i in connections:
            wei = G.get_edge_data(node_idx, int(i))['weight']
            weight_vec.append(wei)
        entry = {}
        nodes_label = get_node_label(node_idx, label_arr, clust_levels_tmp)
        entry["name"] = nodes_label
        entry["size"] = len(connections)
        entry["imports"] = [
            get_node_label(int(d) - 1, label_arr, clust_levels_tmp)
            for d in connections
        ]
        entry["weights"] = weight_vec
        output.append(entry)

    if network:
        json_file_name = "%s%s%s%s%s%s" % (str(ID), '_', network,
                                           '_connectogram_', conn_model,
                                           '_network.json')
        json_fdg_file_name = "%s%s%s%s%s%s" % (str(ID), '_', network, '_fdg_',
                                               conn_model, '_network.json')
        connectogram_plot = "%s%s%s" % (dir_path, '/', json_file_name)
        fdg_js_sub = "%s%s%s%s%s%s%s%s" % (dir_path, '/', str(ID), '_',
                                           network, '_fdg_', conn_model,
                                           '_network.js')
        fdg_js_sub_name = "%s%s%s%s%s%s" % (str(ID), '_', network, '_fdg_',
                                            conn_model, '_network.js')
        connectogram_js_sub = "%s%s%s%s%s%s%s%s" % (dir_path, '/', str(
            ID), '_', network, '_connectogram_', conn_model, '_network.js')
        connectogram_js_name = "%s%s%s%s%s%s" % (
            str(ID), '_', network, '_connectogram_', conn_model, '_network.js')
    else:
        json_file_name = "%s%s%s%s" % (str(ID), '_connectogram_', conn_model,
                                       '.json')
        json_fdg_file_name = "%s%s%s%s" % (str(ID), '_fdg_', conn_model,
                                           '.json')
        connectogram_plot = "%s%s%s" % (dir_path, '/', json_file_name)
        connectogram_js_sub = "%s%s%s%s%s%s" % (
            dir_path, '/', str(ID), '_connectogram_', conn_model, '.js')
        fdg_js_sub = "%s%s%s%s%s%s" % (dir_path, '/', str(ID), '_fdg_',
                                       conn_model, '.js')
        fdg_js_sub_name = "%s%s%s%s" % (str(ID), '_fdg_', conn_model, '.js')
        connectogram_js_name = "%s%s%s%s" % (str(ID), '_connectogram_',
                                             conn_model, '.js')
    save_json(connectogram_plot, output)

    # Force-directed graphing
    G = nx.from_numpy_matrix(np.round(conn_matrix.astype('float64'), 6))
    data = json_graph.node_link_data(G)
    data.pop('directed', None)
    data.pop('graph', None)
    data.pop('multigraph', None)
    for k in range(len(data['links'])):
        data['links'][k]['value'] = data['links'][k].pop('weight')
    for k in range(len(data['nodes'])):
        data['nodes'][k]['id'] = str(data['nodes'][k]['id'])
    for k in range(len(data['links'])):
        data['links'][k]['source'] = str(data['links'][k]['source'])
        data['links'][k]['target'] = str(data['links'][k]['target'])

    # Add community structure
    for k in range(len(data['nodes'])):
        data['nodes'][k]['group'] = str(label_arr[0][k])

    # Add node labels
    for k in range(len(data['nodes'])):
        data['nodes'][k]['name'] = str(label_names[k])

    out_file = "%s%s%s" % (dir_path, '/', str(json_fdg_file_name))
    save_json(out_file, data)

    # Copy index.html and json to dir_path
    #conn_js_path = '/Users/PSYC-dap3463/Applications/PyNets/pynets/connectogram.js'
    #index_html_path = '/Users/PSYC-dap3463/Applications/PyNets/pynets/index.html'
    conn_js_path = str(Path(__file__).parent / "connectogram.js")
    index_html_path = str(Path(__file__).parent / "index.html")
    fdg_replacements_js = {"FD_graph.json": str(json_fdg_file_name)}
    replacements_html = {
        'connectogram.js': str(connectogram_js_name),
        'fdg.js': str(fdg_js_sub_name)
    }
    fdg_js_path = str(Path(__file__).parent / "fdg.js")
    with open(index_html_path) as infile, open(str(dir_path + '/index.html'),
                                               'w') as outfile:
        for line in infile:
            for src, target in replacements_html.items():
                line = line.replace(src, target)
            outfile.write(line)

    replacements_js = {
        'template.json': str(json_file_name),
        'interpolateCool': str(color_scheme)
    }
    with open(conn_js_path) as infile, open(connectogram_js_sub,
                                            'w') as outfile:
        for line in infile:
            for src, target in replacements_js.items():
                line = line.replace(src, target)
            outfile.write(line)

    with open(fdg_js_path) as infile, open(fdg_js_sub, 'w') as outfile:
        for line in infile:
            for src, target in fdg_replacements_js.items():
                line = line.replace(src, target)
            outfile.write(line)

    return
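
A hedged sketch for this newer variant; with more than 40 nodes and comm = 'nodes', the Louvain branch above is exercised (paths, IDs, and labels are hypothetical):

import numpy as np

n = 60
rng = np.random.default_rng(2)
W = rng.random((n, n))
W = (W + W.T) / 2
label_names = ['node_%i' % i for i in range(n)]

plot_connectogram(W, 'partcorr', 'my_atlas', '/tmp/pynets_out', 'sub-01',
                  None, label_names)   # network=None -> un-suffixed output filenames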
Example #4
def structural_plotting(conn_matrix,
                        uatlas,
                        streamlines_mni,
                        template_mask,
                        interactive=False):
    """

    :param conn_matrix:
    :param uatlas:
    :param streamlines_mni:
    :param template_mask:
    :param interactive:
    :return:
    """
    import nibabel as nib
    import numpy as np
    import networkx as nx
    import os
    import pkg_resources
    from nibabel.affines import apply_affine
    from fury import actor, window, colormap, ui
    from dipy.tracking.utils import streamline_near_roi
    from nilearn.plotting import find_parcellation_cut_coords
    from nilearn.image import resample_to_img
    from pynets.thresholding import normalize
    from pynets.nodemaker import mmToVox

    ch2better_loc = pkg_resources.resource_filename(
        "pynets", "templates/ch2better.nii.gz")

    # Instantiate scene
    r = window.Renderer()

    # Set camera
    r.set_camera(position=(-176.42, 118.52, 128.20),
                 focal_point=(113.30, 128.31, 76.56),
                 view_up=(0.18, 0.00, 0.98))

    # Load atlas rois
    atlas_img = nib.load(uatlas)
    atlas_img_data = atlas_img.get_fdata()  # get_data() was removed in nibabel 5.x

    # Collapse list of connected streamlines for visualization
    streamlines = nib.streamlines.load(streamlines_mni).streamlines
    parcels = []
    for roi in np.unique(atlas_img_data)[1:]:
        parcels.append(atlas_img_data == roi)

    # Add streamlines as cloud of 'white-matter'
    streamlines_actor = actor.line(streamlines,
                                   colormap.create_colormap(np.ones(
                                       [len(streamlines)]),
                                                            name='Greys_r',
                                                            auto=True),
                                   lod_points=10000,
                                   depth_cue=True,
                                   linewidth=0.2,
                                   fake_tube=True,
                                   opacity=1.0)
    r.add(streamlines_actor)

    # Create a palette of ROI colors and add them to the scene as faint contours
    roi_colors = np.random.rand(int(np.max(atlas_img_data)), 3)
    parcel_contours = []
    i = 0
    for roi in np.unique(atlas_img_data)[1:]:
        include_roi_coords = np.array(np.where(atlas_img_data == roi)).T
        x_include_roi_coords = apply_affine(np.eye(4), include_roi_coords)
        bool_list = []
        for sl in streamlines:
            bool_list.append(
                streamline_near_roi(sl,
                                    x_include_roi_coords,
                                    tol=1.0,
                                    mode='either_end'))
        if sum(bool_list) > 0:
            print('ROI: ' + str(i))
            parcel_contours.append(
                actor.contour_from_roi(atlas_img_data == roi,
                                       color=roi_colors[i],
                                       opacity=0.2))
        else:
            pass
        i = i + 1

    for vol_actor in parcel_contours:
        r.add(vol_actor)

    # Get voxel coordinates of parcels and add them as 3d spherical centroid nodes
    # note: nilearn's keyword is return_label_names (return_labels is not a valid kwarg)
    [coords, labels] = find_parcellation_cut_coords(atlas_img,
                                                    background_label=0,
                                                    return_label_names=True)

    coords_vox = []
    for i in coords:
        coords_vox.append(mmToVox(atlas_img.affine, i))
    coords_vox = list(set(list(tuple(x) for x in coords_vox)))

    # Build an edge list of 3d lines
    G = nx.from_numpy_array(normalize(conn_matrix))
    for i in G.nodes():
        nx.set_node_attributes(G, {i: coords_vox[i]}, labels[i])

    G.remove_nodes_from(list(nx.isolates(G)))
    G_filt = nx.Graph()
    fedges = filter(lambda x: G.degree()[x[0]] > 0 and G.degree()[x[1]] > 0,
                    G.edges())
    G_filt.add_edges_from(fedges)

    coord_nodes = []
    for i in range(len(G.edges())):
        edge = list(G.edges())[i]
        [x, y] = edge
        x_coord = list(G.nodes[x].values())[0]
        x_label = list(G.nodes[x].keys())[0]
        l_x = actor.label(text=str(x_label),
                          pos=x_coord,
                          scale=(1, 1, 1),
                          color=(50, 50, 50))
        r.add(l_x)
        y_coord = list(G.nodes[y].values())[0]
        y_label = list(G.nodes[y].keys())[0]
        l_y = actor.label(text=str(y_label),
                          pos=y_coord,
                          scale=(1, 1, 1),
                          color=(50, 50, 50))
        r.add(l_y)
        coord_nodes.append(x_coord)
        coord_nodes.append(y_coord)
        c = actor.line([(x_coord, y_coord)],
                       window.colors.coral,
                       linewidth=100 *
                       float(G.get_edge_data(x, y)['weight']) ** 2)  # '^' is XOR in Python; exponentiation is '**'
        r.add(c)

    point_actor = actor.point(list(set(coord_nodes)),
                              window.colors.grey,
                              point_radius=0.75)
    r.add(point_actor)

    # Load glass brain template and resample to MNI152_2mm brain
    template_img = nib.load(ch2better_loc)
    template_target_img = nib.load(template_mask)
    res_brain_img = resample_to_img(template_img, template_target_img)
    template_img_data = res_brain_img.get_fdata().astype('bool')
    template_actor = actor.contour_from_roi(template_img_data,
                                            color=(50, 50, 50),
                                            opacity=0.05)
    r.add(template_actor)

    # Show scene
    if interactive is True:
        window.show(r, size=(600, 600), reset_camera=False)
    else:
        fig_path = os.path.dirname(streamlines_mni) + '/3d_connectome_fig.png'
        window.record(r, out_path=fig_path, size=(600, 600))

    return
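
A hedged usage sketch; every file path below is hypothetical, and fury, dipy, nilearn, and nibabel must be installed along with the bundled ch2better.nii.gz template:

import numpy as np

W = np.random.rand(90, 90)
W = (W + W.T) / 2            # synthetic 90-node connectivity matrix

structural_plotting(W,
                    '/tmp/atlas_labels.nii.gz',              # hypothetical parcellation
                    '/tmp/streamlines_mni.trk',              # hypothetical MNI-space streamlines
                    '/tmp/MNI152_T1_2mm_brain_mask.nii.gz',  # hypothetical template mask
                    interactive=False)                       # saves 3d_connectome_fig.png next to the .trk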
Example #5
def extractnetstats(ID,
                    network,
                    thr,
                    conn_model,
                    est_path,
                    mask,
                    out_file=None):
    import os
    import numpy as np
    import networkx as nx
    from pynets import thresholding, utils
    # assumed module-level imports from the original pynets source:
    from pynets.netstats import most_important, core_periphery_dir, average_shortest_path_length_for_all
    from networkx import rich_club_coefficient

    pruning = True

    # Load and threshold matrix
    in_mat = np.array(np.genfromtxt(est_path))
    in_mat = thresholding.autofix(in_mat)

    # Normalize connectivity matrix (weights between 0-1)
    in_mat = thresholding.normalize(in_mat)

    # Apply the inverse hyperbolic tangent (i.e. Fisher r-to-z transform) if the matrix is non-sparse
    if conn_model == 'corr':
        in_mat = np.arctanh(in_mat)
        in_mat[np.isnan(in_mat)] = 0
        in_mat[np.isinf(in_mat)] = 1

    # Get dir_path
    dir_path = os.path.dirname(os.path.realpath(est_path))

    # Load numpy matrix as networkx graph
    G_pre = nx.from_numpy_matrix(in_mat)

    # Prune irrelevant nodes (i.e. nodes that are fully disconnected from the graph and/or whose betweenness centrality is > 3 standard deviations below the mean)
    if pruning:
        [G_pruned, _, _] = most_important(G_pre)
    else:
        G_pruned = G_pre

    # Make directed if sparse
    if conn_model != 'corr' and conn_model != 'cov' and conn_model != 'tangent':
        G_di = nx.DiGraph(G_pruned)
        G_dir = G_di.to_directed()
        G = G_pruned
    else:
        G = G_pruned

    # Get corresponding matrix
    in_mat = nx.to_numpy_array(G)

    # Print graph summary
    print('\n\nThreshold: ' + str(thr))
    print('Source File: ' + str(est_path))
    info_list = list(nx.info(G).split('\n'))[2:]
    for i in info_list:
        print(i)

    try:
        G_dir
        print('Analyzing DIRECTED graph when applicable...')
    except:
        print('Graph is UNDIRECTED')

    if conn_model == 'corr' or conn_model == 'cov' or conn_model == 'tangent':
        if nx.is_connected(G):
            num_conn_comp = nx.number_connected_components(G)
            print('Graph is CONNECTED with ' + str(num_conn_comp) +
                  ' connected component(s)')
        else:
            print('Graph is DISCONNECTED')
    print('\n')

    # Create length matrix
    mat_len = thresholding.weight_conversion(in_mat, 'lengths')
    # Load numpy matrix as networkx graph
    G_len = nx.from_numpy_matrix(mat_len)

    # Save G as a graphml (Gephi-compatible) file
    if mask:
        if network:
            nx.write_graphml(
                G, dir_path + '/' + ID + '_' + network + '_' +
                str(os.path.basename(mask).split('.')[0]) + '.graphml')
        else:
            nx.write_graphml(
                G, dir_path + '/' + ID + '_' +
                str(os.path.basename(mask).split('.')[0]) + '.graphml')
    else:
        if network:
            nx.write_graphml(G,
                             dir_path + '/' + ID + '_' + network + '.graphml')
        else:
            nx.write_graphml(G, dir_path + '/' + ID + '.graphml')

    ###############################################################
    ########### Calculate graph metrics from graph G ##############
    ###############################################################
    from networkx.algorithms import degree_assortativity_coefficient, average_clustering, average_shortest_path_length, degree_pearson_correlation_coefficient, graph_number_of_cliques, transitivity, betweenness_centrality, eigenvector_centrality, communicability_betweenness_centrality, clustering, degree_centrality
    from pynets.netstats import average_local_efficiency, global_efficiency, local_efficiency, modularity_louvain_dir, smallworldness
    # For non-nodal scalar metrics from custom functions, add the name of the function to metric_list and add the function (with a G-only input) to the netstats module.
    metric_list = [
        global_efficiency, average_local_efficiency, smallworldness,
        degree_assortativity_coefficient, average_clustering,
        average_shortest_path_length, degree_pearson_correlation_coefficient,
        graph_number_of_cliques, transitivity
    ]

    # Custom weight parameter
    #custom_weight = 0.25
    custom_weight = None

    # Iteratively run functions from the metric list above that generate a single scalar output
    num_mets = len(metric_list)
    net_met_arr = np.zeros([num_mets, 2], dtype='object')
    j = 0
    for i in metric_list:
        met_name = str(i).split('<function ')[1].split(' at')[0]
        net_met = met_name
        try:
            # the original compared the function object itself to a string with
            # 'is' (always False); compare the extracted name instead
            if met_name == 'average_shortest_path_length':
                try:
                    try:
                        net_met_val = float(i(G_dir))
                        print('Calculating from directed graph...')
                    except:
                        net_met_val = float(i(G))
                except:
                    # case where G is not fully connected
                    net_met_val = float(
                        average_shortest_path_length_for_all(G))
            elif custom_weight is not None and met_name in (
                    'degree_assortativity_coefficient', 'global_efficiency',
                    'average_local_efficiency', 'average_clustering'):
                # disabled by default (custom_weight = None); not every metric
                # accepts a weight keyword, hence the directed/undirected fallback
                try:
                    net_met_val = float(i(G_dir, weight=custom_weight))
                    print('Calculating from directed graph...')
                except:
                    net_met_val = float(i(G, weight=custom_weight))
            else:
                try:
                    net_met_val = float(i(G_dir))
                    print('Calculating from directed graph...')
                except:
                    net_met_val = float(i(G))
        except:
            net_met_val = np.nan
        net_met_arr[j, 0] = net_met
        net_met_arr[j, 1] = net_met_val
        print(net_met)
        print(str(net_met_val))
        print('\n')
        j = j + 1
    net_met_val_list = list(net_met_arr[:, 1])

    # Run miscellaneous functions that generate multiple outputs
    # Calculate modularity using the Louvain algorithm
    [community_aff, modularity] = modularity_louvain_dir(in_mat)

    # Calculate core-periphery subdivision
    [Coreness_vec, Coreness_q] = core_periphery_dir(in_mat)

    # Local Efficiency
    try:
        try:
            le_vector = local_efficiency(G_dir)
        except:
            le_vector = local_efficiency(G)
        print('\nExtracting Local Efficiency vector for all network nodes...')
        le_vals = list(le_vector.values())
        le_nodes = list(le_vector.keys())
        num_nodes = len(le_nodes)
        le_arr = np.zeros([num_nodes + 1, 2], dtype='object')
        j = 0
        for i in range(num_nodes):
            le_arr[j, 0] = str(le_nodes[j]) + '_local_efficiency'
            #print('\n' + str(le_nodes[j]) + '_local_efficiency')
            try:
                le_arr[j, 1] = le_vals[j]
            except:
                le_arr[j, 1] = np.nan
            #print(str(le_vals[j]))
            j = j + 1
        le_arr[num_nodes, 0] = 'MEAN_local_efficiency'
        nonzero_arr_le = np.delete(le_arr[:, 1], [0])
        le_arr[num_nodes, 1] = np.mean(nonzero_arr_le)
        print('Mean Local Efficiency across nodes: ' +
              str(le_arr[num_nodes, 1]))
        print('\n')
    except:
        pass

    # Local Clustering
    try:
        cl_vector = clustering(G)
        print('\nExtracting Local Clustering vector for all network nodes...')
        cl_vals = list(cl_vector.values())
        cl_nodes = list(cl_vector.keys())
        num_nodes = len(cl_nodes)
        cl_arr = np.zeros([num_nodes + 1, 2], dtype='object')
        j = 0
        for i in range(num_nodes):
            cl_arr[j, 0] = str(cl_nodes[j]) + '_local_clustering'
            #print('\n' + str(cl_nodes[j]) + '_local_clustering')
            try:
                cl_arr[j, 1] = cl_vals[j]
            except:
                cl_arr[j, 1] = np.nan
            #print(str(cl_vals[j]))
            j = j + 1
        cl_arr[num_nodes, 0] = 'MEAN_local_clustering'  # was mislabeled 'MEAN_local_efficiency'
        nonzero_arr_cl = np.delete(cl_arr[:, 1], [0])
        cl_arr[num_nodes, 1] = np.mean(nonzero_arr_cl)
        print('Mean Local Clustering across nodes: ' +
              str(cl_arr[num_nodes, 1]))
        print('\n')
    except:
        pass

    # Degree centrality
    try:
        try:
            dc_vector = degree_centrality(G_dir)
        except:
            dc_vector = degree_centrality(G)
        print('\nExtracting Degree Centrality vector for all network nodes...')
        dc_vals = list(dc_vector.values())
        dc_nodes = list(dc_vector.keys())
        num_nodes = len(dc_nodes)
        dc_arr = np.zeros([num_nodes + 1, 2], dtype='object')
        j = 0
        for i in range(num_nodes):
            dc_arr[j, 0] = str(dc_nodes[j]) + '_degree_centrality'
            #print('\n' + str(dc_nodes[j]) + '_degree_centrality')
            try:
                dc_arr[j, 1] = dc_vals[j]
            except:
                dc_arr[j, 1] = np.nan
            #print(str(cl_vals[j]))
            j = j + 1
        dc_arr[num_nodes, 0] = 'MEAN_degree_centrality'
        nonzero_arr_dc = np.delete(dc_arr[:, 1], [0])
        dc_arr[num_nodes, 1] = np.mean(nonzero_arr_dc)
        print('Mean Degree Centrality across nodes: ' +
              str(dc_arr[num_nodes, 1]))
        print('\n')
    except:
        pass

    # Betweenness Centrality
    try:
        bc_vector = betweenness_centrality(G_len, normalized=True)
        print(
            '\nExtracting Betweenness Centrality vector for all network nodes...'
        )
        bc_vals = list(bc_vector.values())
        bc_nodes = list(bc_vector.keys())
        num_nodes = len(bc_nodes)
        bc_arr = np.zeros([num_nodes + 1, 2], dtype='object')
        j = 0
        for i in range(num_nodes):
            bc_arr[j, 0] = str(bc_nodes[j]) + '_betweenness_centrality'
            #print('\n' + str(bc_nodes[j]) + '_betw_cent')
            try:
                bc_arr[j, 1] = bc_vals[j]
            except:
                bc_arr[j, 1] = np.nan
            #print(str(bc_vals[j]))
            j = j + 1
        bc_arr[num_nodes, 0] = 'MEAN_betw_cent'
        nonzero_arr_betw_cent = np.delete(bc_arr[:, 1], [0])
        bc_arr[num_nodes, 1] = np.mean(nonzero_arr_betw_cent)
        print('Mean Betweenness Centrality across nodes: ' +
              str(bc_arr[num_nodes, 1]))
        print('\n')
    except:
        pass

    # Eigenvector Centrality
    try:
        try:
            ec_vector = eigenvector_centrality(G_dir, max_iter=1000)
        except:
            ec_vector = eigenvector_centrality(G, max_iter=1000)
        print(
            '\nExtracting Eigenvector Centrality vector for all network nodes...'
        )
        ec_vals = list(ec_vector.values())
        ec_nodes = list(ec_vector.keys())
        num_nodes = len(ec_nodes)
        ec_arr = np.zeros([num_nodes + 1, 2], dtype='object')
        j = 0
        for i in range(num_nodes):
            ec_arr[j, 0] = str(ec_nodes[j]) + '_eigenvector_centrality'
            #print('\n' + str(ec_nodes[j]) + '_eig_cent')
            try:
                ec_arr[j, 1] = ec_vals[j]
            except:
                ec_arr[j, 1] = np.nan
            #print(str(ec_vals[j]))
            j = j + 1
        ec_arr[num_nodes, 0] = 'MEAN_eig_cent'
        nonzero_arr_eig_cent = np.delete(ec_arr[:, 1], [0])
        ec_arr[num_nodes, 1] = np.mean(nonzero_arr_eig_cent)
        print('Mean Eigenvector Centrality across nodes: ' +
              str(ec_arr[num_nodes, 1]))
        print('\n')
    except:
        pass

    # Communicability Centrality
    try:
        cc_vector = communicability_betweenness_centrality(G, normalized=True)
        print(
            '\nExtracting Communicability Centrality vector for all network nodes...'
        )
        cc_vals = list(cc_vector.values())
        cc_nodes = list(cc_vector.keys())
        num_nodes = len(cc_nodes)
        cc_arr = np.zeros([num_nodes + 1, 2], dtype='object')
        j = 0
        for i in range(num_nodes):
            cc_arr[j, 0] = str(cc_nodes[j]) + '_communicability_centrality'
            #print('\n' + str(cc_nodes[j]) + '_comm_cent')
            try:
                cc_arr[j, 1] = cc_vals[j]
            except:
                cc_arr[j, 1] = np.nan
            #print(str(cc_vals[j]))
            j = j + 1
        cc_arr[num_nodes, 0] = 'MEAN_comm_cent'
        nonzero_arr_comm_cent = np.delete(cc_arr[:, 1], [0])
        cc_arr[num_nodes, 1] = np.mean(nonzero_arr_comm_cent)
        print('Mean Communicability Centrality across nodes: ' +
              str(cc_arr[num_nodes, 1]))
        print('\n')
    except:
        pass

    # Rich club coefficient
    try:
        rc_vector = rich_club_coefficient(G, normalized=True)
        print(
            '\nExtracting Rich Club Coefficient vector for all network nodes...'
        )
        rc_vals = list(rc_vector.values())
        rc_edges = list(rc_vector.keys())
        num_edges = len(rc_edges)
        rc_arr = np.zeros([num_edges + 1, 2], dtype='object')
        j = 0
        for i in range(num_edges):
            rc_arr[j, 0] = str(rc_edges[j]) + '_rich_club'
            #print('\n' + str(rc_edges[j]) + '_rich_club')
            try:
                rc_arr[j, 1] = rc_vals[j]
            except:
                rc_arr[j, 1] = np.nan
            #print(str(rc_vals[j]))
            j = j + 1
        # Add mean
        rc_arr[num_edges, 0] = 'MEAN_rich_club'
        nonzero_arr_rich_club = np.delete(rc_arr[:, 1], [0])
        rc_arr[num_edges, 1] = np.mean(nonzero_arr_rich_club)
        print('Mean Rich Club Coefficient across edges: ' +
              str(rc_arr[num_edges, 1]))
        print('\n')
    except:
        pass

    # Create a list of metric names for scalar metrics
    metric_list_names = []
    net_met_val_list_final = net_met_val_list
    for i in net_met_arr[:, 0]:
        metric_list_names.append(i)

    # Add modularity measure
    try:
        metric_list_names.append('Modularity')
        net_met_val_list_final.append(modularity)
    except:
        pass

    # Add Core/Periphery measure
    try:
        metric_list_names.append('Coreness')
        net_met_val_list_final.append(Coreness_q)
    except:
        pass

    # Add local efficiency measures
    try:
        for i in le_arr[:, 0]:
            metric_list_names.append(i)
        net_met_val_list_final = net_met_val_list_final + list(le_arr[:, 1])
    except:
        pass

    # Add local clustering measures
    try:
        for i in cl_arr[:, 0]:
            metric_list_names.append(i)
        net_met_val_list_final = net_met_val_list_final + list(cl_arr[:, 1])
    except:
        pass

    # Add centrality measures
    try:
        for i in dc_arr[:, 0]:
            metric_list_names.append(i)
        net_met_val_list_final = net_met_val_list_final + list(dc_arr[:, 1])
    except:
        pass
    try:
        for i in bc_arr[:, 0]:
            metric_list_names.append(i)
        net_met_val_list_final = net_met_val_list_final + list(bc_arr[:, 1])
    except:
        pass
    try:
        for i in ec_arr[:, 0]:
            metric_list_names.append(i)
        net_met_val_list_final = net_met_val_list_final + list(ec_arr[:, 1])
    except:
        pass
    try:
        for i in cc_arr[:, 0]:
            metric_list_names.append(i)
        net_met_val_list_final = net_met_val_list_final + list(cc_arr[:, 1])
    except:
        pass

    # Add rich club measure
    try:
        for i in rc_arr[:, 0]:
            metric_list_names.append(i)
        net_met_val_list_final = net_met_val_list_final + list(rc_arr[:, 1])
    except:
        pass

    # Save metric names as pickle
    try:
        import cPickle
    except ImportError:
        import _pickle as cPickle

    if mask is not None:
        if network is not None:
            met_list_pickle_path = os.path.dirname(os.path.abspath(
                est_path)) + '/net_metric_list_' + network + '_' + str(
                    os.path.basename(mask).split('.')[0])
        else:
            met_list_pickle_path = os.path.dirname(
                os.path.abspath(est_path)) + '/net_metric_list_' + str(
                    os.path.basename(mask).split('.')[0])
    else:
        if network is not None:
            met_list_pickle_path = os.path.dirname(
                os.path.abspath(est_path)) + '/net_metric_list_' + network
        else:
            met_list_pickle_path = os.path.dirname(
                os.path.abspath(est_path)) + '/net_metric_list'
    cPickle.dump(metric_list_names, open(met_list_pickle_path, 'wb'))

    # And save results to csv
    out_path = utils.create_csv_path(ID, network, conn_model, thr, mask,
                                     dir_path)
    np.savetxt(out_path, net_met_val_list_final)

    return (out_path)
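
A hedged usage sketch; est_path points at a whitespace-delimited adjacency matrix written with np.savetxt, and the subject ID and path are hypothetical:

import numpy as np

W = np.random.rand(30, 30)
W = (W + W.T) / 2
np.savetxt('/tmp/sub-01_est.txt', W)

csv_out = extractnetstats('sub-01', None, 0.2, 'corr',
                          '/tmp/sub-01_est.txt', None)
print(csv_out)   # path of the CSV holding the metric vector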
Example #6
def link_communities(W, type_clustering='single'):
    '''
    The optimal community structure is a subdivision of the network into
    nonoverlapping groups of nodes which maximizes the number of within-group
    edges and minimizes the number of between-group edges.
    This algorithm uncovers overlapping community structure via hierarchical
    clustering of network links. This algorithm is generalized for
    weighted/directed/fully-connected networks.

    Parameters
    ----------
    W : NxN np.array
        directed weighted/binary adjacency matrix
    type_clustering : str
        type of hierarchical clustering. 'single' for single-linkage,
        'complete' for complete-linkage. Default value='single'

    Returns
    -------
    M : CxN np.ndarray
        nodal community affiliation matrix.
    '''
    import numpy as np
    from pynets.thresholding import normalize

    n = len(W)
    W = normalize(W)

    if type_clustering not in ('single', 'complete'):
        raise ValueError('Unrecognized clustering type: %s' % type_clustering)

    # set diagonal to mean weights
    np.fill_diagonal(W, 0)
    W[range(n), range(n)] = (
        np.sum(W, axis=0) / np.sum(np.logical_not(W), axis=0) +
        np.sum(W.T, axis=0) / np.sum(np.logical_not(W.T), axis=0)) / 2

    # out/in norm squared
    No = np.sum(W**2, axis=1)
    Ni = np.sum(W**2, axis=0)

    # weighted in/out jaccard
    Jo = np.zeros((n, n))
    Ji = np.zeros((n, n))

    for b in range(n):
        for c in range(n):
            Do = np.dot(W[b, :], W[c, :].T)
            Jo[b, c] = Do / (No[b] + No[c] - Do)

            Di = np.dot(W[:, b].T, W[:, c])
            Ji[b, c] = Di / (Ni[b] + Ni[c] - Di)

    # get link similarity
    A, B = np.where(
        np.logical_and(np.logical_or(W, W.T), np.triu(np.ones((n, n)), 1)))
    m = len(A)
    Ln = np.zeros((m, 2), dtype=np.int32)  # link nodes
    Lw = np.zeros((m, ))  # link weights

    for i in range(m):
        Ln[i, :] = (A[i], B[i])
        Lw[i] = (W[A[i], B[i]] + W[B[i], A[i]]) / 2

    ES = np.zeros((m, m), dtype=np.float32)  # link similarity
    for i in range(m):
        for j in range(m):
            if Ln[i, 0] == Ln[j, 0]:
                a = Ln[i, 0]
                b = Ln[i, 1]
                c = Ln[j, 1]
            elif Ln[i, 0] == Ln[j, 1]:
                a = Ln[i, 0]
                b = Ln[i, 1]
                c = Ln[j, 0]
            elif Ln[i, 1] == Ln[j, 0]:
                a = Ln[i, 1]
                b = Ln[i, 0]
                c = Ln[j, 1]
            elif Ln[i, 1] == Ln[j, 1]:
                a = Ln[i, 1]
                b = Ln[i, 0]
                c = Ln[j, 0]
            else:
                continue

            ES[i, j] = (W[a, b] * W[a, c] * Ji[b, c] +
                        W[b, a] * W[c, a] * Jo[b, c]) / 2

    np.fill_diagonal(ES, 0)

    # perform hierarchical clustering

    C = np.zeros((m, m), dtype=np.int32)  # community affiliation matrix

    Nc = C.copy()
    Mc = np.zeros((m, m), dtype=np.float32)
    Dc = Mc.copy()  # community nodes, links, density

    U = np.arange(m)  # initial community assignments
    C[0, :] = np.arange(m)

    for i in range(m - 1):
        print('Hierarchy %i' % i)

        #time1 = time.time()

        for j in range(len(U)):  # loop over communities
            ixes = C[i, :] == U[j]  # get link indices

            links = np.sort(Lw[ixes])
            #nodes = np.sort(Ln[ixes,:].flat)

            nodes = np.sort(
                np.reshape(Ln[ixes, :], 2 * np.size(np.where(ixes))))

            # get unique nodes
            nodulo = np.append(nodes[0], (nodes[1:])[nodes[1:] != nodes[:-1]])
            #nodulo = ((nodes[1:])[nodes[1:] != nodes[:-1]])

            nc = len(nodulo)
            #nc = len(nodulo)+1
            mc = np.sum(links)
            min_mc = np.sum(links[:nc - 1])  # minimal weight
            dc = (mc - min_mc) / (nc *
                                  (nc - 1) / 2 - min_mc)  # community density

            if np.array(dc).shape != ():  # 'is not ()' compares identity, not equality
                print(dc)
                print(dc.shape)

            Nc[i, j] = nc
            Mc[i, j] = mc
            Dc[i, j] = dc if not np.isnan(dc) else 0

        #time2 = time.time()
        #print('compute densities time', time2-time1)
        C[i + 1, :] = C[i, :]  # copy current partition
        #if i in (2693,):
        #    import pdb
        #    pdb.set_trace()
        # Profiling and debugging show that this line, finding
        # the max values in this matrix, take about 3x longer than the
        # corresponding matlab version. Can it be improved?

        u1, u2 = np.where(ES[np.ix_(U, U)] == np.max(ES[np.ix_(U, U)]))

        if np.size(u1) > 2:
            # pick one
            wehr, = np.where((u1 == u2[0]))

            uc = np.squeeze((u1[0], u2[0]))
            ud = np.squeeze((u1[wehr], u2[wehr]))

            u1 = uc
            u2 = ud

        #time25 = time.time()
        #print('copy and max time', time25-time2)
        # get unique links (implementation of matlab sortrows)
        #ugl = np.array((u1,u2))
        ugl = np.sort((u1, u2), axis=1)
        ug_rows = ugl[np.argsort(ugl, axis=0)[:, 0]]
        # implementation of matlab unique(A, 'rows')
        unq_rows = np.vstack({tuple(row) for row in ug_rows})
        V = U[unq_rows]
        #time3 = time.time()
        #print('sortrows time', time3-time25)

        for j in range(len(V)):
            if type_clustering == 'single':
                x = np.max(ES[V[j, :], :], axis=0)
            elif type_clustering == 'complete':
                x = np.min(ES[V[j, :], :], axis=0)

            # assign distances to whole clusters
            ES[V[j, :], :] = np.array((x, x))
            ES[:, V[j, :]] = np.transpose((x, x))

            # clear diagonal
            ES[V[j, 0], V[j, 0]] = 0
            ES[V[j, 1], V[j, 1]] = 0

            # merge communities
            C[i + 1, C[i + 1, :] == V[j, 1]] = V[j, 0]
            V[V == V[j, 1]] = V[j, 0]

        #time4 = time.time()
        #print('get linkages time', time4-time3)
        U = np.unique(C[i + 1, :])
        if len(U) == 1:
            break
        #time5 = time.time()
        #print('get unique communities time', time5-time4)
    #Dc[ np.where(np.isnan(Dc)) ]=0
    i = np.argmax(np.sum(Dc * Mc, axis=1))
    U = np.unique(C[i, :])
    M = np.zeros((len(U), n))
    for j in range(len(U)):
        M[j, np.unique(Ln[C[i, :] == U[j], :])] = 1

    M = M[np.sum(M, axis=1) > 2, :]
    return M
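
A minimal sketch on a synthetic weighted graph; M comes back with one row per detected link community and one column per node:

import numpy as np

W = np.random.rand(20, 20)
W = (W + W.T) / 2            # symmetric weighted adjacency matrix
np.fill_diagonal(W, 0)

M = link_communities(W, type_clustering='single')
print(M.shape)               # (n_communities, n_nodes)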
Example #7
def wb_connectome_with_us_atlas_coords(input_file, ID, atlas_select, NETWORK,
                                       node_size, mask, thr, parlistfile,
                                       all_nets, conn_model, dens_thresh, conf,
                                       adapt_thresh, plot_switch,
                                       bedpostx_dir):
    nilearn_atlases = [
        'atlas_aal', 'atlas_craddock_2012', 'atlas_destrieux_2009'
    ]

    # Input is nifti file
    func_file = input_file

    # Test if atlas_select is a nilearn atlas
    if atlas_select in nilearn_atlases:
        try:
            parlistfile = getattr(datasets, 'fetch_%s' % atlas_select)().maps
            try:
                label_names = getattr(datasets,
                                      'fetch_%s' % atlas_select)().labels
            except:
                label_names = None
            try:
                networks_list = getattr(datasets,
                                        'fetch_%s' % atlas_select)().networks
            except:
                networks_list = None
        except:
            print(
                'PyNets is not ready for multi-scale atlases like BASC just yet!'
            )
            sys.exit()

    # Fetch user-specified atlas coords
    [coords, atlas_name,
     par_max] = nodemaker.get_names_and_coords_of_parcels(parlistfile)
    atlas_select = atlas_name

    try:
        label_names
    except NameError:
        label_names = np.arange(len(coords) +
                                1)[np.arange(len(coords) + 1) != 0].tolist()

    # Get subject directory path
    dir_path = os.path.dirname(
        os.path.realpath(func_file)) + '/' + atlas_select
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)

    # Get coord membership dictionary if all_nets option triggered
    if all_nets is not None:
        try:
            networks_list
        except:
            networks_list = None
        [membership,
         membership_plotting] = nodemaker.get_mem_dict(func_file, coords,
                                                       networks_list)

    # Describe user atlas coords
    print('\n' + atlas_name + ' comes with {0} '.format(par_max) + 'parcels' +
          '\n')
    print('\n' + 'Stacked atlas coordinates in array of shape {0}.'.format(
        coords.shape) + '\n')

    # Mask coordinates
    if mask is not None:
        [coords, label_names] = nodemaker.coord_masker(mask, coords,
                                                       label_names)

    # Save coords and label_names to pickles
    coord_path = dir_path + '/coords_wb_' + str(thr) + '.pkl'
    with open(coord_path, 'wb') as f:
        pickle.dump(coords, f)

    labels_path = dir_path + '/labelnames_wb_' + str(thr) + '.pkl'
    with open(labels_path, 'wb') as f:
        pickle.dump(label_names, f)

    if bedpostx_dir is not None:
        from pynets.diffconnectometry import run_struct_mapping
        # os.environ['FSLDIR'] raises KeyError (not NameError) when unset, so
        # the original try/except could never fire; check explicitly instead
        FSLDIR = os.environ.get('FSLDIR')
        if FSLDIR is None:
            print('FSLDIR environment variable not set!')
        est_path2 = run_struct_mapping(FSLDIR, ID, bedpostx_dir, dir_path,
                                       NETWORK, coords, node_size)

    # Extract time series from whole-brain parcellations:
    parcellation = nib.load(parlistfile)
    parcel_masker = input_data.NiftiLabelsMasker(labels_img=parcellation,
                                                 background_label=0,
                                                 memory='nilearn_cache',
                                                 memory_level=5,
                                                 standardize=True)
    ts_within_parcels = parcel_masker.fit_transform(func_file, confounds=conf)
    print('\n' +
          'Time series has {0} samples'.format(ts_within_parcels.shape[0]) +
          '\n')

    # Save time series as txt file
    out_path_ts = dir_path + '/' + ID + '_whole_brain_ts_within_parcels.txt'
    np.savetxt(out_path_ts, ts_within_parcels)

    # Fit connectivity model
    if adapt_thresh is not False:
        if os.path.isfile(est_path2):
            [conn_matrix, est_path, edge_threshold,
             thr] = thresholding.adaptive_thresholding(ts_within_parcels,
                                                       conn_model, NETWORK, ID,
                                                       est_path2, dir_path)
        else:
            print('No structural mx found! Exiting...')
            sys.exit(0)
    elif dens_thresh is None:
        edge_threshold = str(float(thr) * 100) + '%'
        [conn_matrix,
         est_path] = graphestimation.get_conn_matrix(ts_within_parcels,
                                                     conn_model, NETWORK, ID,
                                                     dir_path, thr)
        conn_matrix = thresholding.threshold_proportional(
            conn_matrix, float(thr), dir_path)
        conn_matrix = thresholding.normalize(conn_matrix)
    elif dens_thresh is not None:
        [conn_matrix, est_path, edge_threshold,
         thr] = thresholding.density_thresholding(ts_within_parcels,
                                                  conn_model, NETWORK, ID,
                                                  dens_thresh, dir_path)

    if plot_switch:
        # Plot connectogram
        plotting.plot_connectogram(conn_matrix, conn_model, atlas_name,
                                   dir_path, ID, NETWORK, label_names)

        # Plot adj. matrix based on determined inputs
        atlas_graph_title = plotting.plot_conn_mat(conn_matrix, conn_model,
                                                   atlas_name, dir_path, ID,
                                                   NETWORK, label_names, mask)

        # Plot connectome viz for all Yeo networks
        if all_nets:  # 'all_nets != False' would hit an undefined membership_plotting when all_nets is None
            plotting.plot_membership(membership_plotting, conn_matrix,
                                     conn_model, coords, edge_threshold,
                                     atlas_name, dir_path)
        else:
            out_path_fig = dir_path + '/' + ID + '_connectome_viz.png'
            niplot.plot_connectome(conn_matrix,
                                   coords,
                                   title=atlas_graph_title,
                                   edge_threshold=edge_threshold,
                                   node_size=20,
                                   colorbar=True,
                                   output_file=out_path_fig)
    return est_path, thr
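
A hedged invocation sketch; the functional image path is hypothetical, and because 'atlas_aal' is one of the nilearn atlases above, parlistfile can be left as None and is fetched automatically:

est_path, thr = wb_connectome_with_us_atlas_coords(
    input_file='/tmp/sub-01_func.nii.gz',   # hypothetical preprocessed BOLD image
    ID='sub-01', atlas_select='atlas_aal', NETWORK=None, node_size=4,
    mask=None, thr=0.2, parlistfile=None, all_nets=None, conn_model='corr',
    dens_thresh=None, conf=None, adapt_thresh=False, plot_switch=False,
    bedpostx_dir=None)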
Example #8
def network_connectome(input_file, ID, atlas_select, NETWORK, node_size, mask,
                       thr, parlistfile, all_nets, conn_model, dens_thresh,
                       conf, adapt_thresh, plot_switch, bedpostx_dir):
    nilearn_atlases = [
        'atlas_aal', 'atlas_craddock_2012', 'atlas_destrieux_2009'
    ]

    # Input is nifti file
    func_file = input_file

    # Test if atlas_select is a nilearn atlas
    if atlas_select in nilearn_atlases:
        atlas = getattr(datasets, 'fetch_%s' % atlas_select)()
        try:
            parlistfile = atlas.maps
            try:
                label_names = atlas.labels
            except:
                label_names = None
            try:
                networks_list = atlas.networks
            except:
                networks_list = None
        except RuntimeError:
            print('Error, atlas fetching failed.')
            sys.exit()

    if parlistfile is None and atlas_select not in nilearn_atlases:
        # Fetch nilearn atlas coords
        [coords, atlas_name, networks_list,
         label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)

        if atlas_name == 'Power 2011 atlas':
            # Reference RSN list
            import pkgutil
            import io
            network_coords_ref = NETWORK + '_coords.csv'
            atlas_coords = pkgutil.get_data("pynets",
                                            "rsnrefs/" + network_coords_ref)
            # DataFrame.ix was removed from pandas; use positional .iloc instead
            df = pd.read_csv(io.BytesIO(atlas_coords)).iloc[:, 0:4]
            net_coords = []
            ix_labels = []
            for i in range(len(df)):
                #print("ROI Reference #: " + str(i))
                x = int(df.iloc[i, 1])
                y = int(df.iloc[i, 2])
                z = int(df.iloc[i, 3])
                #print("X:" + str(x) + " Y:" + str(y) + " Z:" + str(z))
                net_coords.append((x, y, z))
                ix_labels.append(i)
            label_names = ix_labels
        elif atlas_name == 'Dosenbach 2010 atlas':
            coords = list(tuple(x) for x in coords)

            # Get coord membership dictionary
            [membership, membership_plotting
             ] = nodemaker.get_mem_dict(func_file, coords, networks_list)

            # Convert to membership dataframe
            mem_df = membership.to_frame().reset_index()

            nets_avail = list(set(list(mem_df['index'])))
            # Get network name equivalents
            if NETWORK == 'DMN':
                NETWORK = 'default'
            elif NETWORK == 'FPTC':
                NETWORK = 'fronto-parietal'
            elif NETWORK == 'CON':
                NETWORK = 'cingulo-opercular'
            elif NETWORK not in nets_avail:
                print('Error: ' + NETWORK + ' not available with this atlas!')
                sys.exit()

            # Get coords for network-of-interest
            net_coords = mem_df.loc[mem_df['index'] == NETWORK][[0]].values[:, 0]
            net_coords = list(tuple(x) for x in net_coords)
            ix_labels = mem_df.loc[mem_df['index'] == NETWORK].index.values
            # Add code for any special RSN reference lists for the nilearn atlases here
            # If label_names are not indices and NETWORK is specified, sub-list label names

        if list(label_names) != list(ix_labels):  # elementwise ndarray comparison would be ambiguous in an 'if'
            try:
                label_names = label_names.tolist()
            except:
                pass
            label_names = [label_names[i] for i in ix_labels]

        # Get subject directory path
        dir_path = os.path.dirname(
            os.path.realpath(func_file)) + '/' + atlas_select
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)

        # If masking, remove those coords that fall outside of the mask
        if mask is not None:
            [net_coords,
             label_names] = nodemaker.coord_masker(mask, net_coords,
                                                   label_names)

        # Save coords and label_names to pickles
        coord_path = dir_path + '/coords_' + NETWORK + '_' + str(thr) + '.pkl'
        with open(coord_path, 'wb') as f:
            pickle.dump(net_coords, f)

        labels_path = dir_path + '/labelnames_' + NETWORK + '_' + str(
            thr) + '.pkl'
        with open(labels_path, 'wb') as f:
            pickle.dump(label_names, f)

        if bedpostx_dir is not None:
            from pynets.diffconnectometry import run_struct_mapping
            FSLDIR = os.environ.get('FSLDIR')
            if FSLDIR is None:
                print('FSLDIR environment variable not set!')
            est_path2 = run_struct_mapping(FSLDIR, ID, bedpostx_dir, dir_path,
                                           NETWORK, net_coords, node_size)

    else:
        ##Fetch user-specified atlas coords
        [coords_all, atlas_name,
         par_max] = nodemaker.get_names_and_coords_of_parcels(parlistfile)
        coords = list(tuple(x) for x in coords_all)

        ##Get subject directory path
        dir_path = os.path.dirname(
            os.path.realpath(func_file)) + '/' + atlas_name
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)

        ##Get coord membership dictionary
        try:
            networks_list
        except NameError:
            networks_list = None
        [membership,
         membership_plotting] = nodemaker.get_mem_dict(func_file, coords,
                                                       networks_list)

        ##Convert to membership dataframe
        mem_df = membership.to_frame().reset_index()

        ##Get coords for network-of-interest
        net_coords = mem_df.loc[mem_df['index'] == NETWORK][[0]].values[:, 0]
        net_coords = list(tuple(x) for x in net_coords)
        ix_labels = mem_df.loc[mem_df['index'] == NETWORK].index.values
        try:
            label_names = [label_names[i] for i in ix_labels]
        except:
            label_names = ix_labels

        if mask is not None:
            [net_coords,
             label_names] = nodemaker.coord_masker(mask, net_coords,
                                                   label_names)

        ##Save coords and label_names to pickles
        coord_path = dir_path + '/coords_' + NETWORK + '_' + str(thr) + '.pkl'
        with open(coord_path, 'wb') as f:
            pickle.dump(net_coords, f)

        labels_path = dir_path + '/labelnames_' + NETWORK + '_' + str(
            thr) + '.pkl'
        with open(labels_path, 'wb') as f:
            pickle.dump(label_names, f)

        if bedpostx_dir is not None:
            from pynets.diffconnectometry import run_struct_mapping
            FSLDIR = os.environ.get('FSLDIR')
            if FSLDIR is None:
                print('FSLDIR environment variable not set!')
            est_path2 = run_struct_mapping(FSLDIR, ID, bedpostx_dir, dir_path,
                                           NETWORK, net_coords, node_size)

        ##Generate network parcels image (through refinement, this could be used
        ##in place of the 3 lines above)
        #net_parcels_img_path = gen_network_parcels(parlistfile, NETWORK, labels)
        #parcellation = nib.load(net_parcels_img_path)
        #parcel_masker = input_data.NiftiLabelsMasker(labels_img=parcellation, background_label=0, memory='nilearn_cache', memory_level=5, standardize=True)
        #ts_within_parcels = parcel_masker.fit_transform(func_file)
        #net_ts = ts_within_parcels

    ##Grow ROIs
    masker = input_data.NiftiSpheresMasker(seeds=net_coords,
                                           radius=float(node_size),
                                           allow_overlap=True,
                                           memory_level=5,
                                           memory='nilearn_cache',
                                           verbose=2,
                                           standardize=True)
    ts_within_spheres = masker.fit_transform(func_file, confounds=conf)
    net_ts = ts_within_spheres

    ##Save time series as txt file
    out_path_ts = dir_path + '/' + ID + '_' + NETWORK + '_net_ts.txt'
    np.savetxt(out_path_ts, net_ts)

    ##Fit connectivity model
    if adapt_thresh is not False:
        if os.path.isfile(est_path2):
            [conn_matrix, est_path, edge_threshold,
             thr] = thresholding.adaptive_thresholding(ts_within_spheres,
                                                       conn_model, NETWORK, ID,
                                                       est_path2, dir_path)
        else:
            print('No structural matrix found! Exiting...')
            sys.exit(0)
    elif dens_thresh is None:
        edge_threshold = str(float(thr) * 100) + '%'
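        # (percentile strings of the form 'NN%' are the format nilearn's
        # plot_connectome expects for its edge_threshold argument below)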
        [conn_matrix,
         est_path] = graphestimation.get_conn_matrix(ts_within_spheres,
                                                     conn_model, NETWORK, ID,
                                                     dir_path, thr)
        conn_matrix = thresholding.threshold_proportional(
            conn_matrix, float(thr), dir_path)
        conn_matrix = thresholding.normalize(conn_matrix)
    elif dens_thresh is not None:
        [conn_matrix, est_path, edge_threshold,
         thr] = thresholding.density_thresholding(ts_within_spheres,
                                                  conn_model, NETWORK, ID,
                                                  dens_thresh, dir_path)

    if plot_switch:
        ##Plot connectogram
        plotting.plot_connectogram(conn_matrix, conn_model, atlas_name,
                                   dir_path, ID, NETWORK, label_names)

        ##Plot adj. matrix based on determined inputs
        plotting.plot_conn_mat(conn_matrix, conn_model, atlas_name, dir_path,
                               ID, NETWORK, label_names, mask)

        ##Plot network time-series
        plotting.plot_timeseries(net_ts, NETWORK, ID, dir_path, atlas_name,
                                 label_names)

        ##Plot connectome viz for specific Yeo networks
        title = "Connectivity Projected on the " + NETWORK
        out_path_fig = dir_path + '/' + ID + '_' + NETWORK + '_connectome_plot.png'
        niplot.plot_connectome(conn_matrix,
                               net_coords,
                               edge_threshold=edge_threshold,
                               title=title,
                               display_mode='lyrz',
                               output_file=out_path_fig)
    return est_path, thr
Example #9
def wb_connectome_with_nl_atlas_coords(input_file, ID, atlas_select, NETWORK,
                                       node_size, mask, thr, all_nets,
                                       conn_model, dens_thresh, conf,
                                       adapt_thresh, plot_switch,
                                       bedpostx_dir):
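    """
    Estimate a whole-brain functional connectome from spherical nodes grown
    at the coordinates of a nilearn-packaged atlas, optionally map a
    structural connectome from bedpostx outputs, and plot the results.
    """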
    nilearn_atlases = [
        'atlas_aal', 'atlas_craddock_2012', 'atlas_destrieux_2009'
    ]

    ##Input is nifti file
    func_file = input_file

    ##Fetch nilearn atlas coords
    [coords, atlas_name, networks_list,
     label_names] = nodemaker.fetch_nilearn_atlas_coords(atlas_select)

    ##Get subject directory path
    dir_path = os.path.dirname(
        os.path.realpath(func_file)) + '/' + atlas_select
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)

    ##Get coord membership dictionary if all_nets option triggered
    if all_nets:
        try:
            networks_list
        except NameError:
            networks_list = None
        [membership,
         membership_plotting] = nodemaker.get_mem_dict(func_file, coords,
                                                       networks_list)

    ##Mask coordinates
    if mask is not None:
        [coords, label_names] = nodemaker.coord_masker(mask, coords,
                                                       label_names)

    ##Save coords and label_names to pickles
    coord_path = dir_path + '/coords_wb_' + str(thr) + '.pkl'
    with open(coord_path, 'wb') as f:
        pickle.dump(coords, f)

    labels_path = dir_path + '/labelnames_wb_' + str(thr) + '.pkl'
    with open(labels_path, 'wb') as f:
        pickle.dump(label_names, f)

    if bedpostx_dir is not None:
        from pynets.diffconnectometry import run_struct_mapping
        FSLDIR = os.environ.get('FSLDIR')
        if FSLDIR is None:
            print('FSLDIR environment variable not set!')
        est_path2 = run_struct_mapping(FSLDIR, ID, bedpostx_dir, dir_path,
                                       NETWORK, coords, node_size)

    ##Extract within-spheres time-series from funct file
    spheres_masker = input_data.NiftiSpheresMasker(seeds=coords,
                                                   radius=float(node_size),
                                                   memory='nilearn_cache',
                                                   memory_level=5,
                                                   verbose=2,
                                                   standardize=True)
    ts_within_spheres = spheres_masker.fit_transform(func_file, confounds=conf)
    print('\n' +
          'Time series has {0} samples'.format(ts_within_spheres.shape[0]) +
          '\n')

    ##Save time series as txt file
    out_path_ts = dir_path + '/' + ID + '_whole_brain_ts_within_spheres.txt'
    np.savetxt(out_path_ts, ts_within_spheres)

    ##Fit connectivity model
    if adapt_thresh is not False:
        if os.path.isfile(est_path2):
            [conn_matrix, est_path, edge_threshold,
             thr] = thresholding.adaptive_thresholding(ts_within_spheres,
                                                       conn_model, NETWORK, ID,
                                                       est_path2, dir_path)
        else:
            print('No structural matrix found! Exiting...')
            sys.exit(0)
    elif dens_thresh is None:
        edge_threshold = str(float(thr) * 100) + '%'
        [conn_matrix,
         est_path] = graphestimation.get_conn_matrix(ts_within_spheres,
                                                     conn_model, NETWORK, ID,
                                                     dir_path, thr)
        conn_matrix = thresholding.threshold_proportional(
            conn_matrix, float(thr), dir_path)
        conn_matrix = thresholding.normalize(conn_matrix)
    elif dens_thresh is not None:
        [conn_matrix, est_path, edge_threshold,
         thr] = thresholding.density_thresholding(ts_within_spheres,
                                                  conn_model, NETWORK, ID,
                                                  dens_thresh, dir_path)

    if plot_switch:
        ##Plot connectogram
        plotting.plot_connectogram(conn_matrix, conn_model, atlas_name,
                                   dir_path, ID, NETWORK, label_names)

        ##Plot adj. matrix based on determined inputs
        plotting.plot_conn_mat(conn_matrix, conn_model, atlas_name, dir_path,
                               ID, NETWORK, label_names, mask)

        ##Plot connectome viz for all Yeo networks
        if all_nets:
            plotting.plot_membership(membership_plotting, conn_matrix,
                                     conn_model, coords, edge_threshold,
                                     atlas_name, dir_path)
        else:
            out_path_fig = dir_path + '/' + ID + '_' + atlas_name + '_connectome_viz.png'
            niplot.plot_connectome(conn_matrix,
                                   coords,
                                   title=atlas_name,
                                   edge_threshold=edge_threshold,
                                   node_size=20,
                                   colorbar=True,
                                   output_file=out_path_fig)
    return est_path, thr
Example #10
def extractnetstats(ID,
                    network,
                    thr,
                    conn_model,
                    est_path,
                    roi,
                    prune,
                    node_size,
                    norm,
                    binary,
                    custom_weight=None):
    """
    Function interface for performing fully-automated graph analysis.

    Parameters
    ----------
    ID : str
        A subject id or other unique identifier.
    network : str
        Resting-state network based on Yeo-7 and Yeo-17 naming (e.g. 'Default') used to filter nodes in the study of
        brain subgraphs.
    thr : float
        The value, between 0 and 1, used to threshold the graph using any variety of methods
        triggered through other options.
    conn_model : str
       Connectivity estimation model (e.g. corr for correlation, cov for covariance, sps for precision covariance,
       partcorr for partial correlation). sps type is used by default.
    est_path : str
        File path to the thresholded graph, conn_matrix_thr, saved as a numpy array in .npy format.
    roi : str
        File path to binarized/boolean region-of-interest Nifti1Image file.
    prune : bool
        Indicates whether to prune final graph of disconnected nodes/isolates.
    node_size : int
        Spherical centroid node size in the case that coordinate-based
        centroids are used as ROIs.
    norm : int
        Indicates method of normalizing resulting graph.
    binary : bool
        Indicates whether to binarize resulting graph edges to form an
        unweighted graph.
    custom_weight : float
        The edge attribute that holds the numerical value used as a weight.
        If None, then each edge has weight 1. Default is None.

    Returns
    -------
    out_path : str
        Path to .csv file where graph analysis results are saved.
    """
    import pandas as pd
    import yaml
    try:
        import cPickle as pickle
    except ImportError:
        import _pickle as pickle
    from pathlib import Path
    from pynets import thresholding, utils

    # Advanced options
    fmt = 'edgelist_ssv'
    est_path_fmt = "%s%s" % ('.', est_path.split('.')[-1])

    # Load and threshold matrix
    if est_path_fmt == '.txt':
        in_mat_raw = np.genfromtxt(est_path)
    else:
        in_mat_raw = np.load(est_path)

    # De-diagonalize (zero the matrix diagonal)
    in_mat = np.array(thresholding.autofix(in_mat_raw))

    # Normalize connectivity matrix
    # Force edges to values between 0-1
    if norm == 1:
        in_mat = thresholding.normalize(in_mat)
    # Apply log10
    elif norm == 2:
        in_mat = np.log10(in_mat)
    else:
        pass

    # Correct nan's and inf's
    in_mat[np.isnan(in_mat)] = 0
    in_mat[np.isinf(in_mat)] = 1

    # Apply the inverse hyperbolic tangent (i.e. Fisher r-to-z transform) if the
    # matrix holds correlations rather than covariances
    if (conn_model == 'corr') or (conn_model == 'partcorr'):
        in_mat = np.arctanh(in_mat)
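        # (arctanh maps r in (-1, 1) to an unbounded z, e.g.
        # np.arctanh(0.5) ~= 0.5493; r = +/-1 would map to +/-inf)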

    # Binarize graph
    if binary:
        in_mat = thresholding.binarize(in_mat)

    # Get dir_path
    dir_path = os.path.dirname(os.path.realpath(est_path))

    # Load numpy matrix as networkx graph
    G_pre = nx.from_numpy_matrix(in_mat)

    # Prune irrelevant nodes (i.e. nodes that are fully disconnected from the
    # graph and/or those whose betweenness centrality is more than 3 standard
    # deviations below the mean)
    if prune == 1:
        [G, _] = prune_disconnected(G_pre)
    elif prune == 2:
        [G, _] = most_important(G_pre)
    else:
        G = G_pre

    # Get corresponding matrix
    in_mat = np.array(nx.to_numpy_matrix(G))

    # Save the pruned matrix
    if (prune != 0) and (prune is not None):
        final_mat_path = "%s%s%s" % (est_path.split(est_path_fmt)[0],
                                     '_pruned_mat', est_path_fmt)
        utils.save_mat(in_mat, final_mat_path, fmt)

    # Print graph summary
    print("%s%.2f%s" % ('\n\nThreshold: ', 100 * float(thr), '%'))
    print("%s%s" % ('Source File: ', est_path))
    info_list = list(nx.info(G).split('\n'))[2:]
    for i in info_list:
        print(i)

    if nx.is_connected(G):
        frag = False
        print('Graph is connected...')
    else:
        frag = True
        print('Warning: Graph is fragmented...\n')

    # Create Length matrix
    mat_len = thresholding.weight_conversion(in_mat, 'lengths')
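    # (the 'lengths' conversion is assumed to invert weights, w -> 1/w, as in
    # the Brain Connectivity Toolbox, so stronger connections read as shorter
    # distances for the betweenness computation on G_len below)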

    # Load numpy matrix as networkx graph
    G_len = nx.from_numpy_matrix(mat_len)

    # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    # # # # Calculate global and local metrics from graph G # # # #
    # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    import community
    from networkx.algorithms import (average_clustering,
                                     average_shortest_path_length,
                                     betweenness_centrality, clustering,
                                     communicability_betweenness_centrality,
                                     degree_assortativity_coefficient,
                                     degree_centrality,
                                     degree_pearson_correlation_coefficient,
                                     eigenvector_centrality,
                                     graph_number_of_cliques,
                                     rich_club_coefficient, sigma,
                                     transitivity)
    from pynets.stats.netstats import (average_local_efficiency,
                                       diversity_coef_sign, global_efficiency,
                                       local_efficiency, participation_coef,
                                       participation_coef_sign)
    # For non-nodal scalar metrics from custom functions, add the name of the
    # function to metric_list_glob and add the function (with a G-only input)
    # to the netstats module; see the commented sketch below the list.
    metric_list_glob = [
        global_efficiency, average_local_efficiency,
        degree_assortativity_coefficient, average_clustering,
        average_shortest_path_length, degree_pearson_correlation_coefficient,
        graph_number_of_cliques, transitivity, sigma
    ]
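    # A minimal sketch of one such custom scalar metric (hypothetical; not
    # part of pynets). It takes only G and returns a single scalar, so it
    # could be appended to metric_list_glob above:
    #
    #   def graph_diameter(G):
    #       # longest shortest path; defined only for connected graphs
    #       return nx.diameter(G)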
    metric_list_comm = ['louvain_modularity']
    # with open("%s%s" % (str(Path(__file__).parent), '/global_graph_measures.yaml'), 'r') as stream:
    #     try:
    #         metric_dict_global = yaml.load(stream)
    #         metric_list_global = metric_dict_global['metric_list_global']
    #         print("%s%s%s" % ('\n\nCalculating global measures:\n', metric_list_global, '\n\n'))
    #     except FileNotFoundError:
    #         print('Failed to parse global_graph_measures.yaml')

    with open("%s%s" % (str(Path(__file__).parent),
                        '/nodal_graph_measures.yaml'), 'r') as stream:
        try:
            metric_dict_nodal = yaml.safe_load(stream)
            metric_list_nodal = metric_dict_nodal['metric_list_nodal']
            print("%s%s%s" % ('\n\nCalculating nodal measures:\n',
                              metric_list_nodal, '\n\n'))
        except yaml.YAMLError:
            print('Failed to parse nodal_graph_measures.yaml')
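    # nodal_graph_measures.yaml is assumed to hold a single list keyed by
    # 'metric_list_nodal', e.g. (illustrative contents only):
    #
    #   metric_list_nodal:
    #     - 'participation_coefficient'
    #     - 'diversity_coefficient'
    #     - 'local_efficiency'
    #     - 'degree_centrality'
    #     - 'betweenness_centrality'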

    # Note the use of bare excepts in the blocks that follow. Typically, this
    # is considered bad practice in Python. Here, we exploit it intentionally
    # to facilitate uninterrupted, automated graph analysis even when
    # algorithms are undefined. In those instances, solutions are assigned
    # NaN's.

    # Iteratively run functions from above metric list that generate single scalar output
    num_mets = len(metric_list_glob)
    net_met_arr = np.zeros([num_mets, 2], dtype='object')
    j = 0
    for i in metric_list_glob:
        net_met = i.__name__
        try:
            try:
                net_met_val = raw_mets(G, i, custom_weight)
            except:
                print("%s%s%s" %
                      ('WARNING: ', net_met, ' failed for graph G.'))
                net_met_val = np.nan
        except:
            print("%s%s%s" %
                  ('WARNING: ', str(i), ' is undefined for graph G'))
            net_met_val = np.nan
        net_met_arr[j, 0] = net_met
        net_met_arr[j, 1] = net_met_val
        print(net_met)
        print(str(net_met_val))
        print('\n')
        j = j + 1
    net_met_val_list = list(net_met_arr[:, 1])

    # Create a list of metric names for scalar metrics
    metric_list_names = list(net_met_arr[:, 0])
    net_met_val_list_final = net_met_val_list

    # Run miscellaneous functions that generate multiple outputs
    # Calculate modularity using the Louvain algorithm
    if 'louvain_modularity' in metric_list_comm:
        try:
            ci = community.best_partition(G)
            modularity = community.community_louvain.modularity(ci, G)
            metric_list_names.append('modularity')
            net_met_val_list_final.append(modularity)
        except:
            print('Louvain modularity calculation is undefined for graph G')
            pass

    # Participation Coefficient by louvain community
    if 'participation_coefficient' in metric_list_nodal:
        try:
            if ci is None:
                raise KeyError(
                    'Participation coefficient cannot be calculated for graph G in the absence of a '
                    'community affiliation vector')
            if len(in_mat[in_mat < 0.0]) > 0:
                pc_vector = participation_coef_sign(in_mat, ci)
            else:
                pc_vector = participation_coef(in_mat, ci)
            print(
                '\nExtracting Participation Coefficient vector for all network nodes...'
            )
            pc_vals = list(pc_vector)
            pc_edges = list(range(len(pc_vector)))
            num_edges = len(pc_edges)
            pc_arr = np.zeros([num_edges + 1, 2], dtype='object')
            j = 0
            for i in range(num_edges):
                pc_arr[j, 0] = "%s%s" % (str(pc_edges[j]), '_partic_coef')
                try:
                    pc_arr[j, 1] = pc_vals[j]
                except:
                    print("%s%s%s" %
                          ('Participation coefficient is undefined for node ',
                           str(j), ' of graph G'))
                    pc_arr[j, 1] = np.nan
                j = j + 1
            # Add mean
            pc_arr[num_edges, 0] = 'average_participation_coefficient'
            pc_arr[num_edges, 1] = np.mean(pc_arr[:num_edges, 1])
            print("%s%s" % ('Mean Participation Coefficient across nodes: ',
                            str(pc_arr[num_edges, 1])))
            for i in pc_arr[:, 0]:
                metric_list_names.append(i)
            net_met_val_list_final = net_met_val_list_final + list(pc_arr[:,
                                                                          1])
        except:
            print('Participation coefficient cannot be calculated for graph G')
            pass

    # Diversity Coefficient by louvain community
    if 'diversity_coefficient' in metric_list_nodal:
        try:
            if ci is None:
                raise KeyError(
                    'Diversity coefficient cannot be calculated for graph G in the absence of a community '
                    'affiliation vector')
            [dc_vector, _] = diversity_coef_sign(in_mat, ci)
            print(
                '\nExtracting Diversity Coefficient vector for all network nodes...'
            )
            dc_vals = list(dc_vector)
            dc_edges = list(range(len(dc_vector)))
            num_edges = len(dc_edges)
            dc_arr = np.zeros([num_edges + 1, 2], dtype='object')
            j = 0
            for i in range(num_edges):
                dc_arr[j, 0] = "%s%s" % (str(dc_edges[j]), '_diversity_coef')
                try:
                    dc_arr[j, 1] = dc_vals[j]
                except:
                    print("%s%s%s" %
                          ('Diversity coefficient is undefined for node ',
                           str(j), ' of graph G'))
                    dc_arr[j, 1] = np.nan
                j = j + 1
            # Add mean
            dc_arr[num_edges, 0] = 'average_diversity_coefficient'
            dc_arr[num_edges, 1] = np.mean(dc_arr[:num_edges, 1])
            print("%s%s" % ('Mean Diversity Coefficient across nodes: ',
                            str(dc_arr[num_edges, 1])))
            for i in dc_arr[:, 0]:
                metric_list_names.append(i)
            net_met_val_list_final = net_met_val_list_final + list(dc_arr[:,
                                                                          1])
        except:
            print('Diversity coefficient cannot be calculated for graph G')
            pass

    # Local Efficiency
    if 'local_efficiency' in metric_list_nodal:
        try:
            le_vector = local_efficiency(G)
            print(
                '\nExtracting Local Efficiency vector for all network nodes...'
            )
            le_vals = list(le_vector.values())
            le_nodes = list(le_vector.keys())
            num_nodes = len(le_nodes)
            le_arr = np.zeros([num_nodes + 1, 2], dtype='object')
            j = 0
            for i in range(num_nodes):
                le_arr[j, 0] = "%s%s" % (str(le_nodes[j]), '_local_efficiency')
                try:
                    le_arr[j, 1] = le_vals[j]
                except:
                    print(
                        "%s%s%s" % ('Local efficiency is undefined for node ',
                                    str(j), ' of graph G'))
                    le_arr[j, 1] = np.nan
                j = j + 1
            le_arr[num_nodes, 0] = 'average_local_efficiency_nodewise'
            le_arr[num_nodes, 1] = np.mean(le_arr[:num_nodes, 1])
            print("%s%s" % ('Mean Local Efficiency across nodes: ',
                            str(le_arr[num_nodes, 1])))
            for i in le_arr[:, 0]:
                metric_list_names.append(i)
            net_met_val_list_final = net_met_val_list_final + list(le_arr[:,
                                                                          1])
        except:
            print('Local efficiency cannot be calculated for graph G')
            pass

    # Local Clustering
    if 'local_clustering' in metric_list_nodal:
        try:
            cl_vector = clustering(G)
            print(
                '\nExtracting Local Clustering vector for all network nodes...'
            )
            cl_vals = list(cl_vector.values())
            cl_nodes = list(cl_vector.keys())
            num_nodes = len(cl_nodes)
            cl_arr = np.zeros([num_nodes + 1, 2], dtype='object')
            j = 0
            for i in range(num_nodes):
                cl_arr[j, 0] = "%s%s" % (str(cl_nodes[j]), '_local_clustering')
                try:
                    cl_arr[j, 1] = cl_vals[j]
                except:
                    print(
                        "%s%s%s" % ('Local clustering is undefined for node ',
                                    str(j), ' of graph G'))
                    cl_arr[j, 1] = np.nan
                j = j + 1
            cl_arr[num_nodes, 0] = 'average_local_clustering'
            cl_arr[num_nodes, 1] = np.mean(cl_arr[:num_nodes, 1])
            print("%s%s" % ('Mean Local Clustering across nodes: ',
                            str(cl_arr[num_nodes, 1])))
            for i in cl_arr[:, 0]:
                metric_list_names.append(i)
            net_met_val_list_final = net_met_val_list_final + list(cl_arr[:,
                                                                          1])
        except:
            print('Local clustering cannot be calculated for graph G')
            pass

    # Degree centrality
    if 'degree_centrality' in metric_list_nodal:
        try:
            dc_vector = degree_centrality(G)
            print(
                '\nExtracting Degree Centrality vector for all network nodes...'
            )
            dc_vals = list(dc_vector.values())
            dc_nodes = list(dc_vector.keys())
            num_nodes = len(dc_nodes)
            dc_arr = np.zeros([num_nodes + 1, 2], dtype='object')
            j = 0
            for i in range(num_nodes):
                dc_arr[j,
                       0] = "%s%s" % (str(dc_nodes[j]), '_degree_centrality')
                try:
                    dc_arr[j, 1] = dc_vals[j]
                except:
                    print(
                        "%s%s%s" % ('Degree centrality is undefined for node ',
                                    str(j), ' of graph G'))
                    dc_arr[j, 1] = np.nan
                j = j + 1
            dc_arr[num_nodes, 0] = 'average_degree_cent'
            dc_arr[num_nodes, 1] = np.mean(dc_arr[:num_nodes, 1])
            print("%s%s" % ('Mean Degree Centrality across nodes: ',
                            str(dc_arr[num_nodes, 1])))
            for i in dc_arr[:, 0]:
                metric_list_names.append(i)
            net_met_val_list_final = net_met_val_list_final + list(dc_arr[:,
                                                                          1])
        except:
            print('Degree centrality cannot be calculated for graph G')
            pass

    # Betweenness Centrality
    if 'betweenness_centrality' in metric_list_nodal:
        try:
            bc_vector = betweenness_centrality(G_len, normalized=True)
            print(
                '\nExtracting Betweenness Centrality vector for all network nodes...'
            )
            bc_vals = list(bc_vector.values())
            bc_nodes = list(bc_vector.keys())
            num_nodes = len(bc_nodes)
            bc_arr = np.zeros([num_nodes + 1, 2], dtype='object')
            j = 0
            for i in range(num_nodes):
                bc_arr[j, 0] = "%s%s" % (str(
                    bc_nodes[j]), '_betweenness_centrality')
                try:
                    bc_arr[j, 1] = bc_vals[j]
                except:
                    print("%s%s%s" %
                          ('Betweenness centrality is undefined for node ',
                           str(j), ' of graph G'))
                    bc_arr[j, 1] = np.nan
                j = j + 1
            bc_arr[num_nodes, 0] = 'average_betweenness_centrality'
            bc_arr[num_nodes, 1] = np.mean(bc_arr[:num_nodes, 1])
            print("%s%s" % ('Mean Betweenness Centrality across nodes: ',
                            str(bc_arr[num_nodes, 1])))
            for i in bc_arr[:, 0]:
                metric_list_names.append(i)
            net_met_val_list_final = net_met_val_list_final + list(bc_arr[:,
                                                                          1])
        except:
            print('Betweenness centrality cannot be calculated for graph G')
            pass

    # Eigenvector Centrality
    if 'eigenvector_centrality' in metric_list_nodal:
        try:
            ec_vector = eigenvector_centrality(G, max_iter=1000)
            print(
                '\nExtracting Eigenvector Centrality vector for all network nodes...'
            )
            ec_vals = list(ec_vector.values())
            ec_nodes = list(ec_vector.keys())
            num_nodes = len(ec_nodes)
            ec_arr = np.zeros([num_nodes + 1, 2], dtype='object')
            j = 0
            for i in range(num_nodes):
                ec_arr[j, 0] = "%s%s" % (str(
                    ec_nodes[j]), '_eigenvector_centrality')
                try:
                    ec_arr[j, 1] = ec_vals[j]
                except:
                    print("%s%s%s" %
                          ('Eigenvector centrality is undefined for node ',
                           str(j), ' of graph G'))
                    ec_arr[j, 1] = np.nan
                j = j + 1
            ec_arr[num_nodes, 0] = 'average_eigenvector_centrality'
            ec_arr[num_nodes, 1] = np.mean(ec_arr[:num_nodes, 1])
            print("%s%s" % ('Mean Eigenvector Centrality across nodes: ',
                            str(ec_arr[num_nodes, 1])))
            for i in ec_arr[:, 0]:
                metric_list_names.append(i)
            net_met_val_list_final = net_met_val_list_final + list(ec_arr[:,
                                                                          1])
        except:
            print('Eigenvector centrality cannot be calculated for graph G')
            pass

    # Communicability Centrality
    if 'communicability_centrality' in metric_list_nodal:
        try:
            cc_vector = communicability_betweenness_centrality(G,
                                                               normalized=True)
            print(
                '\nExtracting Communicability Centrality vector for all network nodes...'
            )
            cc_vals = list(cc_vector.values())
            cc_nodes = list(cc_vector.keys())
            num_nodes = len(cc_nodes)
            cc_arr = np.zeros([num_nodes + 1, 2], dtype='object')
            j = 0
            for i in range(num_nodes):
                cc_arr[j, 0] = "%s%s" % (str(
                    cc_nodes[j]), '_communicability_centrality')
                try:
                    cc_arr[j, 1] = cc_vals[j]
                except:
                    print("%s%s%s" %
                          ('Communicability centrality is undefined for node ',
                           str(j), ' of graph G'))
                    cc_arr[j, 1] = np.nan
                j = j + 1
            cc_arr[num_nodes, 0] = 'average_communicability_centrality'
            cc_arr[num_nodes, 1] = np.mean(cc_arr[:num_nodes, 1])
            print("%s%s" % ('Mean Communicability Centrality across nodes: ',
                            str(cc_arr[num_nodes, 1])))
            for i in cc_arr[:, 0]:
                metric_list_names.append(i)
            net_met_val_list_final = net_met_val_list_final + list(cc_arr[:,
                                                                          1])
        except:
            print(
                'Communicability centrality cannot be calculated for graph G')
            pass

    # Rich club coefficient
    if 'rich_club_coefficient' in metric_list_nodal:
        try:
            rc_vector = rich_club_coefficient(G, normalized=True)
            print(
                '\nExtracting Rich Club Coefficient vector for all network nodes...'
            )
            rc_vals = list(rc_vector.values())
            rc_edges = list(rc_vector.keys())
            num_edges = len(rc_edges)
            rc_arr = np.zeros([num_edges + 1, 2], dtype='object')
            j = 0
            for i in range(num_edges):
                rc_arr[j, 0] = "%s%s" % (str(rc_edges[j]), '_rich_club')
                try:
                    rc_arr[j, 1] = rc_vals[j]
                except:
                    print("%s%s%s" %
                          ('Rich club coefficient is undefined for node ',
                           str(j), ' of graph G'))
                    rc_arr[j, 1] = np.nan
                j = j + 1
            # Add mean
            rc_arr[num_edges, 0] = 'average_rich_club_coefficient'
            rc_arr[num_edges, 1] = np.mean(rc_arr[:num_edges, 1])
            print("%s%s" % ('Mean Rich Club Coefficient across degree levels: ',
                            str(rc_arr[num_edges, 1])))
            for i in rc_arr[:, 0]:
                metric_list_names.append(i)
            net_met_val_list_final = net_met_val_list_final + list(rc_arr[:,
                                                                          1])
        except:
            print('Rich club coefficient cannot be calculated for graph G')
            pass

    if roi:
        met_list_pickle_path = "%s%s%s%s" % (
            os.path.dirname(os.path.abspath(est_path)), '/net_met_list', "%s" %
            ("%s%s%s" % ('_', network, '_') if network else "_"),
            os.path.basename(roi).split('.')[0])
    else:
        if network:
            met_list_pickle_path = "%s%s%s" % (os.path.dirname(
                os.path.abspath(est_path)), '/net_met_list_', network)
        else:
            met_list_pickle_path = "%s%s" % (os.path.dirname(
                os.path.abspath(est_path)), '/net_met_list')
    with open(met_list_pickle_path, 'wb') as f:
        pickle.dump(metric_list_names, f, protocol=2)

    # And save results to csv
    out_path = utils.create_csv_path(ID, network, conn_model, thr, roi,
                                     dir_path, node_size)
    np.savetxt(out_path, net_met_val_list_final, delimiter='\t')

    if frag:
        out_path_neat = "%s%s" % (out_path.split('.csv')[0], '_frag_neat.csv')
    else:
        out_path_neat = "%s%s" % (out_path.split('.csv')[0], '_neat.csv')
    df = pd.DataFrame.from_dict(dict(
        zip(metric_list_names, net_met_val_list_final)),
                                orient='index').transpose()
    df.to_csv(out_path_neat, index=False)

    return out_path