Code Example #1
File: test_plotting.py  Project: ryanhammonds/PyNets
def test_plot_network_clusters(plotting_data, plot_overlaps):
    """ Test plotting network clusters"""

    from pynets.stats.netstats import community_resolution_selection

    temp_file = tempfile.NamedTemporaryFile(mode='w+',
                                            prefix='figure',
                                            suffix='.png')
    fname = str(temp_file.name)

    conn_matrix = plotting_data['conn_matrix']

    G = nx.from_numpy_matrix(np.abs(conn_matrix))
    _, communities, _, _ = community_resolution_selection(G)
    plot_labels = True

    plot_gen.plot_network_clusters(G,
                                   communities,
                                   fname,
                                   plot_overlaps=plot_overlaps,
                                   plot_labels=plot_labels)

    assert os.path.isfile(fname)

    temp_file.close()
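Note that `plotting_data` and `plot_overlaps` are pytest fixtures defined elsewhere in the test suite. A minimal sketch of how they might be supplied, assuming a small random symmetric matrix is an acceptable stand-in for real connectivity data (the fixture bodies here are hypothetical):

import numpy as np
import pytest


@pytest.fixture
def plotting_data():
    # Hypothetical stand-in: a small random, symmetric "connectivity" matrix
    rng = np.random.default_rng(42)
    mat = rng.random((50, 50))
    return {'conn_matrix': (mat + mat.T) / 2}


@pytest.fixture(params=[True, False])
def plot_overlaps(request):
    # Exercise both overlap-plotting modes
    return request.param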
Code Example #2
def test_community_resolution_selection(sim_num_comms, sim_size):
    """ Test community resolution selection
    Note: It is impossible to enter or cover the second while loop in
          netstats.community_resolution_selection.
    """
    G = nx.caveman_graph(sim_num_comms, sim_size)
    node_ci, ci, resolution, num_comms = netstats.community_resolution_selection(G)

    assert len(node_ci) == len(ci)
    assert num_comms == sim_num_comms
    assert resolution is not None
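For context, a quick standalone check of the same call on a caveman graph, assuming the return signature shown above (node-wise assignments, community structure, resolution, community count):

import networkx as nx
from pynets.stats.netstats import community_resolution_selection

# Three fully connected cliques of five nodes each
G = nx.caveman_graph(3, 5)
node_ci, ci, resolution, num_comms = community_resolution_selection(G)
print(len(node_ci), num_comms)  # with three separate cliques, three communities are expected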
Code Example #3
File: plot_gen.py  Project: devhliu/PyNets
def plot_connectogram(conn_matrix, conn_model, atlas, dir_path, ID, network, labels):
    """
    Plot a connectogram for a given connectivity matrix.

    Parameters
    ----------
    conn_matrix : array
        NxN matrix.
    conn_model : str
       Connectivity estimation model (e.g. corr for correlation, cov for covariance, sps for precision covariance,
       partcorr for partial correlation). sps type is used by default.
    atlas : str
        Name of atlas parcellation used.
    dir_path : str
        Path to directory containing subject derivative data for given run.
    ID : str
        A subject id or other unique identifier.
    network : str
        Resting-state network based on Yeo-7 and Yeo-17 naming (e.g. 'Default') used to filter nodes in the study of
        brain subgraphs.
    labels : list
        List of string labels corresponding to ROI nodes.
    """
    import json
    from pathlib import Path
    from networkx.readwrite import json_graph
    from pynets.core.thresholding import normalize
    from pynets.stats.netstats import most_important
    # from scipy.cluster.hierarchy import linkage, fcluster
    from nipype.utils.filemanip import save_json

    # Advanced Settings
    comm = 'nodes'
    pruned = False
    #color_scheme = 'interpolateCool'
    #color_scheme = 'interpolateGnBu'
    #color_scheme = 'interpolateOrRd'
    #color_scheme = 'interpolatePuRd'
    #color_scheme = 'interpolateYlOrRd'
    #color_scheme = 'interpolateReds'
    #color_scheme = 'interpolateGreens'
    color_scheme = 'interpolateBlues'
    # Advanced Settings

    conn_matrix = normalize(conn_matrix)
    G = nx.from_numpy_matrix(np.abs(conn_matrix))
    if pruned is True:
        [G, pruned_nodes] = most_important(G)
        conn_matrix = nx.to_numpy_array(G)

        pruned_nodes.sort(reverse=True)
        for j in pruned_nodes:
            del labels[labels.index(labels[j])]

    # def _doClust(X, clust_levels):
    #     """
    #     Create Ward cluster linkages.
    #     """
    #     # get the linkage diagram
    #     Z = linkage(X, 'ward')
    #     # choose # cluster levels
    #     cluster_levels = range(1, int(clust_levels))
    #     # init array to store labels for each level
    #     clust_levels_tmp = int(clust_levels) - 1
    #     label_arr = np.zeros((int(clust_levels_tmp), int(X.shape[0])))
    #     # iterate thru levels
    #     for c in cluster_levels:
    #         fl = fcluster(Z, c, criterion='maxclust')
    #         #print(fl)
    #         label_arr[c-1, :] = fl
    #     return label_arr, clust_levels_tmp

    if comm == 'nodes' and len(conn_matrix) > 40:
        from pynets.stats.netstats import community_resolution_selection
        G = nx.from_numpy_matrix(np.abs(conn_matrix))
        _, node_comm_aff_mat, resolution, num_comms = community_resolution_selection(G)
        clust_levels = len(node_comm_aff_mat)
        clust_levels_tmp = int(clust_levels) - 1
        mask_mat = np.squeeze(np.array([node_comm_aff_mat == 0]).astype('int'))
        label_arr = node_comm_aff_mat * np.expand_dims(np.arange(1, clust_levels+1), axis=1) + mask_mat
    elif comm == 'links' and len(conn_matrix) > 40:
        from pynets.stats.netstats import link_communities
        # Plot link communities
        link_comm_aff_mat = link_communities(conn_matrix, type_clustering='single')
        print("%s%s%s" % ('Found ', str(len(link_comm_aff_mat)), ' communities...'))
        clust_levels = len(link_comm_aff_mat)
        clust_levels_tmp = int(clust_levels) - 1
        mask_mat = np.squeeze(np.array([link_comm_aff_mat == 0]).astype('int'))
        label_arr = link_comm_aff_mat * np.expand_dims(np.arange(1, clust_levels+1), axis=1) + mask_mat
    else:
        return
    # elif len(conn_matrix) > 20:
    #     print('Graph too small for reliable plotting of communities. Plotting by fcluster instead...')
    #     if len(conn_matrix) >= 250:
    #         clust_levels = 7
    #     elif len(conn_matrix) >= 200:
    #         clust_levels = 6
    #     elif len(conn_matrix) >= 150:
    #         clust_levels = 5
    #     elif len(conn_matrix) >= 100:
    #         clust_levels = 4
    #     elif len(conn_matrix) >= 50:
    #         clust_levels = 3
    #     else:
    #         clust_levels = 2
    #     [label_arr, clust_levels_tmp] = _doClust(conn_matrix, clust_levels)

    def _get_node_label(node_idx, labels, clust_levels_tmp):
        """
        Assign a label to a given node based on its community/cluster assignment.
        """
        from collections import OrderedDict

        def _write_roman(num):
            """
            Create community/cluster assignments using a Roman-Numeral generator.
            """
            roman = OrderedDict()
            roman[1000] = "M"
            roman[900] = "CM"
            roman[500] = "D"
            roman[400] = "CD"
            roman[100] = "C"
            roman[90] = "XC"
            roman[50] = "L"
            roman[40] = "XL"
            roman[10] = "X"
            roman[9] = "IX"
            roman[5] = "V"
            roman[4] = "IV"
            roman[1] = "I"

            def roman_num(num):
                """

                :param num:
                """
                for r in roman.keys():
                    x, y = divmod(num, r)
                    yield roman[r] * x
                    num -= (r * x)
                    if num > 0:
                        roman_num(num)
                    else:
                        break
            return "".join([a for a in roman_num(num)])
        rn_list = []
        node_idx = node_idx - 1
        node_labels = labels[:, node_idx]
        for k in [int(l) for i, l in enumerate(node_labels)]:
            rn_list.append(json.dumps(_write_roman(k)))
        abet = rn_list
        node_lab_alph = ".".join(["{}{}".format(abet[i], int(l)) for i, l in enumerate(node_labels)]) + ".{}".format(
            labels[node_idx])
        return node_lab_alph

    output = []

    adj_dict = {}
    for i in list(G.adjacency()):
        source = list(i)[0]
        target = list(list(i)[1])
        adj_dict[source] = target

    for node_idx, connections in adj_dict.items():
        weight_vec = []
        for i in connections:
            wei = G.get_edge_data(node_idx,int(i))['weight']
            weight_vec.append(wei)
        entry = {}
        nodes_label = _get_node_label(node_idx, label_arr, clust_levels_tmp)
        entry["name"] = nodes_label
        entry["size"] = len(connections)
        entry["imports"] = [_get_node_label(int(d)-1, label_arr, clust_levels_tmp) for d in connections]
        entry["weights"] = weight_vec
        output.append(entry)

    if network:
        json_file_name = "%s%s%s%s%s%s" % (str(ID), '_', network, '_connectogram_', conn_model, '_network.json')
        json_fdg_file_name = "%s%s%s%s%s%s" % (str(ID), '_', network, '_fdg_', conn_model, '_network.json')
        connectogram_plot = "%s%s%s" % (dir_path, '/', json_file_name)
        fdg_js_sub = "%s%s%s%s%s%s%s%s" % (dir_path, '/', str(ID), '_', network, '_fdg_', conn_model, '_network.js')
        fdg_js_sub_name = "%s%s%s%s%s%s" % (str(ID), '_', network, '_fdg_', conn_model, '_network.js')
        connectogram_js_sub = "%s%s%s%s%s%s%s%s" % (dir_path, '/', str(ID), '_', network, '_connectogram_', conn_model,
                                                    '_network.js')
        connectogram_js_name = "%s%s%s%s%s%s" % (str(ID), '_', network, '_connectogram_', conn_model, '_network.js')
    else:
        json_file_name = "%s%s%s%s" % (str(ID), '_connectogram_', conn_model, '.json')
        json_fdg_file_name = "%s%s%s%s" % (str(ID), '_fdg_', conn_model, '.json')
        connectogram_plot = "%s%s%s" % (dir_path, '/', json_file_name)
        connectogram_js_sub = "%s%s%s%s%s%s" % (dir_path, '/', str(ID), '_connectogram_', conn_model, '.js')
        fdg_js_sub = "%s%s%s%s%s%s" % (dir_path, '/', str(ID), '_fdg_', conn_model, '.js')
        fdg_js_sub_name = "%s%s%s%s" % (str(ID), '_fdg_', conn_model, '.js')
        connectogram_js_name = "%s%s%s%s" % (str(ID), '_connectogram_', conn_model, '.js')
    save_json(connectogram_plot, output)

    # Force-directed graphing
    G = nx.from_numpy_matrix(np.round(np.abs(conn_matrix).astype('float64'), 6))
    data = json_graph.node_link_data(G)
    data.pop('directed', None)
    data.pop('graph', None)
    data.pop('multigraph', None)
    for k in range(len(data['links'])):
        data['links'][k]['value'] = data['links'][k].pop('weight')
    for k in range(len(data['nodes'])):
        data['nodes'][k]['id'] = str(data['nodes'][k]['id'])
    for k in range(len(data['links'])):
        data['links'][k]['source'] = str(data['links'][k]['source'])
        data['links'][k]['target'] = str(data['links'][k]['target'])

    # Add community structure
    for k in range(len(data['nodes'])):
        data['nodes'][k]['group'] = str(label_arr[0][k])

    # Add node labels
    for k in range(len(data['nodes'])):
        data['nodes'][k]['name'] = str(labels[k])

    out_file = "%s%s%s" % (dir_path, '/', str(json_fdg_file_name))
    save_json(out_file, data)

    # Copy index.html and json to dir_path
    conn_js_path = str(Path(__file__).parent/"connectogram.js")
    index_html_path = str(Path(__file__).parent/"index.html")
    fdg_replacements_js = {"FD_graph.json": str(json_fdg_file_name)}
    replacements_html = {'connectogram.js': str(connectogram_js_name), 'fdg.js': str(fdg_js_sub_name)}
    fdg_js_path = str(Path(__file__).parent/"fdg.js")
    with open(index_html_path) as infile, open(str(dir_path + '/index.html'), 'w') as outfile:
        for line in infile:
            for src, target in replacements_html.items():
                line = line.replace(src, target)
            outfile.write(line)

    replacements_js = {'template.json': str(json_file_name), 'interpolateCool': str(color_scheme)}
    with open(conn_js_path) as infile, open(connectogram_js_sub, 'w') as outfile:
        for line in infile:
            for src, target in replacements_js.items():
                line = line.replace(src, target)
            outfile.write(line)

    with open(fdg_js_path) as infile, open(fdg_js_sub, 'w') as outfile:
        for line in infile:
            for src, target in fdg_replacements_js.items():
                line = line.replace(src, target)
            outfile.write(line)

    return
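The nested `_write_roman` helper above encodes community indices as Roman numerals for the hierarchical node labels. A standalone sketch of the same conversion, for quick verification:

from collections import OrderedDict


def write_roman(num):
    # Greedy subtraction over descending Roman values
    roman = OrderedDict([(1000, "M"), (900, "CM"), (500, "D"), (400, "CD"),
                         (100, "C"), (90, "XC"), (50, "L"), (40, "XL"),
                         (10, "X"), (9, "IX"), (5, "V"), (4, "IV"), (1, "I")])
    pieces = []
    for value, numeral in roman.items():
        count, num = divmod(num, value)
        pieces.append(numeral * count)
    return "".join(pieces)


assert write_roman(4) == "IV"
assert write_roman(1987) == "MCMLXXXVII"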
Code Example #4
def plot_conn_mat_struct(conn_matrix, conn_model, atlas, dir_path, ID, network,
                         labels, roi, thr, node_size, target_samples,
                         track_type, directget, max_length):
    """
    API for selecting among various structural connectivity matrix plotting approaches.

    Parameters
    ----------
    conn_matrix : array
        NxN matrix.
    conn_model : str
       Connectivity estimation model (e.g. corr for correlation, cov for covariance, sps for precision covariance,
       partcorr for partial correlation). sps type is used by default.
    atlas : str
        Name of atlas parcellation used.
    dir_path : str
        Path to directory containing subject derivative data for given run.
    ID : str
        A subject id or other unique identifier.
    network : str
        Resting-state network based on Yeo-7 and Yeo-17 naming (e.g. 'Default') used to filter nodes in the study of
        brain subgraphs.
    labels : list
        List of string labels corresponding to ROI nodes.
    roi : str
        File path to binarized/boolean region-of-interest Nifti1Image file.
    thr : float
        A value, between 0 and 1, to threshold the graph using any variety of methods
        triggered through other options.
    node_size : int
        Spherical centroid node size in the case that coordinate-based centroids
        are used as ROI's.
    target_samples : int
        Total number of streamline samples specified to generate streams.
    track_type : str
        Tracking algorithm used (e.g. 'local' or 'particle').
    directget : str
        The statistical approach to tracking. Options are: det (deterministic), closest (clos), boot (bootstrapped),
        and prob (probabilistic).
    max_length : int
        Maximum fiber length threshold in mm to restrict tracking.
    """
    from pynets.plotting import plot_graphs
    import networkx as nx
    import os.path as op
    out_path_fig = "%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s" % (
        dir_path, '/', ID, '_modality-dwi_', '%s' %
        ("%s%s%s" %
         ('rsn-', network, '_') if network is not None else ''), '%s' %
        ("%s%s%s" % ('roi-', op.basename(roi).split('.')[0], '_')
         if roi is not None else ''), 'est-', conn_model, '_', '%s' %
        ("%s%s%s" % ('nodetype-spheres-', node_size, 'mm_') if
         ((node_size != 'parc') and
          (node_size is not None)) else 'nodetype-parc_'), "%s" %
        ("%s%s%s" % ('samples-', int(target_samples), 'streams_')
         if float(target_samples) > 0 else '_'), 'tt-', track_type, '_dg-',
        directget, '_ml-', max_length, '_thr-', thr, '_adj_mat.png')
    plot_graphs.plot_conn_mat(conn_matrix, labels, out_path_fig)

    # Plot community adj. matrix
    try:
        from pynets.stats.netstats import community_resolution_selection
        G = nx.from_numpy_matrix(np.abs(conn_matrix))
        _, node_comm_aff_mat, resolution, num_comms = community_resolution_selection(
            G)
        out_path_fig_comm = "%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s" % (
            dir_path, '/', ID, '_modality-dwi_', '%s' %
            ("%s%s%s" %
             ('rsn-', network, '_') if network is not None else ''), '%s' %
            ("%s%s%s" % ('roi-', op.basename(roi).split('.')[0], '_')
             if roi is not None else ''), 'est-', conn_model, '_', '%s' %
            ("%s%s%s" % ('nodetype-spheres-', node_size, 'mm_') if
             ((node_size != 'parc') and
              (node_size is not None)) else 'nodetype-parc_'), "%s" %
            ("%s%s%s" % ('samples-', int(target_samples), 'streams_')
             if float(target_samples) > 0 else '_'), 'tt-', track_type, '_dg-',
            directget, '_ml-', max_length, '_thr-', thr, '_adj_mat_comm.png')
        plot_graphs.plot_community_conn_mat(conn_matrix, labels,
                                            out_path_fig_comm,
                                            node_comm_aff_mat)
    except:
        print(
            '\nWARNING: Louvain community detection failed. Cannot plot community matrix...'
        )

    return
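The long `%s` chain above assembles a BIDS-like filename from optional components. An equivalent, more readable sketch of the same pattern (illustrative only, not the project's code; the helper name is hypothetical):

import os.path as op


def build_dwi_adj_mat_name(dir_path, ID, network, roi, conn_model, node_size,
                           target_samples, track_type, directget, max_length, thr):
    # Each optional component contributes either a key-value chunk or nothing
    rsn = f"rsn-{network}_" if network is not None else ''
    roi_part = f"roi-{op.basename(roi).split('.')[0]}_" if roi is not None else ''
    nodetype = (f"nodetype-spheres-{node_size}mm_"
                if node_size not in (None, 'parc') else 'nodetype-parc_')
    samples = (f"samples-{int(target_samples)}streams_"
               if float(target_samples) > 0 else '_')
    return (f"{dir_path}/{ID}_modality-dwi_{rsn}{roi_part}est-{conn_model}_"
            f"{nodetype}{samples}tt-{track_type}_dg-{directget}"
            f"_ml-{max_length}_thr-{thr}_adj_mat.png")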
Code Example #5
def plot_conn_mat_func(conn_matrix, conn_model, atlas, dir_path, ID, network,
                       labels, roi, thr, node_size, smooth, c_boot, hpass):
    """
    API for selecting among various functional connectivity matrix plotting approaches.

    Parameters
    ----------
    conn_matrix : array
        NxN matrix.
    conn_model : str
       Connectivity estimation model (e.g. corr for correlation, cov for covariance, sps for precision covariance,
       partcorr for partial correlation). sps type is used by default.
    atlas : str
        Name of atlas parcellation used.
    dir_path : str
        Path to directory containing subject derivative data for given run.
    ID : str
        A subject id or other unique identifier.
    network : str
        Resting-state network based on Yeo-7 and Yeo-17 naming (e.g. 'Default') used to filter nodes in the study of
        brain subgraphs.
    labels : list
        List of string labels corresponding to ROI nodes.
    roi : str
        File path to binarized/boolean region-of-interest Nifti1Image file.
    thr : float
        A value, between 0 and 1, to threshold the graph using any variety of methods
        triggered through other options.
    node_size : int
        Spherical centroid node size in the case that coordinate-based centroids
        are used as ROI's.
    smooth : int
        Smoothing width (mm fwhm) to apply to time-series when extracting signal from ROI's.
    c_boot : int
        Number of bootstraps if user specified circular-block bootstrapped resampling of the node-extracted time-series.
    hpass : float
        High-pass filter values (Hz) to apply to node-extracted time-series.
    """
    import networkx as nx
    import os.path as op
    from pynets.plotting import plot_graphs

    out_path_fig = "%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s" % (
        dir_path, '/', ID, '_modality-func_', '%s' %
        ("%s%s%s" %
         ('rsn-', network, '_') if network is not None else ''), '%s' %
        ("%s%s%s" % ('roi-', op.basename(roi).split('.')[0], '_')
         if roi is not None else ''), 'est-', conn_model, '_', '%s' %
        ("%s%s%s" % ('nodetype-spheres-', node_size, 'mm_') if
         ((node_size != 'parc') and
          (node_size is not None)) else 'nodetype-parc_'), "%s" %
        ("%s%s%s" %
         ('boot-', int(c_boot), 'iter_') if float(c_boot) > 0 else ''), "%s" %
        ("%s%s%s" %
         ('smooth-', smooth, 'fwhm_') if float(smooth) > 0 else ''), "%s" %
        ("%s%s%s" % ('hpass-', hpass, 'Hz_') if hpass is not None else ''),
        '_thr-', thr, '_adj_mat.png')

    plot_graphs.plot_conn_mat(conn_matrix, labels, out_path_fig)

    # Plot community adj. matrix
    try:
        from pynets.stats.netstats import community_resolution_selection
        G = nx.from_numpy_matrix(np.abs(conn_matrix))
        _, node_comm_aff_mat, resolution, num_comms = community_resolution_selection(
            G)
        out_path_fig_comm = "%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s" % (
            dir_path, '/', ID, '_modality-func_', '%s' %
            ("%s%s%s" %
             ('rsn-', network, '_') if network is not None else ''), '%s' %
            ("%s%s%s" % ('roi-', op.basename(roi).split('.')[0], '_')
             if roi is not None else ''), 'est-', conn_model, '_', '%s' %
            ("%s%s%s" % ('nodetype-spheres-', node_size, 'mm_') if
             ((node_size != 'parc') and
              (node_size is not None)) else 'nodetype-parc_'), "%s" %
            ("%s%s%s" %
             ('boot-', int(c_boot), 'iter_') if float(c_boot) > 0 else ''),
            "%s" %
            ("%s%s%s" %
             ('smooth-', smooth, 'fwhm_') if float(smooth) > 0 else ''), "%s" %
            ("%s%s%s" % ('hpass-', hpass, 'Hz_') if hpass is not None else ''),
            '_thr-', thr, '_adj_mat_comm.png')
        plot_graphs.plot_community_conn_mat(conn_matrix, labels,
                                            out_path_fig_comm,
                                            node_comm_aff_mat)
    except:
        print(
            '\nWARNING: Louvain community detection failed. Cannot plot community matrix...'
        )

    return
Code Example #6
def plot_conn_mat_struct(conn_matrix, conn_model, atlas, dir_path, ID, network,
                         labels, roi, thr, node_size, target_samples,
                         track_type, directget, min_length, error_margin):
    """
    API for selecting among various structural connectivity matrix plotting
    approaches.

    Parameters
    ----------
    conn_matrix : array
        NxN matrix.
    conn_model : str
       Connectivity estimation model (e.g. corr for correlation, cov for
       covariance, sps for precision covariance, partcorr for partial
       correlation). sps type is used by default.
    atlas : str
        Name of atlas parcellation used.
    dir_path : str
        Path to directory containing subject derivative data for given run.
    ID : str
        A subject id or other unique identifier.
    network : str
        Resting-state network based on Yeo-7 and Yeo-17 naming
        (e.g. 'Default') used to filter nodes in the study of brain subgraphs.
    labels : list
        List of string labels corresponding to ROI nodes.
    roi : str
        File path to binarized/boolean region-of-interest Nifti1Image file.
    thr : float
        A value, between 0 and 1, to threshold the graph using any variety of
        methods triggered through other options.
    node_size : int
        Spherical centroid node size in the case that coordinate-based
        centroids are used as ROI's.
    target_samples : int
        Total number of streamline samples specified to generate streams.
    track_type : str
        Tracking algorithm used (e.g. 'local' or 'particle').
    directget : str
        The statistical approach to tracking. Options are:
        det (deterministic), closest (clos), boot (bootstrapped), and prob
        (probabilistic).
    min_length : int
        Minimum fiber length threshold in mm to restrict tracking.
    """
    import matplotlib.pyplot as plt
    from pynets.core.utils import load_runconfig
    import sys
    from pynets.plotting import plot_graphs
    import networkx as nx
    import os.path as op

    out_path_fig = \
        "%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s" % \
        (dir_path,
         "/adjacency_",
         ID,
         "_modality-dwi_",
         "%s" % ("%s%s%s" % ("rsn-",
                             network,
                             "_") if network is not None else ""),
         "%s" % ("%s%s%s" % ("roi-",
                             op.basename(roi).split(".")[0],
                             "_") if roi is not None else ""),
         "model-",
         conn_model,
         "_",
         "%s" % ("%s%s%s" % ("nodetype-spheres-",
                             node_size,
                             "mm_") if (
             (node_size != "parc") and (
                 node_size is not None)) else "nodetype-parc_"),
         "%s" % ("%s%s%s" % ("samples-",
                             int(target_samples),
                             "streams_") if float(target_samples) > 0
                 else "_"),
         "tracktype-",
         track_type,
         "_directget-",
         directget,
         "_minlength-",
         min_length,
         "_tol-",
         error_margin,
         "_thr-",
         thr,
         ".png",
         )

    hardcoded_params = load_runconfig()
    try:
        cmap_name = hardcoded_params["plotting"]["structural"]["adjacency"][
            "color_theme"][0]
    except KeyError as e:
        print(
            e, "Plotting configuration not successfully extracted from"
            " runconfig.yaml")

    plot_graphs.plot_conn_mat(conn_matrix,
                              labels,
                              out_path_fig,
                              cmap=plt.get_cmap(cmap_name))

    # Plot community adj. matrix
    try:
        from pynets.stats.netstats import community_resolution_selection

        G = nx.from_numpy_matrix(np.abs(conn_matrix))
        _, node_comm_aff_mat, resolution, num_comms = \
            community_resolution_selection(G)
        out_path_fig_comm = \
            "%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s" \
            % (dir_path,
               "/adjacency-communities_",
               ID,
               "_modality-dwi_",
               "%s" % ("%s%s%s" % ("rsn-",
                                   network,
                                   "_") if network is not None else ""),
               "%s" % ("%s%s%s" % ("roi-",
                                   op.basename(roi).split(".")[0],
                                   "_") if roi is not None else ""),
               "model-",
               conn_model,
               "_",
               "%s" % ("%s%s%s" % ("nodetype-spheres-",
                                   node_size,
                                   "mm_") if (
                   (node_size != "parc") and (
                       node_size is not None)) else "nodetype-parc_"),
               "%s" % ("%s%s%s" % ("samples-",
                                   int(target_samples),
                                   "streams_") if float(target_samples) > 0
                       else "_"),
               "tracktype-",
               track_type,
               "_directget-",
               directget,
               "_minlength-",
               min_length,
               "_tol-",
               error_margin,
               "_thr-",
               thr,
               ".png",
               )
        plot_graphs.plot_community_conn_mat(
            conn_matrix,
            labels,
            out_path_fig_comm,
            node_comm_aff_mat,
            cmap=plt.get_cmap(cmap_name),
        )
    except BaseException:
        print("\nWARNING: Louvain community detection failed. Cannot plot"
              " community matrix...")

    return
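If the `color_theme` key were missing from runconfig.yaml, `cmap_name` above would be undefined by the time `plt.get_cmap` is called. A defensive sketch of the same lookup with a fallback default (illustrative; the helper name and default colormap are assumptions, not the project's behavior):

import matplotlib.pyplot as plt


def get_adjacency_cmap(hardcoded_params, modality="structural", default="Blues"):
    # Fall back to a known matplotlib colormap if the config key is absent
    try:
        cmap_name = hardcoded_params["plotting"][modality]["adjacency"][
            "color_theme"][0]
    except (KeyError, IndexError, TypeError):
        cmap_name = default
    return plt.get_cmap(cmap_name)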
Code Example #7
def plot_conn_mat_func(
    conn_matrix,
    conn_model,
    atlas,
    dir_path,
    ID,
    network,
    labels,
    roi,
    thr,
    node_size,
    smooth,
    hpass,
    extract_strategy,
):
    """
    API for selecting among various functional connectivity matrix plotting
    approaches.

    Parameters
    ----------
    conn_matrix : array
        NxN matrix.
    conn_model : str
       Connectivity estimation model (e.g. corr for correlation, cov for
       covariance, sps for precision covariance, partcorr for partial
       correlation). sps type is used by default.
    atlas : str
        Name of atlas parcellation used.
    dir_path : str
        Path to directory containing subject derivative data for given run.
    ID : str
        A subject id or other unique identifier.
    network : str
        Resting-state network based on Yeo-7 and Yeo-17 naming (e.g.
        'Default') used to filter nodes in the study of brain subgraphs.
    labels : list
        List of string labels corresponding to ROI nodes.
    roi : str
        File path to binarized/boolean region-of-interest Nifti1Image file.
    thr : float
        A value, between 0 and 1, to threshold the graph using any variety of
        methods triggered through other options.
    node_size : int
        Spherical centroid node size in the case that coordinate-based
        centroids are used as ROI's.
    smooth : int
        Smoothing width (mm fwhm) to apply to time-series when extracting
        signal from ROI's.
    hpass : float
        High-pass filter values (Hz) to apply to node-extracted time-series.
    extract_strategy : str
        The name of a valid function used to reduce the time-series region
        extraction.
    """
    import matplotlib.pyplot as plt
    from pynets.core.utils import load_runconfig
    import sys
    import networkx as nx
    import os.path as op
    from pynets.plotting import plot_graphs

    out_path_fig = \
        "%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s" % \
        (dir_path,
         "/adjacency_",
         ID,
         "_modality-func_",
         "%s" % ("%s%s%s" % ("rsn-",
                             network,
                             "_") if network is not None else ""),
         "%s" % ("%s%s%s" % ("roi-",
                             op.basename(roi).split(".")[0],
                             "_") if roi is not None else ""),
         "model-",
         conn_model,
         "_",
         "%s" % ("%s%s%s" % ("nodetype-spheres-",
                             node_size,
                             "mm_") if (
             (node_size != "parc") and (
                 node_size is not None)) else "nodetype-parc_"),
         "%s" % ("%s%s%s" % ("smooth-",
                             smooth,
                             "fwhm_") if float(smooth) > 0 else ""),
         "%s" % ("%s%s%s" % ("hpass-",
                             hpass,
                             "Hz_") if hpass is not None else ""),
         "%s" % ("%s%s%s" % ("extract-",
                             extract_strategy,
                             "") if extract_strategy is not None else ""),
         "_thr-",
         thr,
         ".png",
         )

    hardcoded_params = load_runconfig()
    try:
        cmap_name = hardcoded_params["plotting"]["functional"]["adjacency"][
            "color_theme"][0]
    except KeyError as e:
        print(
            e, "Plotting configuration not successfully extracted from"
            " runconfig.yaml")

    plot_graphs.plot_conn_mat(conn_matrix,
                              labels,
                              out_path_fig,
                              cmap=plt.get_cmap(cmap_name))

    # Plot community adj. matrix
    try:
        from pynets.stats.netstats import community_resolution_selection

        G = nx.from_numpy_matrix(np.abs(conn_matrix))
        _, node_comm_aff_mat, resolution, num_comms = \
            community_resolution_selection(G)
        out_path_fig_comm = \
            "%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s" % \
            (dir_path,
             "/adjacency-communities_",
             ID,
             "_modality-func_",
             "%s" % ("%s%s%s" % ("rsn-",
                                 network,
                                 "_") if network is not None else ""),
             "%s" % ("%s%s%s" % ("roi-",
                                 op.basename(roi).split(".")[0],
                                 "_") if roi is not None else ""),
             "model-",
             conn_model,
             "_",
             "%s" % ("%s%s%s" % ("nodetype-spheres-",
                                 node_size,
                                 "mm_") if (
                 (node_size != "parc") and (
                     node_size is not None)) else "nodetype-parc_"),
             "%s" % ("%s%s%s" % ("smooth-",
                                 smooth,
                                 "fwhm_") if float(smooth) > 0 else ""),
             "%s" % ("%s%s%s" % ("hpass-",
                                 hpass,
                                 "Hz_") if hpass is not None else ""),
             "%s" % ("%s%s%s" % ("extract-",
                                 extract_strategy,
                                 "") if extract_strategy is not None else ""),
             "_thr-",
             thr,
             ".png",
             )
        plot_graphs.plot_community_conn_mat(
            conn_matrix,
            labels,
            out_path_fig_comm,
            node_comm_aff_mat,
            cmap=plt.get_cmap(cmap_name),
        )
    except BaseException:
        print("\nWARNING: Louvain community detection failed. Cannot plot "
              "community matrix...")

    return
Code Example #8
File: netmotifs.py  Project: neurolibre/PyNets
def motif_matching(paths,
                   ID,
                   atlas,
                   namer_dir,
                   name_list,
                   metadata_list,
                   multigraph_list_all,
                   graph_path_list_all,
                   rsn=None):
    import networkx as nx
    import numpy as np
    import glob
    from pathlib import Path
    from pynets.core import thresholding
    from pynets.stats.netmotifs import compare_motifs
    from sklearn.metrics.pairwise import cosine_similarity
    from pynets.stats.netstats import community_resolution_selection
    try:
        import cPickle as pickle
    except ImportError:
        import _pickle as pickle

    [struct_graph_path, func_graph_path] = paths
    struct_mat = np.load(struct_graph_path)
    func_mat = np.load(func_graph_path)

    if rsn is not None:
        struct_coords_path = glob.glob(
            f"{str(Path(struct_graph_path).parent.parent)}/nodes/{rsn}_coords_rsn.pkl"
        )[0]
        func_coords_path = glob.glob(
            f"{str(Path(func_graph_path).parent.parent)}/nodes/{rsn}_coords_rsn.pkl"
        )[0]
        struct_labels_path = glob.glob(
            f"{str(Path(struct_graph_path).parent.parent)}/nodes/{rsn}_labels_rsn.pkl"
        )[0]
        func_labels_path = glob.glob(
            f"{str(Path(func_graph_path).parent.parent)}/nodes/{rsn}_labels_rsn.pkl"
        )[0]
    else:
        struct_coords_path = glob.glob(
            f"{str(Path(struct_graph_path).parent.parent)}/nodes/*coords.pkl"
        )[0]
        func_coords_path = glob.glob(
            f"{str(Path(func_graph_path).parent.parent)}/nodes/*coords.pkl")[0]
        struct_labels_path = glob.glob(
            f"{str(Path(struct_graph_path).parent.parent)}/nodes/*labels.pkl"
        )[0]
        func_labels_path = glob.glob(
            f"{str(Path(func_graph_path).parent.parent)}/nodes/*labels.pkl")[0]

    with open(struct_coords_path, 'rb') as file_:
        struct_coords = pickle.load(file_)
    with open(func_coords_path, 'rb') as file_:
        func_coords = pickle.load(file_)
    with open(struct_labels_path, 'rb') as file_:
        struct_labels = pickle.load(file_)
    with open(func_labels_path, 'rb') as file_:
        func_labels = pickle.load(file_)

    if func_mat.shape == struct_mat.shape:
        func_mat[~struct_mat.astype('bool')] = 0
        struct_mat[~func_mat.astype('bool')] = 0
        print("Number of edge disagreements after matching: ",
              sum(sum(abs(func_mat - struct_mat))))

        metadata = {}
        assert len(struct_coords) == len(struct_labels) == len(
            func_coords) == len(func_labels) == func_mat.shape[0]
        metadata['coords'] = struct_coords
        metadata['labels'] = struct_labels
        metadata_list.append(metadata)

        struct_mat = np.maximum(struct_mat, struct_mat.T)
        func_mat = np.maximum(func_mat, func_mat.T)
        struct_mat = thresholding.standardize(struct_mat)
        func_mat = thresholding.standardize(func_mat)

        struct_node_comm_aff_mat = community_resolution_selection(
            nx.from_numpy_matrix(np.abs(struct_mat)))[1]

        func_node_comm_aff_mat = community_resolution_selection(
            nx.from_numpy_matrix(np.abs(func_mat)))[1]

        struct_comms = []
        for i in np.unique(struct_node_comm_aff_mat):
            struct_comms.append(struct_node_comm_aff_mat == i)

        func_comms = []
        for i in np.unique(func_node_comm_aff_mat):
            func_comms.append(func_node_comm_aff_mat == i)

        sims = cosine_similarity(struct_comms, func_comms)
        struct_comm = struct_comms[np.argmax(sims, axis=0)[0]]
        func_comm = func_comms[np.argmax(sims, axis=0)[0]]

        comm_mask = np.equal.outer(struct_comm, func_comm).astype(bool)
        struct_mat[~comm_mask] = 0
        func_mat[~comm_mask] = 0
        struct_name = struct_graph_path.split('/')[-1].split('_raw.npy')[0]
        func_name = func_graph_path.split('/')[-1].split('_raw.npy')[0]
        name = f"{ID}_{atlas}_mplx_Layer-1_{struct_name}_Layer-2_{func_name}"
        name_list.append(name)
        struct_mat = np.maximum(struct_mat, struct_mat.T)
        func_mat = np.maximum(func_mat, func_mat.T)
        [mldict, g_dict] = compare_motifs(struct_mat, func_mat, name,
                                          namer_dir)
        multigraph_list_all.append(list(mldict.values())[0])
        graph_path_list = []
        for thr in list(g_dict.keys()):
            multigraph_path_list_dict = {}
            [struct, func] = g_dict[thr]
            struct_out = f"{namer_dir}/struct_{atlas}_{struct_name}.npy"
            func_out = f"{namer_dir}/struct_{atlas}_{func_name}_motif-{thr}.npy"
            np.save(struct_out, struct)
            np.save(func_out, func)
            multigraph_path_list_dict[f"struct_{atlas}_{thr}"] = struct_out
            multigraph_path_list_dict[f"func_{atlas}_{thr}"] = func_out
            graph_path_list.append(multigraph_path_list_dict)
        graph_path_list_all.append(graph_path_list)
    else:
        print(
            f"Skipping {rsn} rsn, since structural and functional graphs are not identical shapes."
        )

    return name_list, metadata_list, multigraph_list_all, graph_path_list_all
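The `np.equal.outer(struct_comm, func_comm)` step builds an NxN mask in which entry (i, j) is True when node i's structural-community membership equals node j's functional-community membership; edges failing that test are zeroed in both matrices. A toy sketch of that masking with hypothetical membership vectors:

import numpy as np

# Hypothetical boolean membership vectors (True = node belongs to the matched community)
struct_comm = np.array([True, True, False, False])
func_comm = np.array([True, False, False, True])

comm_mask = np.equal.outer(struct_comm, func_comm)
mat = np.arange(16, dtype=float).reshape(4, 4)
mat[~comm_mask] = 0  # zero out edges whose endpoint memberships disagree
print(comm_mask.astype(int))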
Code Example #9
def plot_conn_mat_func(conn_matrix, conn_model, atlas, dir_path, ID, network,
                       labels, roi, thr, node_size, smooth, hpass,
                       extract_strategy):
    """
    API for selecting among various functional connectivity matrix plotting approaches.

    Parameters
    ----------
    conn_matrix : array
        NxN matrix.
    conn_model : str
       Connectivity estimation model (e.g. corr for correlation, cov for covariance, sps for precision covariance,
       partcorr for partial correlation). sps type is used by default.
    atlas : str
        Name of atlas parcellation used.
    dir_path : str
        Path to directory containing subject derivative data for given run.
    ID : str
        A subject id or other unique identifier.
    network : str
        Resting-state network based on Yeo-7 and Yeo-17 naming (e.g. 'Default') used to filter nodes in the study of
        brain subgraphs.
    labels : list
        List of string labels corresponding to ROI nodes.
    roi : str
        File path to binarized/boolean region-of-interest Nifti1Image file.
    thr : float
        A value, between 0 and 1, to threshold the graph using any variety of methods
        triggered through other options.
    node_size : int
        Spherical centroid node size in the case that coordinate-based centroids
        are used as ROI's.
    smooth : int
        Smoothing width (mm fwhm) to apply to time-series when extracting signal from ROI's.
    hpass : float
        High-pass filter values (Hz) to apply to node-extracted time-series.
    extract_strategy : str
        The name of a valid function used to reduce the time-series region extraction.
    """
    import matplotlib.pyplot as plt
    import pkg_resources
    import yaml
    import sys
    import networkx as nx
    import os.path as op
    from pynets.plotting import plot_graphs

    out_path_fig = "%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s" % (
        dir_path, '/', ID, '_modality-func_', '%s' %
        ("%s%s%s" %
         ('rsn-', network, '_') if network is not None else ''), '%s' %
        ("%s%s%s" % ('roi-', op.basename(roi).split('.')[0], '_')
         if roi is not None else ''), 'est-', conn_model, '_', '%s' %
        ("%s%s%s" % ('nodetype-spheres-', node_size, 'mm_') if
         ((node_size != 'parc') and
          (node_size is not None)) else 'nodetype-parc_'), "%s" %
        ("%s%s%s" %
         ('smooth-', smooth, 'fwhm_') if float(smooth) > 0 else ''), "%s" %
        ("%s%s%s" %
         ('hpass-', hpass, 'Hz_') if hpass is not None else ''), "%s" %
        ("%s%s%s" % ('extract-', extract_strategy, '_') if extract_strategy
         is not None else ''), '_thr-', thr, '_adj_mat.png')

    with open(pkg_resources.resource_filename("pynets", "runconfig.yaml"),
              'r') as stream:
        hardcoded_params = yaml.load(stream)
        try:
            cmap_name = hardcoded_params['plotting']['functional'][
                'adjacency']['color_theme'][0]
        except KeyError:
            print(
                'ERROR: Plotting configuration not successfully extracted from runconfig.yaml'
            )
            sys.exit(0)
    stream.close()

    plot_graphs.plot_conn_mat(conn_matrix,
                              labels,
                              out_path_fig,
                              cmap=plt.get_cmap(cmap_name))

    # Plot community adj. matrix
    try:
        from pynets.stats.netstats import community_resolution_selection
        G = nx.from_numpy_matrix(np.abs(conn_matrix))
        _, node_comm_aff_mat, resolution, num_comms = community_resolution_selection(
            G)
        out_path_fig_comm = "%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s" % (
            dir_path, '/', ID, '_modality-func_', '%s' %
            ("%s%s%s" %
             ('rsn-', network, '_') if network is not None else ''), '%s' %
            ("%s%s%s" % ('roi-', op.basename(roi).split('.')[0], '_')
             if roi is not None else ''), 'est-', conn_model, '_', '%s' %
            ("%s%s%s" % ('nodetype-spheres-', node_size, 'mm_') if
             ((node_size != 'parc') and
              (node_size is not None)) else 'nodetype-parc_'), "%s" %
            ("%s%s%s" %
             ('smooth-', smooth, 'fwhm_') if float(smooth) > 0 else ''), "%s" %
            ("%s%s%s" %
             ('hpass-', hpass, 'Hz_') if hpass is not None else ''), "%s" %
            ("%s%s%s" % ('extract-', extract_strategy, '_') if extract_strategy
             is not None else ''), '_thr-', thr, '_adj_mat_comm.png')
        plot_graphs.plot_community_conn_mat(conn_matrix,
                                            labels,
                                            out_path_fig_comm,
                                            node_comm_aff_mat,
                                            cmap=plt.get_cmap(cmap_name))
    except:
        print(
            '\nWARNING: Louvain community detection failed. Cannot plot community matrix...'
        )

    return
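The snippet above calls `yaml.load` without an explicit Loader, which newer versions of PyYAML warn about or reject outright. A safer sketch of the same config lookup (an alternative, not the project's code):

import pkg_resources
import yaml

with open(pkg_resources.resource_filename("pynets", "runconfig.yaml")) as stream:
    hardcoded_params = yaml.safe_load(stream)

cmap_name = hardcoded_params['plotting']['functional']['adjacency']['color_theme'][0]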
Code Example #10
def motif_matching(
    paths,
    ID,
    atlas,
    namer_dir,
    name_list,
    metadata_list,
    multigraph_list_all,
    graph_path_list_all,
    rsn=None,
):
    import networkx as nx
    import numpy as np
    import glob
    from pathlib import Path
    import pickle
    from pynets.core import thresholding
    from pynets.stats.netmotifs import compare_motifs
    from sklearn.metrics.pairwise import cosine_similarity
    from pynets.stats.netstats import community_resolution_selection
    from graspy.utils import remove_loops, symmetrize, get_lcc
    from pynets.core.nodemaker import get_brainnetome_node_attributes

    [struct_graph_path, func_graph_path] = paths
    struct_mat = np.load(struct_graph_path)
    func_mat = np.load(func_graph_path)

    [struct_coords, struct_labels, struct_label_intensities] = \
        get_brainnetome_node_attributes(glob.glob(
        f"{str(Path(struct_graph_path).parent.parent)}/nodes/*.json"),
        struct_mat.shape[0])

    [func_coords, func_labels, func_label_intensities] = \
        get_brainnetome_node_attributes(glob.glob(
        f"{str(Path(func_graph_path).parent.parent)}/nodes/*.json"),
        func_mat.shape[0])

    # Find intersecting nodes across modalities (i.e. assuming the same
    # parcellation, but accommodating for the possibility of dropped nodes)
    diff1 = list(set(struct_label_intensities) - set(func_label_intensities))
    diff2 = list(set(func_label_intensities) - set(struct_label_intensities))
    G_struct = nx.from_numpy_array(struct_mat)
    G_func = nx.from_numpy_array(func_mat)

    bad_idxs = []
    for val in diff1:
        bad_idxs.append(struct_label_intensities.index(val))
        bad_idxs = sorted(list(set(bad_idxs)), reverse=True)
        if type(struct_coords) is np.ndarray:
            struct_coords = list(tuple(x) for x in struct_coords)
    for j in bad_idxs:
        G_struct.remove_node(j)
        print(f"Removing: {(struct_labels[j], struct_coords[j])}...")
        del struct_labels[j], struct_coords[j]

    bad_idxs = []
    for val in diff2:
        bad_idxs.append(func_label_intensities.index(val))
        bad_idxs = sorted(list(set(bad_idxs)), reverse=True)
        if type(func_coords) is np.ndarray:
            func_coords = list(tuple(x) for x in func_coords)
    for j in bad_idxs:
        G_func.remove_node(j)
        print(f"Removing: {(func_labels[j], func_coords[j])}...")
        del func_labels[j], func_coords[j]

    struct_mat = nx.to_numpy_array(G_struct)
    func_mat = nx.to_numpy_array(G_func)

    struct_mat = thresholding.autofix(symmetrize(remove_loops(struct_mat)))

    func_mat = thresholding.autofix(symmetrize(remove_loops(func_mat)))

    if func_mat.shape == struct_mat.shape:
        func_mat[~struct_mat.astype("bool")] = 0
        struct_mat[~func_mat.astype("bool")] = 0
        print(
            "Edge disagreements after matching: ",
            sum(sum(abs(func_mat - struct_mat))),
        )

        metadata = {}
        assert (
            len(struct_coords)
            == len(struct_labels)
            == len(func_coords)
            == len(func_labels)
            == func_mat.shape[0]
        )
        metadata["coords"] = struct_coords
        metadata["labels"] = struct_labels
        metadata_list.append(metadata)

        struct_mat = np.maximum(struct_mat, struct_mat.T)
        func_mat = np.maximum(func_mat, func_mat.T)
        struct_mat = thresholding.standardize(struct_mat)
        func_mat = thresholding.standardize(func_mat)

        struct_node_comm_aff_mat = community_resolution_selection(
            nx.from_numpy_matrix(np.abs(struct_mat))
        )[1]

        func_node_comm_aff_mat = community_resolution_selection(
            nx.from_numpy_matrix(np.abs(func_mat))
        )[1]

        struct_comms = []
        for i in np.unique(struct_node_comm_aff_mat):
            struct_comms.append(struct_node_comm_aff_mat == i)

        func_comms = []
        for i in np.unique(func_node_comm_aff_mat):
            func_comms.append(func_node_comm_aff_mat == i)

        sims = cosine_similarity(struct_comms, func_comms)
        try:
            struct_comm = struct_comms[np.argmax(sims, axis=0)[0]]
        except BaseException:
            print('Matching by structural communities failed...')
            struct_comm = struct_mat
        try:
            func_comm = func_comms[np.argmax(sims, axis=0)[0]]
        except BaseException:
            print('Matching by functional communities failed...')
            func_comm = func_mat

        comm_mask = np.equal.outer(struct_comm, func_comm).astype(bool)

        try:
            assert comm_mask.shape == struct_mat.shape == func_mat.shape
        except AssertionError as e:
            e.args += (comm_mask, comm_mask.shape, struct_mat,
                       struct_mat.shape, func_mat, func_mat.shape)

        try:
            struct_mat[~comm_mask] = 0
        except BaseException:
            print('Skipping community masking...')
        try:
            func_mat[~comm_mask] = 0
        except BaseException:
            print('Skipping community masking...')

        struct_name = struct_graph_path.split("/rawgraph_"
                                              )[-1].split(".npy")[0]
        func_name = func_graph_path.split("/rawgraph_")[-1].split(".npy")[0]
        name = f"sub-{ID}_{atlas}_mplx_Layer-1_{struct_name}_" \
               f"Layer-2_{func_name}"
        name_list.append(name)
        struct_mat = np.maximum(struct_mat, struct_mat.T)
        func_mat = np.maximum(func_mat, func_mat.T)
        try:
            [mldict, g_dict] = compare_motifs(
                struct_mat, func_mat, name, namer_dir)
        except BaseException:
            print(f"Adaptive thresholding by motif comparisons failed "
                  f"for {name}. This usually happens when no motifs are found")
            return [], [], [], []

        multigraph_list_all.append(list(mldict.values())[0])
        graph_path_list = []
        for thr in list(g_dict.keys()):
            multigraph_path_list_dict = {}
            [struct, func] = g_dict[thr]
            struct_out = f"{namer_dir}/struct_{atlas}_{struct_name}.npy"
            func_out = f"{namer_dir}/struct_{atlas}_{func_name}_" \
                       f"motif-{thr}.npy"
            np.save(struct_out, struct)
            np.save(func_out, func)
            multigraph_path_list_dict[f"struct_{atlas}_{thr}"] = struct_out
            multigraph_path_list_dict[f"func_{atlas}_{thr}"] = func_out
            graph_path_list.append(multigraph_path_list_dict)
        graph_path_list_all.append(graph_path_list)
    else:
        print(
            f"Skipping {rsn} rsn, since structural and functional graphs are "
            f"not identical shapes."
        )

    return name_list, metadata_list, multigraph_list_all, graph_path_list_all
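Before comparing the two modalities, the code above drops any node whose label intensity appears in only one of them. A toy sketch of that set-difference pruning, using hypothetical intensity lists:

import networkx as nx
import numpy as np

struct_intensities = [101, 102, 103, 104]   # hypothetical per-node intensities
func_intensities = [101, 103, 104, 105]

G_struct = nx.from_numpy_array(np.ones((4, 4)))
# Indices present in the structural graph but missing from the functional one
only_struct = [struct_intensities.index(v)
               for v in set(struct_intensities) - set(func_intensities)]
for j in sorted(only_struct, reverse=True):
    G_struct.remove_node(j)  # drops node 1 (intensity 102)
print(G_struct.number_of_nodes())  # 3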
Code Example #11
def create_gb_palette(mat,
                      edge_cmap,
                      coords,
                      labels,
                      node_size='auto',
                      node_cmap=None,
                      prune=True):
    """
    Create connectome color palette based on topography.

    Parameters
    ----------
    mat : array
        NxN matrix.
    edge_cmap: colormap
        colormap used for representing the weight of the edges.
    coords : list
        List of (x, y, z) tuples corresponding to an a-priori defined set (e.g. a coordinate atlas).
    labels : list
        List of string labels corresponding to ROI nodes.
    node_size : 'auto', int, or array_like
        Spherical centroid node size in the case that coordinate-based centroids
        are used as ROI's, or the size(s) of the nodes in points^2.
    node_cmap : colormap
        Colormap used for representing the community assignment of the nodes.
    prune : bool
        Indicates whether to prune the final graph of disconnected nodes/isolates.
    """
    import random
    import seaborn as sns
    import networkx as nx
    from pynets.core import thresholding
    from matplotlib import colors
    from sklearn.preprocessing import minmax_scale
    from pynets.stats.netstats import community_resolution_selection, prune_disconnected

    mat = np.array(np.array(thresholding.autofix(mat)))
    if prune is True:
        [G,
         pruned_nodes] = prune_disconnected(nx.from_numpy_matrix(np.abs(mat)))
        pruned_nodes.sort(reverse=True)
        coords_pre = list(coords)
        labels_pre = list(labels)
        if len(pruned_nodes) > 0:
            for j in pruned_nodes:
                del labels_pre[j], coords_pre[j]
            mat = nx.to_numpy_array(G)
            labels = labels_pre
            coords = coords_pre
        else:
            print('No nodes to prune for plotting...')
    else:
        G = nx.from_numpy_matrix(np.abs(mat))

    # Node centralities
    try:
        node_centralities = list(
            nx.algorithms.eigenvector_centrality_numpy(
                G, weight='weight').values())
    except:
        node_centralities = len(coords) * [1]
    max_node_size = (1 / mat.shape[0] *
                     1e3 if node_size == 'auto' else node_size)
    node_sizes = np.array(
        minmax_scale(node_centralities, feature_range=(1, max_node_size)))

    # Node communities
    _, node_comm_aff_mat, resolution, num_comms = community_resolution_selection(
        G)

    # Path lengths
    edge_lengths = []
    for edge_dict in [i[1] for i in nx.all_pairs_shortest_path_length(G)]:
        edge_lengths.extend(list(edge_dict.values()))

    edge_sizes = np.array(minmax_scale(edge_lengths, feature_range=(0.5, 2)))

    # Nodes
    if not node_cmap:
        # Generate as many randomly distinct colors as num_comms
        def random_color(n):
            ret = []
            r = int(random.random() * 256)
            g = int(random.random() * 256)
            b = int(random.random() * 256)
            step = 256 / n
            for i in range(n):
                r += step
                g += step
                b += step
                r = int(r) % 256
                g = int(g) % 256
                b = int(b) % 256
                ret.append((r, g, b))
            return ret

        flatui = [
            '#{:02x}{:02x}{:02x}'.format(i[0], i[1], i[2])
            for i in random_color(num_comms)
        ]

        try:
            ls_cmap = colors.LinearSegmentedColormap.from_list(
                node_comm_aff_mat, sns.color_palette(flatui,
                                                     n_colors=num_comms))
            matplotlib.cm.register_cmap("community", ls_cmap)
            clust_pal = sns.color_palette("community", n_colors=mat.shape[0])
        except:
            clust_pal = sns.color_palette("Set2", n_colors=mat.shape[0])
    else:
        clust_pal = sns.color_palette(node_cmap, n_colors=mat.shape[0])
    clust_pal_nodes = colors.to_rgba_array(clust_pal)

    # Edges
    z_min = np.percentile(mat[mat > 0], 10)
    z_max = np.percentile(mat[mat > 0], 90)
    edge_cmap_pl = sns.color_palette(edge_cmap)
    clust_pal_edges = colors.ListedColormap(edge_cmap_pl.as_hex())

    return mat, clust_pal_edges, clust_pal_nodes, node_sizes, edge_sizes, z_min, z_max, coords, labels
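Node sizes above are eigenvector centralities rescaled into a display range whose maximum shrinks as the graph grows ('auto' uses 1/N * 1e3). A quick sketch of that scaling step in isolation, with hypothetical centrality values:

import numpy as np
from sklearn.preprocessing import minmax_scale

node_centralities = [0.02, 0.10, 0.35, 0.70]   # hypothetical centralities
max_node_size = 1 / 4 * 1e3                    # 'auto' rule for a 4-node graph
node_sizes = np.array(minmax_scale(node_centralities,
                                   feature_range=(1, max_node_size)))
print(node_sizes)  # smallest node -> 1.0, largest -> 250.0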
Code Example #12
File: plot_gen.py  Project: ShreyasFadnavis/PyNets
def plot_connectogram(conn_matrix,
                      conn_model,
                      atlas,
                      dir_path,
                      ID,
                      network,
                      labels,
                      comm='nodes',
                      color_scheme='interpolateBlues',
                      prune=False):
    """
    Plot a connectogram for a given connectivity matrix.

    Parameters
    ----------
    conn_matrix : array
        NxN matrix.
    conn_model : str
       Connectivity estimation model (e.g. corr for correlation, cov for covariance, sps for precision covariance,
       partcorr for partial correlation). sps type is used by default.
    atlas : str
        Name of atlas parcellation used.
    dir_path : str
        Path to directory containing subject derivative data for given run.
    ID : str
        A subject id or other unique identifier.
    network : str
        Resting-state network based on Yeo-7 and Yeo-17 naming (e.g. 'Default') used to filter nodes in the study of
        brain subgraphs.
    labels : list
        List of string labels corresponding to ROI nodes.
    comm : str, optional, default: 'nodes'
        Community setting, either 'nodes' or 'links'.
    color_scheme : str, optional, default: 'interpolateBlues'
        Color scheme in json.
    prune : bool
        Indicates whether to prune final graph of disconnected nodes/isolates.

    """
    import json
    from pathlib import Path
    from networkx.readwrite import json_graph
    from pynets.core.thresholding import normalize
    from pynets.stats.netstats import most_important
    # from scipy.cluster.hierarchy import linkage, fcluster
    from nipype.utils.filemanip import save_json

    conn_matrix = normalize(conn_matrix)
    G = nx.from_numpy_matrix(np.abs(conn_matrix))
    if prune is True:
        [G, pruned_nodes] = most_important(G)
        conn_matrix = nx.to_numpy_array(G)

        pruned_nodes.sort(reverse=True)
        for j in pruned_nodes:
            del labels[labels.index(labels[j])]

    if comm == 'nodes' and len(conn_matrix) > 40:
        from pynets.stats.netstats import community_resolution_selection
        G = nx.from_numpy_matrix(np.abs(conn_matrix))
        _, node_comm_aff_mat, resolution, num_comms = community_resolution_selection(
            G)
        clust_levels = len(node_comm_aff_mat)
        clust_levels_tmp = int(clust_levels) - 1
        mask_mat = np.squeeze(np.array([node_comm_aff_mat == 0]).astype('int'))
        label_arr = node_comm_aff_mat * np.expand_dims(
            np.arange(1, clust_levels + 1), axis=1) + mask_mat
    elif comm == 'links' and len(conn_matrix) > 40:
        from pynets.stats.netstats import link_communities
        # Plot link communities
        link_comm_aff_mat = link_communities(conn_matrix,
                                             type_clustering='single')[0]
        print(f"{'Found '}{str(len(link_comm_aff_mat))}{' communities...'}")
        clust_levels = len(link_comm_aff_mat)
        clust_levels_tmp = int(clust_levels) - 1
        mask_mat = np.squeeze(np.array([link_comm_aff_mat == 0]).astype('int'))
        label_arr = link_comm_aff_mat * np.expand_dims(
            np.arange(1, clust_levels + 1), axis=1) + mask_mat
    else:
        return

    def _get_node_label(node_idx, labels, clust_levels_tmp):
        """
        Assign a label to a given node based on its community/cluster assignment.
        """
        from collections import OrderedDict

        def _write_roman(num):
            """
            Create community/cluster assignments using a Roman-Numeral generator.
            """
            roman = OrderedDict()
            roman[1000] = "M"
            roman[900] = "CM"
            roman[500] = "D"
            roman[400] = "CD"
            roman[100] = "C"
            roman[90] = "XC"
            roman[50] = "L"
            roman[40] = "XL"
            roman[10] = "X"
            roman[9] = "IX"
            roman[5] = "V"
            roman[4] = "IV"
            roman[1] = "I"

            def roman_num(num):
                """

                :param num:
                """
                for r in roman.keys():
                    x, y = divmod(num, r)
                    yield roman[r] * x
                    num -= (r * x)
                    if num > 0:
                        roman_num(num)
                    else:
                        break

            return "".join([a for a in roman_num(num)])

        rn_list = []
        node_idx = node_idx - 1
        node_labels = labels[:, node_idx]
        for k in [int(l) for i, l in enumerate(node_labels)]:
            rn_list.append(json.dumps(_write_roman(k)))
        abet = rn_list
        node_lab_alph = ".".join([
            "{}{}".format(abet[i], int(l)) for i, l in enumerate(node_labels)
        ]) + ".{}".format(labels[node_idx])
        return node_lab_alph

    output = []

    adj_dict = {}
    for i in list(G.adjacency()):
        source = list(i)[0]
        target = list(list(i)[1])
        adj_dict[source] = target

    for node_idx, connections in adj_dict.items():
        weight_vec = []
        for i in connections:
            wei = G.get_edge_data(node_idx, int(i))['weight']
            weight_vec.append(wei)
        entry = {}
        nodes_label = _get_node_label(node_idx, label_arr, clust_levels_tmp)
        entry["name"] = nodes_label
        entry["size"] = len(connections)
        entry["imports"] = [
            _get_node_label(int(d) - 1, label_arr, clust_levels_tmp)
            for d in connections
        ]
        entry["weights"] = weight_vec
        output.append(entry)

    if network:
        json_file_name = f"{str(ID)}{'_'}{network}{'_connectogram_'}{conn_model}{'_network.json'}"
        json_fdg_file_name = f"{str(ID)}{'_'}{network}{'_fdg_'}{conn_model}{'_network.json'}"
        connectogram_plot = f"{dir_path}{'/'}{json_file_name}"
        fdg_js_sub = f"{dir_path}{'/'}{str(ID)}{'_'}{network}{'_fdg_'}{conn_model}{'_network.js'}"
        fdg_js_sub_name = f"{str(ID)}{'_'}{network}{'_fdg_'}{conn_model}{'_network.js'}"
        connectogram_js_sub = f"{dir_path}/{str(ID)}_{network}_connectogram_{conn_model}_network.js"
        connectogram_js_name = f"{str(ID)}{'_'}{network}{'_connectogram_'}{conn_model}{'_network.js'}"
    else:
        json_file_name = f"{str(ID)}{'_connectogram_'}{conn_model}{'.json'}"
        json_fdg_file_name = f"{str(ID)}{'_fdg_'}{conn_model}{'.json'}"
        connectogram_plot = f"{dir_path}{'/'}{json_file_name}"
        connectogram_js_sub = f"{dir_path}{'/'}{str(ID)}{'_connectogram_'}{conn_model}{'.js'}"
        fdg_js_sub = f"{dir_path}{'/'}{str(ID)}{'_fdg_'}{conn_model}{'.js'}"
        fdg_js_sub_name = f"{str(ID)}{'_fdg_'}{conn_model}{'.js'}"
        connectogram_js_name = f"{str(ID)}{'_connectogram_'}{conn_model}{'.js'}"
    save_json(connectogram_plot, output)

    # Force-directed graphing
    G = nx.from_numpy_matrix(np.round(
        np.abs(conn_matrix).astype('float64'), 6))
    data = json_graph.node_link_data(G)
    data.pop('directed', None)
    data.pop('graph', None)
    data.pop('multigraph', None)
    for k in range(len(data['links'])):
        data['links'][k]['value'] = data['links'][k].pop('weight')
    for k in range(len(data['nodes'])):
        data['nodes'][k]['id'] = str(data['nodes'][k]['id'])
    for k in range(len(data['links'])):
        data['links'][k]['source'] = str(data['links'][k]['source'])
        data['links'][k]['target'] = str(data['links'][k]['target'])

    # Add community structure
    for k in range(len(data['nodes'])):
        data['nodes'][k]['group'] = str(label_arr[0][k])

    # Add node labels
    for k in range(len(data['nodes'])):
        data['nodes'][k]['name'] = str(labels[k])

    out_file = f"{dir_path}{'/'}{str(json_fdg_file_name)}"
    save_json(out_file, data)

    # Copy index.html and json to dir_path
    conn_js_path = str(Path(__file__).parent / "connectogram.js")
    index_html_path = str(Path(__file__).parent / "index.html")
    fdg_replacements_js = {"FD_graph.json": str(json_fdg_file_name)}
    replacements_html = {
        'connectogram.js': str(connectogram_js_name),
        'fdg.js': str(fdg_js_sub_name)
    }
    fdg_js_path = str(Path(__file__).parent / "fdg.js")
    with open(index_html_path) as infile, open(str(dir_path + '/index.html'),
                                               'w') as outfile:
        for line in infile:
            for src, target in replacements_html.items():
                line = line.replace(src, target)
            outfile.write(line)

    replacements_js = {
        'template.json': str(json_file_name),
        'interpolateCool': str(color_scheme)
    }
    with open(conn_js_path) as infile, open(connectogram_js_sub,
                                            'w') as outfile:
        for line in infile:
            for src, target in replacements_js.items():
                line = line.replace(src, target)
            outfile.write(line)

    with open(fdg_js_path) as infile, open(fdg_js_sub, 'w') as outfile:
        for line in infile:
            for src, target in fdg_replacements_js.items():
                line = line.replace(src, target)
            outfile.write(line)

    return
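The three copy loops at the end all perform the same line-by-line token substitution on the bundled HTML/JS templates. A generic sketch of that pattern (the helper name is hypothetical):

def substitute_tokens(src_path, dest_path, replacements):
    # Copy a text template, replacing each token with its target string per line
    with open(src_path) as infile, open(dest_path, 'w') as outfile:
        for line in infile:
            for token, value in replacements.items():
                line = line.replace(token, value)
            outfile.write(line)


# e.g. substitute_tokens(conn_js_path, connectogram_js_sub,
#                        {'template.json': json_file_name,
#                         'interpolateCool': color_scheme})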