def test_link_communities():
    """Smoke-test netstats.link_communities on a bundled example matrix.

    Loads a precomputed functional connectivity (covariance) matrix from the
    examples directory, runs link-community detection with single-linkage
    clustering, and asserts that a non-None affiliation matrix is returned.
    """
    base_dir = str(Path(__file__).parent / "examples")
    in_mat = np.load(base_dir + '/997/997_Default_est_cov_0.1_4.npy')
    start_time = time.time()
    M = netstats.link_communities(in_mat, type_clustering='single')
    # BUG FIX: the message previously read 'thresh_and_fit (Functional,
    # proportional thresholding)' -- copy-pasted from another test; report
    # what actually ran here.
    print("%s%s%s" % ('link_communities --> finished: ',
                      str(np.round(time.time() - start_time, 1)), 's'))
    assert M is not None
def plot_connectogram(conn_matrix, conn_model, atlas_name, dir_path, ID, NETWORK, label_names):
    """Build a hierarchical-edge-bundling connectogram JSON and stage the
    JS/HTML viewer files in ``dir_path``.

    NOTE(review): a later function in this file re-defines
    ``plot_connectogram`` and therefore shadows this version at import time.

    Parameters
    ----------
    conn_matrix : array-like
        Weighted adjacency matrix; normalized via pynets.thresholding.normalize.
    conn_model : str
        Connectivity-model tag used in output file names.
    atlas_name : str
        Unused in this function; kept for interface compatibility.
    dir_path : str
        Output directory for the .json/.js/.html artifacts.
    ID : str or int
        Subject identifier used in output file names.
    NETWORK : str or None
        Optional network name; selects fcluster- vs link-community labeling
        and alters output file names.
    label_names : sequence
        Node labels, indexable by node index.
    """
    import json
    from pathlib import Path
    from random import sample
    from string import ascii_uppercase, ascii_lowercase
    from pynets.thresholding import normalize
    # Consistency fix: import the clustering and JSON-saving helpers
    # explicitly at function scope (as the newer plot_connectogram in this
    # file does) instead of relying on module-level names.
    from scipy.cluster.hierarchy import linkage, fcluster
    from nipype.utils.filemanip import save_json

    link_comm = True
    conn_matrix = normalize(conn_matrix)
    G = nx.from_numpy_matrix(conn_matrix)

    def doClust(X, clust_levels):
        # Ward-linkage hierarchy; one label row per requested cluster level.
        Z = linkage(X, 'ward')
        cluster_levels = range(1, int(clust_levels))
        clust_levels_tmp = int(clust_levels) - 1
        label_arr = np.zeros((int(clust_levels_tmp), int(X.shape[0])))
        for c in cluster_levels:
            fl = fcluster(Z, c, criterion='maxclust')
            label_arr[c - 1, :] = fl
        return label_arr, clust_levels_tmp

    if NETWORK is not None:
        clust_levels = 3
        [label_arr, clust_levels_tmp] = doClust(conn_matrix, clust_levels)
    else:
        if link_comm:  # idiom fix: was `== True`
            from pynets.netstats import link_communities
            # Link (edge) communities define the hierarchical grouping.
            node_comm_aff_mat = link_communities(conn_matrix,
                                                 type_clustering='single')
            clust_levels = len(node_comm_aff_mat)
            clust_levels_tmp = int(clust_levels) - 1
            mask_mat = np.squeeze(
                np.array([node_comm_aff_mat == 0]).astype('int'))
            label_arr = node_comm_aff_mat * np.expand_dims(
                np.arange(1, clust_levels + 1), axis=1) + mask_mat

    def get_node_label(node_idx, labels, clust_levels_tmp):
        """Compose a dotted hierarchy label, e.g. ``a1.b2.<node name>``."""
        def get_letters(n, random=False, uppercase=False):
            """Return n letters of the alphabet as a list."""
            letters = (ascii_uppercase if uppercase else ascii_lowercase)
            # BUG FIX: json.dumps() previously turned the letter list into a
            # single JSON string, so abet[i] below indexed punctuation
            # characters ('[', '"', ...) instead of letters.
            return (sample(letters, n) if random else list(letters[:n]))

        abet = get_letters(clust_levels_tmp)
        node_labels = labels[:, node_idx]
        return ".".join([
            "{}{}".format(abet[i], int(l))
            for i, l in enumerate(node_labels)
        ]) + ".{}".format(label_names[node_idx])

    output = []
    for node_idx, connections in enumerate(G.adjacency_list()):
        weight_vec = []
        for i in connections:
            wei = G.get_edge_data(node_idx, int(i))['weight']
            weight_vec.append(wei)
        entry = {}
        nodes_label = get_node_label(node_idx, label_arr, clust_levels_tmp)
        entry["name"] = nodes_label
        entry["size"] = len(connections)
        entry["imports"] = [
            get_node_label(int(d) - 1, label_arr, clust_levels_tmp)
            for d in connections
        ]
        entry["weights"] = weight_vec
        output.append(entry)

    if NETWORK is not None:  # idiom fix: was `!= None`
        json_file_name = str(
            ID) + '_' + NETWORK + '_connectogram_' + conn_model + '_network.json'
        connectogram_plot = dir_path + '/' + json_file_name
        connectogram_js_sub = dir_path + '/' + str(
            ID) + '_' + NETWORK + '_connectogram_' + conn_model + '_network.js'
        connectogram_js_name = str(
            ID) + '_' + NETWORK + '_connectogram_' + conn_model + '_network.js'
    else:
        json_file_name = str(ID) + '_connectogram_' + conn_model + '.json'
        connectogram_plot = dir_path + '/' + json_file_name
        connectogram_js_sub = dir_path + '/' + str(
            ID) + '_connectogram_' + conn_model + '.js'
        connectogram_js_name = str(ID) + '_connectogram_' + conn_model + '.js'
    save_json(connectogram_plot, output)

    # Copy the JS/HTML viewers next to the data, rewriting their file
    # references to point at the files generated above.
    conn_js_path = Path(__file__).parent / "connectogram.js"
    index_html_path = Path(__file__).parent / "index.html"
    replacements_html = {'connectogram.js': str(connectogram_js_name)}
    with open(index_html_path) as infile, open(str(dir_path + '/index.html'),
                                               'w') as outfile:
        for line in infile:
            for src, target in replacements_html.items():
                line = line.replace(src, target)
            outfile.write(line)
    replacements_js = {'template.json': str(json_file_name)}
    with open(conn_js_path) as infile, open(connectogram_js_sub,
                                            'w') as outfile:
        for line in infile:
            for src, target in replacements_js.items():
                line = line.replace(src, target)
            outfile.write(line)
def plot_connectogram(conn_matrix, conn_model, atlas_select, dir_path, ID, network, label_names):
    """Generate connectogram and force-directed-graph JSON for a connectivity
    matrix and stage the D3 viewer assets (index.html, connectogram.js,
    fdg.js) in ``dir_path``.

    Parameters
    ----------
    conn_matrix : array-like
        Weighted adjacency matrix; normalized via pynets.thresholding.normalize.
    conn_model : str
        Connectivity-model tag used in output file names.
    atlas_select : str
        Unused in this function; kept for interface compatibility.
    dir_path : str
        Output directory for the .json/.js/.html artifacts.
    ID : str or int
        Subject identifier used in output file names.
    network : str or None
        Optional network name incorporated into output file names.
    label_names : list
        Node labels, indexable by node index (mutated in place if pruning
        is enabled).
    """
    import json
    from pathlib import Path
    from networkx.readwrite import json_graph
    from pynets.thresholding import normalize
    from pynets.netstats import most_important
    from scipy.cluster.hierarchy import linkage, fcluster
    from nipype.utils.filemanip import save_json

    # Advanced Settings
    comm = 'nodes'  # 'nodes' (Louvain) or 'links' (link communities)
    pruned = False
    # Other D3 options: interpolateCool, interpolateGnBu, interpolateOrRd,
    # interpolatePuRd, interpolateYlOrRd, interpolateReds, interpolateGreens
    color_scheme = 'interpolateBlues'
    # Advanced Settings

    conn_matrix = normalize(conn_matrix)
    G = nx.from_numpy_matrix(conn_matrix)
    if pruned is True:
        [G, pruned_nodes] = most_important(G)
        conn_matrix = nx.to_numpy_array(G)
        # Delete pruned labels from the highest index down so the remaining
        # indices stay valid during deletion.
        pruned_nodes.sort(reverse=True)
        for j in pruned_nodes:
            del label_names[label_names.index(label_names[j])]

    def doClust(X, clust_levels):
        # Ward-linkage hierarchy; one label row per requested cluster level.
        Z = linkage(X, 'ward')
        cluster_levels = range(1, int(clust_levels))
        clust_levels_tmp = int(clust_levels) - 1
        label_arr = np.zeros((int(clust_levels_tmp), int(X.shape[0])))
        for c in cluster_levels:
            fl = fcluster(Z, c, criterion='maxclust')
            label_arr[c - 1, :] = fl
        return label_arr, clust_levels_tmp

    if comm == 'nodes' and len(conn_matrix) > 40:
        from pynets.netstats import modularity_louvain_und_sign
        gamma = nx.density(nx.from_numpy_array(conn_matrix))
        try:
            [node_comm_aff_mat, q] = modularity_louvain_und_sign(
                conn_matrix, gamma=float(gamma))
            print("%s%s%s%s%s" %
                  ('Found ', str(len(np.unique(node_comm_aff_mat))),
                   ' communities with γ=', str(gamma), '...'))
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.
            print(
                'WARNING: Louvain community detection failed. Proceeding with single community affiliation vector...'
            )
            node_comm_aff_mat = np.ones(conn_matrix.shape[0]).astype('int')
        clust_levels = len(node_comm_aff_mat)
        clust_levels_tmp = int(clust_levels) - 1
        mask_mat = np.squeeze(np.array([node_comm_aff_mat == 0]).astype('int'))
        label_arr = node_comm_aff_mat * np.expand_dims(
            np.arange(1, clust_levels + 1), axis=1) + mask_mat
    elif comm == 'links' and len(conn_matrix) > 40:
        from pynets.netstats import link_communities
        # Plot link communities
        link_comm_aff_mat = link_communities(conn_matrix,
                                             type_clustering='single')
        print("%s%s%s" %
              ('Found ', str(len(link_comm_aff_mat)), ' communities...'))
        clust_levels = len(link_comm_aff_mat)
        clust_levels_tmp = int(clust_levels) - 1
        mask_mat = np.squeeze(np.array([link_comm_aff_mat == 0]).astype('int'))
        label_arr = link_comm_aff_mat * np.expand_dims(
            np.arange(1, clust_levels + 1), axis=1) + mask_mat
    else:
        # BUG FIX: matrices with <= 20 nodes previously fell through every
        # branch, leaving label_arr/clust_levels_tmp undefined and raising
        # NameError below. All remaining cases now use the fcluster fallback.
        if len(conn_matrix) > 20:
            print(
                'Graph too small for reliable plotting of communities. Plotting by fcluster instead...'
            )
        if len(conn_matrix) >= 250:
            clust_levels = 7
        elif len(conn_matrix) >= 200:
            clust_levels = 6
        elif len(conn_matrix) >= 150:
            clust_levels = 5
        elif len(conn_matrix) >= 100:
            clust_levels = 4
        elif len(conn_matrix) >= 50:
            clust_levels = 3
        else:
            clust_levels = 2
        [label_arr, clust_levels_tmp] = doClust(conn_matrix, clust_levels)

    def get_node_label(node_idx, labels, clust_levels_tmp):
        """Compose a dotted hierarchy label (Roman numerals per cluster
        level) terminated by the node's anatomical name."""
        from collections import OrderedDict

        def write_roman(num):
            # Value->symbol map ordered from largest to smallest.
            roman = OrderedDict()
            roman[1000] = "M"
            roman[900] = "CM"
            roman[500] = "D"
            roman[400] = "CD"
            roman[100] = "C"
            roman[90] = "XC"
            roman[50] = "L"
            roman[40] = "XL"
            roman[10] = "X"
            roman[9] = "IX"
            roman[5] = "V"
            roman[4] = "IV"
            roman[1] = "I"

            def roman_num(num):
                for r in roman.keys():
                    x, y = divmod(num, r)
                    yield roman[r] * x
                    num -= (r * x)
                    if num > 0:
                        # NOTE(review): this recursive call is a no-op (the
                        # generator is never consumed); the enclosing loop
                        # already reduces the remainder. Kept for parity.
                        roman_num(num)
                    else:
                        break

            return "".join([a for a in roman_num(num)])

        rn_list = []
        node_idx = node_idx - 1
        node_labels = labels[:, node_idx]
        for k in [int(l) for i, l in enumerate(node_labels)]:
            rn_list.append(json.dumps(write_roman(k)))
        abet = rn_list
        node_lab_alph = ".".join([
            "{}{}".format(abet[i], int(l)) for i, l in enumerate(node_labels)
        ]) + ".{}".format(label_names[node_idx])
        return node_lab_alph

    output = []
    adj_dict = {}
    # Flatten the networkx-2.x adjacency iterator into {node: [neighbors]}.
    for i in list(G.adjacency()):
        source = list(i)[0]
        target = list(list(i)[1])
        adj_dict[source] = target
    for node_idx, connections in adj_dict.items():
        weight_vec = []
        for i in connections:
            wei = G.get_edge_data(node_idx, int(i))['weight']
            weight_vec.append(wei)
        entry = {}
        nodes_label = get_node_label(node_idx, label_arr, clust_levels_tmp)
        entry["name"] = nodes_label
        entry["size"] = len(connections)
        entry["imports"] = [
            get_node_label(int(d) - 1, label_arr, clust_levels_tmp)
            for d in connections
        ]
        entry["weights"] = weight_vec
        output.append(entry)

    if network:
        json_file_name = "%s%s%s%s%s%s" % (str(ID), '_', network,
                                           '_connectogram_', conn_model,
                                           '_network.json')
        json_fdg_file_name = "%s%s%s%s%s%s" % (str(ID), '_', network, '_fdg_',
                                               conn_model, '_network.json')
        connectogram_plot = "%s%s%s" % (dir_path, '/', json_file_name)
        fdg_js_sub = "%s%s%s%s%s%s%s%s" % (dir_path, '/', str(ID), '_',
                                           network, '_fdg_', conn_model,
                                           '_network.js')
        fdg_js_sub_name = "%s%s%s%s%s%s" % (str(ID), '_', network, '_fdg_',
                                            conn_model, '_network.js')
        connectogram_js_sub = "%s%s%s%s%s%s%s%s" % (
            dir_path, '/', str(ID), '_', network, '_connectogram_', conn_model,
            '_network.js')
        connectogram_js_name = "%s%s%s%s%s%s" % (
            str(ID), '_', network, '_connectogram_', conn_model, '_network.js')
    else:
        json_file_name = "%s%s%s%s" % (str(ID), '_connectogram_', conn_model,
                                       '.json')
        json_fdg_file_name = "%s%s%s%s" % (str(ID), '_fdg_', conn_model,
                                           '.json')
        connectogram_plot = "%s%s%s" % (dir_path, '/', json_file_name)
        connectogram_js_sub = "%s%s%s%s%s%s" % (dir_path, '/', str(ID),
                                                '_connectogram_', conn_model,
                                                '.js')
        fdg_js_sub = "%s%s%s%s%s%s" % (dir_path, '/', str(ID), '_fdg_',
                                       conn_model, '.js')
        fdg_js_sub_name = "%s%s%s%s" % (str(ID), '_fdg_', conn_model, '.js')
        connectogram_js_name = "%s%s%s%s" % (str(ID), '_connectogram_',
                                             conn_model, '.js')
    save_json(connectogram_plot, output)

    # Force-directed graphing: export a node-link JSON with string ids,
    # community groups, and node names for the fdg.js viewer.
    G = nx.from_numpy_matrix(np.round(conn_matrix.astype('float64'), 6))
    data = json_graph.node_link_data(G)
    data.pop('directed', None)
    data.pop('graph', None)
    data.pop('multigraph', None)
    for k in range(len(data['links'])):
        data['links'][k]['value'] = data['links'][k].pop('weight')
    for k in range(len(data['nodes'])):
        data['nodes'][k]['id'] = str(data['nodes'][k]['id'])
    for k in range(len(data['links'])):
        data['links'][k]['source'] = str(data['links'][k]['source'])
        data['links'][k]['target'] = str(data['links'][k]['target'])
    # Add community structure
    for k in range(len(data['nodes'])):
        data['nodes'][k]['group'] = str(label_arr[0][k])
    # Add node labels
    for k in range(len(data['nodes'])):
        data['nodes'][k]['name'] = str(label_names[k])
    out_file = "%s%s%s" % (dir_path, '/', str(json_fdg_file_name))
    save_json(out_file, data)

    # Copy the viewer assets next to the data, rewriting their file
    # references to point at the files generated above.
    conn_js_path = str(Path(__file__).parent / "connectogram.js")
    index_html_path = str(Path(__file__).parent / "index.html")
    fdg_replacements_js = {"FD_graph.json": str(json_fdg_file_name)}
    replacements_html = {
        'connectogram.js': str(connectogram_js_name),
        'fdg.js': str(fdg_js_sub_name)
    }
    fdg_js_path = str(Path(__file__).parent / "fdg.js")
    with open(index_html_path) as infile, open(str(dir_path + '/index.html'),
                                               'w') as outfile:
        for line in infile:
            for src, target in replacements_html.items():
                line = line.replace(src, target)
            outfile.write(line)
    replacements_js = {
        'template.json': str(json_file_name),
        'interpolateCool': str(color_scheme)
    }
    with open(conn_js_path) as infile, open(connectogram_js_sub,
                                            'w') as outfile:
        for line in infile:
            for src, target in replacements_js.items():
                line = line.replace(src, target)
            outfile.write(line)
    with open(fdg_js_path) as infile, open(fdg_js_sub, 'w') as outfile:
        for line in infile:
            for src, target in fdg_replacements_js.items():
                line = line.replace(src, target)
            outfile.write(line)
    return