def test_singleton(self, graph):
    G = graph()
    G.add_node("x")
    assert nx.spectral_ordering(G) == ["x"]
    G.add_edge("x", "x", weight=33)
    G.add_edge("x", "x", weight=33)
    assert nx.spectral_ordering(G) == ["x"]
def test_singleton(self):
    for graph in (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph):
        G = graph()
        G.add_node('x')
        assert_equal(nx.spectral_ordering(G), ['x'])
        G.add_edge('x', 'x', weight=33)
        G.add_edge('x', 'x', weight=33)
        assert_equal(nx.spectral_ordering(G), ['x'])
def test_singleton(self):
    for graph in (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph):
        G = graph()
        G.add_node("x")
        assert nx.spectral_ordering(G) == ["x"]
        G.add_edge("x", "x", weight=33)
        G.add_edge("x", "x", weight=33)
        assert nx.spectral_ordering(G) == ["x"]
def test_three_nodes(self):
    G = nx.Graph()
    G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2), (2, 3, 1)], weight='spam')
    for method in self._methods:
        order = nx.spectral_ordering(G, weight='spam', method=method)
        assert_equal(set(order), set(G))
        ok_(set([1, 3]) in (set(order[:-1]), set(order[1:])))
    G = nx.MultiDiGraph()
    G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2), (2, 3, 1), (2, 3, 2)])
    for method in self._methods:
        order = nx.spectral_ordering(G, method=method)
        assert_equal(set(order), set(G))
        ok_(set([2, 3]) in (set(order[:-1]), set(order[1:])))
def test_three_nodes(self):
    G = nx.Graph()
    G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2), (2, 3, 1)], weight="spam")
    for method in self._methods:
        order = nx.spectral_ordering(G, weight="spam", method=method)
        assert set(order) == set(G)
        assert {1, 3} in (set(order[:-1]), set(order[1:]))
    G = nx.MultiDiGraph()
    G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2), (2, 3, 1), (2, 3, 2)])
    for method in self._methods:
        order = nx.spectral_ordering(G, method=method)
        assert set(order) == set(G)
        assert {2, 3} in (set(order[:-1]), set(order[1:]))
def spectral_populate_subset(G, root, subset_size, gains, threshold):
    if G.number_of_nodes() <= subset_size:
        return list(G.nodes())
    ordering = nx.spectral_ordering(G)
    logging.info("Ordering: {}, root: {}, threshold: {}".format(
        ordering, root, threshold))
    left_it = ordering.index(root) - 1
    right_it = left_it + 2
    res_subset = {root}
    while len(res_subset) < subset_size:
        # move two pointers and add encountered vertices if the gain is larger than the threshold
        try:
            left_cand = ordering[left_it]
            if gains[left_cand] >= threshold:
                res_subset.add(left_cand)
                logging.info("Adding ordering[{}]={} with gain {}".format(
                    left_it, left_cand, gains[left_cand]))
                left_it -= 1
        except IndexError:
            pass
        try:
            right_cand = ordering[right_it]
            if gains[right_cand] >= threshold:
                res_subset.add(right_cand)
                logging.info("Adding ordering[{}]={} with gain {}".format(
                    right_it, right_cand, gains[right_cand]))
                right_it += 1
        except IndexError:
            pass
    return list(res_subset)
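# A minimal usage sketch for the helper above, under assumed inputs (the gains
# dict and threshold below are illustrative, not part of the original code):
# grow a subset of 5 nodes around node 5 of a path graph, where every node's
# gain clears the threshold, so the two pointers sweep outward symmetrically.
import logging
import networkx as nx

logging.basicConfig(level=logging.INFO)
G = nx.path_graph(10)
gains = {node: 1.0 for node in G}  # assumed per-node gain values
subset = spectral_populate_subset(G, root=5, subset_size=5, gains=gains, threshold=0.5)
print(subset)  # five nodes contiguous in the spectral ordering around the root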
def test_three_nodes_multigraph(self, method):
    pytest.importorskip("scipy")
    G = nx.MultiDiGraph()
    G.add_weighted_edges_from([(1, 2, 1), (1, 3, 2), (2, 3, 1), (2, 3, 2)])
    order = nx.spectral_ordering(G, method=method)
    assert set(order) == set(G)
    assert {2, 3} in (set(order[:-1]), set(order[1:]))
def test_cycle(self):
    path = list(range(10))
    G = nx.Graph()
    nx.add_path(G, path, weight=5)
    G.add_edge(path[-1], path[0], weight=1)
    A = nx.laplacian_matrix(G).todense()
    for normalized in (False, True):
        for method in methods:
            try:
                order = nx.spectral_ordering(G, normalized=normalized, method=method)
            except nx.NetworkXError as e:
                if e.args not in (
                    ("Cholesky solver unavailable.",),
                    ("LU solver unavailable.",),
                ):
                    raise
            else:
                if not normalized:
                    assert order in [
                        [1, 2, 0, 3, 4, 5, 6, 9, 7, 8],
                        [8, 7, 9, 6, 5, 4, 3, 0, 2, 1],
                    ]
                else:
                    assert order in [
                        [1, 2, 3, 0, 4, 5, 9, 6, 7, 8],
                        [8, 7, 6, 9, 5, 4, 0, 3, 2, 1],
                    ]
def test_seed_argument(self, method):
    path = list(range(10))
    np.random.shuffle(path)
    G = nx.Graph()
    nx.add_path(G, path)
    order = nx.spectral_ordering(G, method=method, seed=1)
    assert order in [path, list(reversed(path))]
def test_path(self):
    path = list(range(10))
    shuffle(path)
    G = nx.Graph()
    G.add_path(path)
    for method in self._methods:
        order = nx.spectral_ordering(G, method=method)
        ok_(order in [path, list(reversed(path))])
def test_path(self, method):
    pytest.importorskip("scipy")
    path = list(range(10))
    np.random.shuffle(path)
    G = nx.Graph()
    nx.add_path(G, path)
    order = nx.spectral_ordering(G, method=method)
    assert order in [path, list(reversed(path))]
def test_cycle(self, normalized, expected_order, method):
    pytest.importorskip("scipy")
    path = list(range(10))
    G = nx.Graph()
    nx.add_path(G, path, weight=5)
    G.add_edge(path[-1], path[0], weight=1)
    A = nx.laplacian_matrix(G).todense()
    order = nx.spectral_ordering(G, normalized=normalized, method=method)
    assert order in expected_order
def test_seed_argument(self, method):
    # based on setup_class numpy is installed if we get here
    from numpy.random import shuffle

    path = list(range(10))
    shuffle(path)
    G = nx.Graph()
    nx.add_path(G, path)
    order = nx.spectral_ordering(G, method=method, seed=1)
    assert order in [path, list(reversed(path))]
def test_path(self):
    # based on setupClass numpy is installed if we get here
    from numpy.random import shuffle

    path = list(range(10))
    shuffle(path)
    G = nx.Graph()
    nx.add_path(G, path)
    for method in self._methods:
        order = nx.spectral_ordering(G, method=method)
        ok_(order in [path, list(reversed(path))])
def test_disconnected(self):
    G = nx.Graph()
    nx.add_path(G, range(0, 10, 2))
    nx.add_path(G, range(1, 10, 2))
    for method in self._methods:
        order = nx.spectral_ordering(G, method=method)
        assert_equal(set(order), set(G))
        seqs = [list(range(0, 10, 2)), list(range(8, -1, -2)),
                list(range(1, 10, 2)), list(range(9, -1, -2))]
        ok_(order[:5] in seqs)
        ok_(order[5:] in seqs)
def test_disconnected(self, method):
    G = nx.Graph()
    nx.add_path(G, range(0, 10, 2))
    nx.add_path(G, range(1, 10, 2))
    order = nx.spectral_ordering(G, method=method)
    assert set(order) == set(G)
    seqs = [
        list(range(0, 10, 2)),
        list(range(8, -1, -2)),
        list(range(1, 10, 2)),
        list(range(9, -1, -2)),
    ]
    assert order[:5] in seqs
    assert order[5:] in seqs
def test_cycle(self, normalized, expected_order, method):
    path = list(range(10))
    G = nx.Graph()
    nx.add_path(G, path, weight=5)
    G.add_edge(path[-1], path[0], weight=1)
    A = nx.laplacian_matrix(G).todense()
    try:
        order = nx.spectral_ordering(G, normalized=normalized, method=method)
    except nx.NetworkXError as e:
        if e.args not in (
            ("Cholesky solver unavailable.",),
            ("LU solver unavailable.",),
        ):
            raise
    else:
        assert order in expected_order
def layout_two_column(H, spacing=2):
    """
    Two column (bipartite) layout algorithm.

    This algorithm first converts the hypergraph into a bipartite graph and
    then computes connected components. Disconnected components are handled
    independently and then stacked together.

    Within a connected component, the spectral ordering of the bipartite graph
    provides a quick and dirty ordering that minimizes edge crossings in the
    diagram.

    Parameters
    ----------
    H: Hypergraph
        the entity to be drawn
    spacing: float
        amount of whitespace between disconnected components
    """
    offset = 0
    pos = {}

    def stack(vertices, x, height):
        for i, v in enumerate(vertices):
            pos[v] = (x, i + offset + (height - len(vertices)) / 2)

    G = H.bipartite()
    for ci in nx.connected_components(G):
        Gi = G.subgraph(ci)
        key = {v: i for i, v in enumerate(nx.spectral_ordering(Gi))}.get
        ci_vertices, ci_edges = [
            sorted([v for v, d in Gi.nodes(data=True) if d["bipartite"] == j], key=key)
            for j in [0, 1]
        ]

        height = max(len(ci_vertices), len(ci_edges))

        stack(ci_vertices, 0, height)
        stack(ci_edges, 1, height)

        offset += height + spacing

    return pos
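# A minimal, self-contained usage sketch for layout_two_column. The Hypergraph
# class itself is not shown above, so _ToyHypergraph below is a hypothetical
# stand-in: all the function requires is an object exposing bipartite(), which
# returns a NetworkX graph whose nodes carry a "bipartite" attribute
# (0 for vertices, 1 for hyperedges), as the loop above assumes.
import networkx as nx


class _ToyHypergraph:
    def __init__(self, edges):
        self._edges = edges  # mapping: hyperedge name -> iterable of vertices

    def bipartite(self):
        B = nx.Graph()
        for e, verts in self._edges.items():
            B.add_node(e, bipartite=1)
            for v in verts:
                B.add_node(v, bipartite=0)
                B.add_edge(v, e)
        return B


H = _ToyHypergraph({"e1": ["a", "b"], "e2": ["b", "c"], "e3": ["d"]})
pos = layout_two_column(H)  # vertices land in column x=0, hyperedges in column x=1
print(pos)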
def main2():
    """ spectral same part"""
    method = 'pyomo'
    if method == 'dwave':
        solver = start_sapi()
        embedding = get_native_embedding(solver)
        num_reads = 1000
        annealing_time = 200
    #seed = random.randint(1,10000)
    #seed = 8834
    #print seed
    random.seed(seed)
    filename = sys.argv[1]
    graph = data_to_graph(filename)
    #print '%i nodes, %i edges' %(nx.number_of_nodes(graph), nx.number_of_edges(graph))
    mod_matrix = nx.modularity_matrix(graph, nodelist=sorted(graph.nodes()))
    nnodes = nx.number_of_nodes(graph)
    hardware_size = 25
    ptn = [1 - 2 * random.randint(0, 1) for _ in range(nnodes)]
    #init_ptn = [1 for _ in range(nnodes)]
    #init_ptn = [np.sign(i) for i in nx.fiedler_vector(graph).tolist()]
    mod = compute_modularity(graph, mod_matrix, ptn)
    #print 'init modularity:', mod, 0.25*mod/nx.number_of_edges(graph)
    ptn_variables = {}
    for node in sorted(graph.nodes()):
        ptn_variables[node] = ptn[node]
    sp_order = nx.spectral_ordering(graph)
    for mynode in sorted(graph.nodes()):
        free_nodes = spectral_neigh_same_part(mynode, graph, mod_matrix, ptn,
                                              hardware_size, sp_order)
        for node in free_nodes:
            ptn_variables[node] = 'free'
        if method == 'dwave':
            new_ptn = sapi_refine_modularity(graph, solver, hardware_size,
                                             ptn_variables, num_reads,
                                             annealing_time, embedding)
        else:
            new_ptn = pyomo_refine(graph, ptn_variables)
        for node in free_nodes:
            ptn_variables[node] = new_ptn[node]
        mod = compute_modularity(graph, mod_matrix, new_ptn)
def _basic_partitioning(G, n1, n2):
    """
    Generates the two classes, made up of n1 and n2 nodes, starting from G.
    The split is computed with the spectral algorithm.
    Meant to be used inside spectral_partitioning(G, class_nodes).

    :param G: simple connected graph
    :param n1: nodes of the first class
    :param n2: nodes of the second class
    :return: a subgraph, a view of G. The subgraph structure cannot be modified
    """
    # List of nodes ordered according to the Fiedler vector
    ordered_nodes = nx.spectral_ordering(G, method="lanczos")  # Returns a list, not a numpy array

    group_test_1 = set(ordered_nodes[:n1])  # first n1
    group_test_2 = set(ordered_nodes[:n2])  # first n2

    cut_size_1 = nx.cut_size(G, group_test_1)
    cut_size_2 = nx.cut_size(G, group_test_2)

    # Pick the group that will split the graph based on the weight of its cut set
    if cut_size_1 < cut_size_2:
        final_group = group_test_1
        remaining_group = set(ordered_nodes[n1:])
        G_1 = G.subgraph(final_group)
        G_2 = G.subgraph(remaining_group)
    else:
        final_group = group_test_2  # n2
        remaining_group = set(ordered_nodes[n2:])  # n1
        G_1 = G.subgraph(remaining_group)
        G_2 = G.subgraph(final_group)

    # Outside the if this does not work for the spectral_partitioning([9,(5, 4)] case
    # G_1 = G.subgraph(final_group)
    # G_2 = G.subgraph(remaining_group)
    # G_1 will always hold big_class1_nodes and G_2 will always hold big_class2_nodes

    # print('basic part. first group has nodes: ', nx.number_of_nodes(G_1))
    # print('basic part. second group has nodes: ', nx.number_of_nodes(G_2))

    yield from (G_1, G_2)
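# A minimal usage sketch, assuming only the function above and SciPy (needed by
# the "lanczos" method): split a connected 9-node path graph into classes of
# 5 and 4 nodes. The generator yields two read-only subgraph views of G.
import networkx as nx

G = nx.path_graph(9)
G_1, G_2 = _basic_partitioning(G, 5, 4)
print(sorted(G_1.nodes()), sorted(G_2.nodes()))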
def reorder(data, absolute=False, return_corr=False, approx=False, threshold=0, split=True):
    if data.shape[1] > 6:
        approx = True
    modified_corr = corr = pd.DataFrame(squareform(
        [pearsonr(data[r], data[c])[0] for r, c in combinations(data, 2)]),
        index=list(data), columns=list(data)).fillna(0)
    if absolute:
        modified_corr = modified_corr.abs()
    modified_corr = modified_corr * (modified_corr >= threshold)
    if approx:
        G = nx.from_pandas_adjacency(modified_corr)
        data = data[nx.spectral_ordering(G)]
    else:
        values = modified_corr.values
        split = int(split == True)

        def objective(ii):
            jj = np.roll(ii, 1)
            return values[ii[split:], jj[split:]].sum()

        best = max(map(np.array, permutations(range(len(values)))), key=objective)
        data = data[data.columns[best]]
    if return_corr:
        order = list(data)
        return data, corr.loc[order, order]
    return data
def test_cycle(self):
    path = list(range(10))
    G = nx.Graph()
    nx.add_path(G, path, weight=5)
    G.add_edge(path[-1], path[0], weight=1)
    A = nx.laplacian_matrix(G).todense()
    for normalized in (False, True):
        for method in methods:
            try:
                order = nx.spectral_ordering(G, normalized=normalized,
                                             method=method)
            except nx.NetworkXError as e:
                if e.args not in (('Cholesky solver unavailable.',),
                                  ('LU solver unavailable.',)):
                    raise
            else:
                if not normalized:
                    ok_(order in [[1, 2, 0, 3, 4, 5, 6, 9, 7, 8],
                                  [8, 7, 9, 6, 5, 4, 3, 0, 2, 1]])
                else:
                    ok_(order in [[1, 2, 3, 0, 4, 5, 9, 6, 7, 8],
                                  [8, 7, 6, 9, 5, 4, 0, 3, 2, 1]])
def get_feature_order(X, eps=.25):
    A = squareform(pdist(X.T, metric=lambda x, y: pearsonr(x, y)[0]))
    A[A < eps] = 0
    G = nx.relabel_nodes(nx.from_numpy_array(A),
                         {i: s for i, s in enumerate(X.columns)})
    return nx.spectral_ordering(G)
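# A minimal usage sketch for get_feature_order, assuming the imports the
# function itself relies on are in scope; the DataFrame below is illustrative.
# Columns whose pairwise Pearson correlation reaches eps become weighted edges,
# and the spectral ordering of that graph gives the column order.
import networkx as nx
import numpy as np
import pandas as pd
from scipy.spatial.distance import pdist, squareform
from scipy.stats import pearsonr

rng = np.random.default_rng(0)
base = rng.normal(size=200)
X = pd.DataFrame({
    "a": base + rng.normal(scale=0.1, size=200),
    "b": base + rng.normal(scale=0.1, size=200),
    "c": rng.normal(size=200),
})
print(get_feature_order(X))  # the correlated columns "a" and "b" end up adjacent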
def test_spectral_ordering_tracemin_chol():
    """Test that "tracemin_chol" raises an exception."""
    pytest.importorskip("scipy")
    G = nx.barbell_graph(5, 4)
    with pytest.raises(nx.NetworkXError):
        nx.spectral_ordering(G, method="tracemin_chol")
def main():
    #method = 'pyomo'
    method = 'dwave'
    free_node_method = 'top_gain'
    #free_node_method = 'spectral'
    #free_node_method = 'random'
    #free_node_method = 'boundary_spectral'
    #free_node_method = 'tg_sp_same_part'
    if method == 'dwave':
        solver = start_sapi()
        embedding = get_native_embedding(solver)
        num_reads = 1000
        annealing_time = 200
    filename = sys.argv[1]
    graph = data_to_graph(filename)
    #print graph.nodes()
    print('%i nodes, %i edges' % (nx.number_of_nodes(graph), nx.number_of_edges(graph)))
    if nx.number_of_nodes(graph) > 600:
        exit()
    mod_matrix = nx.modularity_matrix(graph, nodelist=sorted(graph.nodes()))
    nnodes = nx.number_of_nodes(graph)
    hardware_size = 25
    #init_ptn = [1 - 2*random.randint(0,1) for _ in range(nnodes)]
    seeds = [
        1070, 8173, 3509, 8887, 1314, 4506, 5219, 3765, 1420, 7778, 3734,
        6509, 1266, 5063, 6496, 4622, 7018, 6052, 8932, 8215, 1254, 400,
        3260, 5999, 1331, 8073, 7357, 2928, 7208, 3874
    ]
    niters = []
    mod_values = []
    for __, seed in enumerate(seeds):
        #print('exp %i' %__)
        #seed = 0
        random.seed(seed)
        np.random.seed(seed)
        init_ptn = [
            1 - 2 * x for x in list(
                np.random.randint(2, size=(graph.number_of_nodes(), )))
        ]
        #print init_ptn
        mod = compute_modularity(graph, mod_matrix, init_ptn)
        #print('init modularity:', mod, 0.25*mod/nx.number_of_edges(graph))
        ptn_variables = {}
        for node in sorted(graph.nodes()):
            ptn_variables[node] = init_ptn[node]
        free_nodes = get_random_nodes(graph, hardware_size)
        free_set = set(free_nodes)
        sp_order = nx.spectral_ordering(graph)
        #sp_order = list(reverse_cuthill_mckee_ordering(graph))
        #print(sp_order)
        free_nodes = get_free_nodes(graph, mod_matrix, init_ptn, hardware_size,
                                    sp_order, method=free_node_method)
        not_converge = True
        myiter = 0
        nconv = 5
        best_soln = -float('inf')
        while not_converge:
            myiter += 1
            #print len(free_nodes)
            for node in free_nodes:
                ptn_variables[node] = 'free'
            if method == 'dwave':
                new_ptn = sapi_refine_modularity(graph, solver, hardware_size,
                                                 ptn_variables, num_reads,
                                                 annealing_time, embedding)
            else:
                new_ptn = pyomo_refine(graph, ptn_variables)
            for node in free_nodes:
                ptn_variables[node] = new_ptn[node]
            mod = compute_modularity(graph, mod_matrix, new_ptn)
            #print(myiter, 'refine modularity:', mod, 0.25*mod/nx.number_of_edges(graph))
            free_nodes = get_free_nodes(graph, mod_matrix, new_ptn, hardware_size,
                                        sp_order, method=free_node_method)
            current_free_set = set(free_nodes)
            if mod > best_soln:
                best_soln = mod
                best_it = myiter
            if free_set == current_free_set:
                not_converge = False
            elif myiter - best_it >= nconv:
                not_converge = False
            free_set = current_free_set
        niters.append(myiter)
        mod_values.append(0.25 * mod / nx.number_of_edges(graph))
        #print(seed, myiter, 0.25*mod/nx.number_of_edges(graph))
    best = max(mod_values)
    worst = min(mod_values)
    av = np.mean(mod_values)
    std = np.std(mod_values)
    b_it = min(niters)
    w_it = max(niters)
    av_it = np.mean(niters)
    std_it = np.std(niters)
    #print(seeds)
    out = [worst, av, best, std, b_it, av_it, w_it, std_it]
    out = '& '.join([str(round(i, 4)) for i in out])
    print(out)
    print('-------------------\n')
sumfile.write(dens)

# analyze the network
hist = nx.degree_histogram(fb_net)
plt.figure(figsize=(10, 10))
plt.plot(hist, linestyle=':')
plt.title('Degree Histogram')
plt.savefig('fbNet_Degree.png')
plt.close()
print('Degree Histogram finished')

lap_spec = nx.laplacian_spectrum(fb_net)
plt.plot(lap_spec)
plt.title('Eigenvalues of the Laplacian')
plt.savefig('fbNet_LapSpec.png')
plt.close()
print('Eigenvalues of the Laplacian')

adj_spec = nx.adjacency_spectrum(fb_net)
plt.plot(adj_spec)
plt.title('Eigenvalues of the Adjacency')
plt.savefig('fbNet_AdjSpec.png')
plt.close()
print('Eigenvalues of the Adjacency')

spec_ordering = nx.spectral_ordering(fb_net)
plt.plot(spec_ordering)
plt.title('Spectral Ordering')
plt.savefig('fbNet_SpecOrder.png')
plt.close()
print('Spectral Ordering')
def setup(g, num_players, num_seeds):
    return nx.spectral_ordering(g)