def get_components(self):
    comps = nx.connected_components(self.FG)
    component_map = dict()
    components = []
    nonstuck = set()
    component_node_seen = dict()
    for comp in comps:
        bry = nx.node_boundary(self.G, comp)
        comp_set = set()
        bry_set = set()
        for node in comp:
            comp_set.add(node)
            component_node_seen[node] = 1
        for node in bry:
            bry_set.add(node)
            component_node_seen[node] = 1
        components.append((comp_set, bry_set))
        _nonstuck = comp | bry
        for node in _nonstuck:
            component_map.setdefault(node, list()).append((comp, bry))
        nonstuck |= _nonstuck
    # stuck = set(self.G.nodes)-nonstuck
    ns_bry = nx.node_boundary(self.G, nonstuck)
    for stuck_node in ns_bry:
        stuck_node_bry = nx.node_boundary(self.G, [stuck_node])
        component_map.setdefault(stuck_node, list()).append(
            ({stuck_node}, stuck_node_bry))  # singleton component for a stuck node
    stuck = set([node for node in self.G if node not in component_node_seen])
    if stuck:
        components.append((set(), stuck))
    return component_map, components

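# --- Hedged usage sketch of the pattern above (not part of the original class).
# It assumes the same setup: FG is a filtered subgraph of G, and for every
# connected component of FG we also record its node boundary in G.
import networkx as nx

G = nx.path_graph(7)                    # 0-1-2-3-4-5-6
FG = G.subgraph([0, 1, 4, 5])           # filtered graph with two components
for comp in nx.connected_components(FG):
    print(sorted(comp), '->', sorted(nx.node_boundary(G, comp)))
# [0, 1] -> [2]   and   [4, 5] -> [3, 6]
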
def test_null_graph(self):
    """Tests that the null graph has empty node boundaries."""
    null = nx.null_graph()
    assert_equal(nx.node_boundary(null, []), set())
    assert_equal(nx.node_boundary(null, [], []), set())
    assert_equal(nx.node_boundary(null, [1, 2, 3]), set())
    assert_equal(nx.node_boundary(null, [1, 2, 3], [4, 5, 6]), set())
    assert_equal(nx.node_boundary(null, [1, 2, 3], [3, 4, 5]), set())

def test_null_node_boundary(self):
    """null graph has empty node boundaries"""
    null = self.null
    assert_equal(nx.node_boundary(null, []), [])
    assert_equal(nx.node_boundary(null, [], []), [])
    assert_equal(nx.node_boundary(null, [1, 2, 3]), [])
    assert_equal(nx.node_boundary(null, [1, 2, 3], [4, 5, 6]), [])
    assert_equal(nx.node_boundary(null, [1, 2, 3], [3, 4, 5]), [])

def closeSeparator(graph: networkx.Graph, vertexSetA, b):
    neighborhoodOfA = networkx.node_boundary(graph, vertexSetA)
    # can this be optimized away?
    reduced = graph.copy()
    reduced.remove_nodes_from(neighborhoodOfA)
    verticesOfConnectedComponentContainingB = networkx.node_connected_component(
        reduced, b)
    # take the boundary in the original graph: within `reduced` a connected
    # component has no outside neighbours, so its boundary there is always empty
    return set(
        networkx.node_boundary(graph, verticesOfConnectedComponentContainingB))

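# --- Minimal sketch of the "close separator" idea above (assumes closeSeparator
# is importable at module level; the path graph is purely illustrative).
import networkx

H = networkx.path_graph(6)              # 0-1-2-3-4-5
# N({0, 1}) = {2} separates {0, 1} from b = 5; the boundary of the component
# of 5 after removing {2} is again {2}, so the separator is already "close".
print(closeSeparator(H, {0, 1}, 5))     # -> {2}
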
def test_directed(self):
    """Tests the node boundary of a directed graph."""
    G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)])
    S = {0, 1}
    boundary = nx.node_boundary(G, S)
    expected = {2}
    assert_equal(boundary, expected)

def get_cone(G, l, ideal):
    cone = ideal
    while l > 0:
        for item in list(nx.node_boundary(G, cone)):
            cone.append(item)
        l -= 1
    return cone

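# --- Small self-contained check of the layer-growing idea in get_cone above
# (illustrative only; it mirrors the loop rather than importing the helper).
import networkx as nx

P = nx.path_graph(10)
cone = [5]
for _ in range(2):                      # two expansion rounds, i.e. l = 2
    cone.extend(nx.node_boundary(P, cone))
print(sorted(cone))                     # -> [3, 4, 5, 6, 7]
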
def boundary_expansion(G, S):
    """Returns the boundary expansion of the set `S`.

    The *boundary expansion* is the quotient of the size of the node
    boundary and the cardinality of *S*. [1]

    Parameters
    ----------
    G : NetworkX graph

    S : sequence
        A sequence of nodes in `G`.

    Returns
    -------
    number
        The boundary expansion of the set `S`.

    See also
    --------
    edge_expansion
    mixing_expansion
    node_expansion

    References
    ----------
    .. [1] Vadhan, Salil P.
           "Pseudorandomness."
           *Foundations and Trends in Theoretical Computer Science*
           7.1–3 (2011): 1–336.
           <http://dx.doi.org/10.1561/0400000010>

    """
    return len(nx.node_boundary(G, S)) / len(S)

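# --- Hedged usage example for boundary_expansion (assumes the function above,
# or networkx's own nx.boundary_expansion, is in scope).
import networkx as nx

C8 = nx.cycle_graph(8)
S = {0, 1, 2}
print(nx.node_boundary(C8, S))          # -> {3, 7}
print(boundary_expansion(C8, S))        # -> 2/3, i.e. about 0.667
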
def get_boundary(self, vertices, remove_danglers=False):
    """Return interior and exterior boundary sets for `vertices`.

    If `remove_danglers` is True vertices in the internal boundary with
    only one neighbor in the internal boundary will be moved to the
    external boundary.
    """
    if not len(vertices):
        return [], []
    import networkx as nx
    # Use networkx to get external boundary
    external_boundary = set(nx.node_boundary(self.graph, vertices))
    # Find adjacent vertices to get inner boundary
    internal_boundary = set.union(
        *[set(self.graph[v].keys()) for v in external_boundary]
    ).intersection(set(vertices))
    if remove_danglers:
        ingraph = self.graph.subgraph(internal_boundary)
        danglers = [n for n, d in ingraph.degree().items() if d == 1]
        while danglers:
            internal_boundary -= set(danglers)
            external_boundary |= set(danglers)
            ingraph = self.graph.subgraph(internal_boundary)
            danglers = [n for n, d in ingraph.degree().items() if d < 2]
    return list(internal_boundary), list(external_boundary)

def test_multigraph(self):
    """Tests the node boundary of a multigraph."""
    G = nx.MultiGraph(list(nx.cycle_graph(5).edges()) * 2)
    S = {0, 1}
    boundary = nx.node_boundary(G, S)
    expected = {2, 4}
    assert_equal(boundary, expected)

def get_boundary(surf, vertices, remove_danglers=False):
    """Return interior and exterior boundary sets for `vertices`.

    If `remove_danglers` is True vertices in the internal boundary with
    only one neighbor in the internal boundary will be moved to the
    external boundary.
    """
    if not len(vertices):
        return [], []
    import networkx as nx
    # Use networkx to get external boundary
    external_boundary = set(nx.node_boundary(surf.graph, vertices))
    # Find adjacent vertices to get inner boundary
    internal_boundary = set.union(
        *[set(surf.graph[v].keys()) for v in external_boundary]
    ).intersection(set(vertices))
    if remove_danglers:
        ingraph = surf.graph.subgraph(internal_boundary)
        danglers = [n for n, d in ingraph.degree().items() if d == 1]
        while danglers:
            internal_boundary -= set(danglers)
            external_boundary |= set(danglers)
            ingraph = surf.graph.subgraph(internal_boundary)
            danglers = [n for n, d in ingraph.degree().items() if d < 2]
    return list(internal_boundary), list(external_boundary)

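# --- Rough standalone illustration of the interior/exterior boundary split used
# above (plain networkx, no surface object; the 4x4 grid is just an example).
import networkx as nx

G = nx.grid_2d_graph(4, 4)
patch = {(1, 1), (1, 2), (2, 1), (2, 2)}            # a 2x2 block of vertices
exterior = set(nx.node_boundary(G, patch))
interior = set().union(*(set(G[v]) for v in exterior)) & patch
print(sorted(exterior))   # the 8 grid nodes ringing the block
print(sorted(interior))   # here every node of the block touches the exterior
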
def test_multidigraph(self):
    """Tests the node boundary of a multidigraph."""
    edges = [(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)]
    G = nx.MultiDiGraph(edges * 2)
    S = {0, 1}
    boundary = nx.node_boundary(G, S)
    expected = {2}
    assert_equal(boundary, expected)

def test_complete_graph(self):
    K10 = cnlti(nx.complete_graph(10), first_label=1)
    assert_equal(nx.node_boundary(K10, []), set())
    assert_equal(nx.node_boundary(K10, [], []), set())
    assert_equal(nx.node_boundary(K10, [1, 2, 3]), {4, 5, 6, 7, 8, 9, 10})
    assert_equal(nx.node_boundary(K10, [4, 5, 6]), {1, 2, 3, 7, 8, 9, 10})
    assert_equal(nx.node_boundary(K10, [3, 4, 5, 6, 7]), {1, 2, 8, 9, 10})
    assert_equal(nx.node_boundary(K10, [4, 5, 6], []), set())
    assert_equal(nx.node_boundary(K10, K10), set())
    assert_equal(nx.node_boundary(K10, [1, 2, 3], [3, 4, 5]), {4, 5})

def get_boundary_nodes(G, district):
    # takes in VTD adjacency graph G and district identifier (string),
    # returns the boundary nodes of that district (a set in NetworkX >= 2.0)
    complement_nodes = []
    for node in G.nodes():
        if G.node[node]['DISTRICT'] != district:
            complement_nodes.append(node)
    boundary_nodes_of_district = nx.node_boundary(G, complement_nodes)
    return boundary_nodes_of_district

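# --- Hedged sketch of the complement trick above on a toy graph (the attribute
# name 'DISTRICT' is kept from the snippet; newer NetworkX uses G.nodes[n]
# rather than G.node[n] for attribute access).
import networkx as nx

G = nx.path_graph(6)
for n in G:
    G.nodes[n]['DISTRICT'] = 'A' if n < 3 else 'B'
complement = [n for n in G if G.nodes[n]['DISTRICT'] != 'A']
print(nx.node_boundary(G, complement))   # -> {2}: the only 'A' node touching 'B'
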
def test_path_graph(self):
    P10 = cnlti(nx.path_graph(10), first_label=1)
    assert_equal(nx.node_boundary(P10, []), set())
    assert_equal(nx.node_boundary(P10, [], []), set())
    assert_equal(nx.node_boundary(P10, [1, 2, 3]), {4})
    assert_equal(nx.node_boundary(P10, [4, 5, 6]), {3, 7})
    assert_equal(nx.node_boundary(P10, [3, 4, 5, 6, 7]), {2, 8})
    assert_equal(nx.node_boundary(P10, [8, 9, 10]), {7})
    assert_equal(nx.node_boundary(P10, [4, 5, 6], [9, 10]), set())

def test_path_graph(self):
    P10 = cnlti(nx.path_graph(10), first_label=1)
    assert nx.node_boundary(P10, []) == set()
    assert nx.node_boundary(P10, [], []) == set()
    assert nx.node_boundary(P10, [1, 2, 3]) == {4}
    assert nx.node_boundary(P10, [4, 5, 6]) == {3, 7}
    assert nx.node_boundary(P10, [3, 4, 5, 6, 7]) == {2, 8}
    assert nx.node_boundary(P10, [8, 9, 10]) == {7}
    assert nx.node_boundary(P10, [4, 5, 6], [9, 10]) == set()

def star_decomp(G, center, delta, eps, num_edges):
    H = G.copy()
    distances, radius = distances_to_center(G, center)
    ball_radius, ball = ball_cut(G, distances, radius, delta, num_edges, center)
    node_boundary = set(nx.node_boundary(G, ball))
    H.remove_nodes_from(ball)
    cones, anchors = cone_decomp(H, node_boundary, eps * radius / 2, num_edges)
    bridges = list(get_bridges(G, center, anchors, floor(ball_radius)))
    partitions = [(ball, center)] + cones
    return partitions, bridges

def cone_cut(G, x, l, L, S, num_edges, distances):
    r = l
    ideal = get_ideal(G, S, x, distances)
    cone = ideal if r == 0 else get_cone(G, r, ideal)
    mu, cone_cut_size = cone_properties(G, cone, num_edges)
    while cone_cut_size > mu / (L - l):
        for item in list(nx.node_boundary(G, cone)):
            cone.append(item)
        mu, cone_cut_size = cone_properties(G, cone, num_edges)
        r += 1
    return r, cone

def test_path_node_boundary(self):
    """Check node boundaries in path graph."""
    P10 = self.P10
    assert_equal(nx.node_boundary(P10, []), [])
    assert_equal(nx.node_boundary(P10, [], []), [])
    assert_equal(nx.node_boundary(P10, [1, 2, 3]), [4])
    assert_equal(sorted(nx.node_boundary(P10, [4, 5, 6])), [3, 7])
    assert_equal(sorted(nx.node_boundary(P10, [3, 4, 5, 6, 7])), [2, 8])
    assert_equal(nx.node_boundary(P10, [8, 9, 10]), [7])
    assert_equal(sorted(nx.node_boundary(P10, [4, 5, 6], [9, 10])), [])

def test_k10_node_boundary(self):
    """Check node boundaries in K10"""
    K10 = self.K10
    assert_equal(nx.node_boundary(K10, []), [])
    assert_equal(nx.node_boundary(K10, [], []), [])
    assert_equal(sorted(nx.node_boundary(K10, [1, 2, 3])),
                 [4, 5, 6, 7, 8, 9, 10])
    assert_equal(sorted(nx.node_boundary(K10, [4, 5, 6])),
                 [1, 2, 3, 7, 8, 9, 10])
    assert_equal(sorted(nx.node_boundary(K10, [3, 4, 5, 6, 7])),
                 [1, 2, 8, 9, 10])
    assert_equal(nx.node_boundary(K10, [4, 5, 6], []), [])
    assert_equal(nx.node_boundary(K10, K10), [])
    assert_equal(nx.node_boundary(K10, [1, 2, 3], [3, 4, 5]), [4, 5])

def labelset(self, label):
    """
    Return the set of nodeids for |EPs| that share a label.

    Args:
        label: The label that returned nodeids share.
    Returns:
        A set of nodeids, which may be an empty set.
    """
    if label not in self._graph.labels:
        raise XmrsStructureError(
            'Cannot get labelset for {}. It is not used as a label.'
            .format(str(label))
        )
    lblset = set(nx.node_boundary(self._graph, [label]))
    return lblset

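# --- For a single source node, node_boundary reduces to that node's neighbour
# set (its successors in a directed graph); a tiny hedged illustration of the
# call above, with made-up node names.
import networkx as nx

D = nx.DiGraph([('h1', 'e2'), ('h1', 'e3'), ('e2', 'x4')])
print(nx.node_boundary(D, ['h1']) == set(D.successors('h1')))   # True
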
def update(self, node, old_district, new_district):
    other_districts_nodes = nx.Graph()
    for i in range(self.number_districts):
        if i != old_district:
            other_districts_nodes.add_nodes_from(self.districts[i].nodes())
    other_districts = self.G.subgraph(other_districts_nodes)
    old_boundary_nodes = nx.node_boundary(self.G, [node], other_districts)
    for n in old_boundary_nodes:
        if n in self.boundary_nodes:
            self.boundary_nodes.remove(n)
        if n in self.boundary_nodes_list:
            self.boundary_nodes_list[old_district].remove(n)
    updated_old_district = set(self.district_list[old_district].nodes())
    updated_old_district.remove(node)
    updated_new_district = set(self.district_list[new_district].nodes())
    updated_new_district.add(node)
    self.G.node[node]["district"] = new_district
    self.district_list[old_district] = self.G.subgraph(updated_old_district)
    self.district_list[new_district] = self.G.subgraph(updated_new_district)

def resolve_input_boundary(flat_graph, non_subject_nodes):
    pre_run_result_dict: Dict[pe.Node, InterfaceResult] = dict()
    for (u, v, c) in nx.edge_boundary(flat_graph, non_subject_nodes, data=True):
        if u not in pre_run_result_dict:
            pre_run_result_dict[u] = u.run()
        connections = c["connect"]
        result = pre_run_result_dict[u]
        assert result.outputs is not None
        for u_field, v_field in connections:
            if isinstance(u_field, tuple):
                raise NotImplementedError()
            value = result.outputs.trait_get()[u_field]
            v.set_input(v_field, value)
    for u in pre_run_result_dict.keys():
        rmtree(u.output_dir(), ignore_errors=True)
        flat_graph.remove_node(u)
    assert len(nx.node_boundary(flat_graph, non_subject_nodes)) == 0

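# --- Hedged mini-example of the edge_boundary / node_boundary pair used above,
# on a plain DiGraph rather than a nipype workflow graph (names are made up).
import networkx as nx

W = nx.DiGraph()
W.add_edge('template', 'register', connect=[('out_file', 'reference')])
W.add_edge('register', 'stats')
non_subject = {'template'}
print(list(nx.edge_boundary(W, non_subject, data=True)))
# -> [('template', 'register', {'connect': [('out_file', 'reference')]})]
W.remove_node('template')
print(nx.node_boundary(W, non_subject))   # -> set(): the boundary is resolved
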
def __generate_candidate_graph(self, seed, max_size):
    logging.debug('seed: {}'.format(seed))
    subnetwork = [seed]
    SS = self.S.loc[seed, seed]
    decrements = [np.sqrt(SS)]
    while len(subnetwork) < max_size:
        # find the nodes that are adjacent to the current subnetwork
        boundary_nodes = nx.node_boundary(self.M, subnetwork)
        if not boundary_nodes:
            logging.debug('No further connected nodes')
            break
        maxgain = -np.inf
        i = len(subnetwork)
        # for each possible node to add to subnetwork
        for v in boundary_nodes:
            # compute the decrease in SSE due to addition of this node
            # for explanation of this computation see documentation
            SStrial = (SS + 2 * np.sum(self.S.loc[subnetwork, v]) + self.S.loc[v, v])
            # how much would addition of this node improve the avg error?
            SStest = SStrial / (i + 1) - SS / i
            if SStest > maxgain:
                maxgain = SStest
                bestSS = SStrial
                best = v
        if maxgain < 0.0:
            # no adjacent nodes improve the error
            logging.debug('No further improvement possible.')
            break
        # add the node that improves error most to the growing subnetwork
        SS = bestSS
        subnetwork.append(best)
        decrements.append(np.sqrt(maxgain))
        logging.debug('subnetwork size {} -- added {}, improvement {}'.format(
            len(subnetwork), best, np.sqrt(maxgain)))
    c = self.__average_column(subnetwork)
    error = np.linalg.norm(self.X - self.__outer(c, subnetwork))
    return subnetwork, error, decrements

def remove_duplicates(G, communities, delta):
    # Create node2com dictionary
    node2com = defaultdict(list)
    com_id = 0
    for comm in communities:
        for node in comm:
            node2com[node].append(com_id)
        com_id += 1
    deleted = dict()
    i = 0
    for i in range(len(communities)):
        comm = communities[i]
        if deleted.get(i, 0) == 0:
            nbrnodes = nx.node_boundary(G, comm)
            for nbr in nbrnodes:
                nbrcomids = node2com[nbr]
                for nbrcomid in nbrcomids:
                    if (i != nbrcomid and deleted.get(i, 0) == 0
                            and deleted.get(nbrcomid, 0) == 0):
                        nbrcom = communities[nbrcomid]
                        distance = 1.0 - (len(set(comm) & set(nbrcom)) * 1.0 /
                                          (min(len(comm), len(nbrcom)) * 1.0))
                        if distance <= delta:
                            # Near duplicate communities found.
                            # Discard current community
                            # Followed the idea of Lee et al. in GCE
                            deleted[i] = 1
                            for node in comm:
                                node2com[node].remove(i)
    for i in range(len(communities)):
        if deleted.get(i, 0) == 1:
            communities[i] = []
    # Discard empty communities (list() so the result is a list on Python 3 too)
    communities = list(filter(lambda c: c != [], communities))
    return communities

def wiki_distance(start_page_name, end_page_name):
    depth = 0
    G = nx.Graph()
    articles = scrape_page(start_page_name)
    global PAGES_SCRAPED
    PAGES_SCRAPED += 1
    print(PAGES_SCRAPED)
    for article in articles:
        G.add_edge(start_page_name, article)
    while True:
        try:
            shortest_path = nx.shortest_path(
                G, source=start_page_name, target=end_page_name
            )
            break
        except nx.NetworkXException:
            # covers both a missing target node and no path yet
            print('No path found, continuing...')
            print(depth)
            depth += 1
            for article_boundary in nx.node_boundary(G, [start_page_name]):
                new_articles = scrape_page(article_boundary)
                for new_article in new_articles:
                    G.add_edge(article_boundary, new_article)
            print('============= Next iteration')
    length = len(shortest_path) - 1
    print(
        'The shortest path is {}, and the distance is {}'.format(
            str(shortest_path), length
        )
    )
    return length

def listMinimalSeparatorsPrivate(graph: networkx.Graph, A, U, a, b, results):
    # given A, compute componentOfSeparatedGraphContainingA, which is V(Ca(S(A)))
    SeparatorA = TakataSeparator.closeSeparator(graph, A, b)
    separatedGraph = graph.copy()
    separatedGraph.remove_nodes_from(SeparatorA)
    componentOfSeparatedGraphContainingA = networkx.node_connected_component(
        separatedGraph, a)
    if len(componentOfSeparatedGraphContainingA.union(U)) == 0:  # subtree is not barren
        newA = componentOfSeparatedGraphContainingA
        # node_boundary takes the graph as its first argument
        neighborhoodOfNewA = set(networkx.node_boundary(graph, newA))
        possibleExpansions = set(neighborhoodOfNewA).difference(U)
        if len(possibleExpansions) != 0:
            for v in possibleExpansions:
                TakataSeparator.listMinimalSeparatorsPrivate(
                    graph, newA.union({v}), U, a, b, results)
                TakataSeparator.listMinimalSeparatorsPrivate(
                    graph, newA, U.union({v}), a, b, results)
        else:
            # base case, node is a leaf; store a hashable copy of the separator
            results.add(frozenset(TakataSeparator.closeSeparator(graph, newA, b)))
    else:
        # The subtree is barren.
        pass

def get_boundary_nodes(network, subnetwork):
    # gets the nodes which sit on the edge of the subnetwork
    return nx.node_boundary(network, subnetwork)

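# --- Hedged usage example for the helper above (assumes it is in scope;
# the 3x3 grid is purely illustrative).
import networkx as nx

net = nx.grid_2d_graph(3, 3)
sub = [(0, 0), (0, 1), (1, 0)]                      # a corner patch
print(sorted(get_boundary_nodes(net, sub)))         # -> [(0, 2), (1, 1), (2, 0)]
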
def boundary_nodes(self, nbunch, nbunch2=None):
    nbunch = (n.node_id for n in nbunch)  # only store the id in overlay
    return iter(nidb_node(self, node)
                for node in nx.node_boundary(self._graph, nbunch, nbunch2))

def cheeger(G, k):
    # Monte Carlo estimate over 100 random k-node samples
    # (needs `from random import sample`; on Python 3.11+ pass a list,
    # e.g. sample(list(G), k))
    return min([float(len(nx.node_boundary(G, sample(G.nodes(), k)))) / k
                for n in range(100)])

def initialize_boundary(self):
    for i in range(self.number_districts):
        X = list(nx.node_boundary(self.G, self.districts[i].nodes()))
        self.boundary_nodes = self.boundary_nodes.union(X)
        self.boundary_nodes_list.append(X)

def random_neighbor(self, graph):
    # node_boundary returns a set; list() keeps random.sample working on Python 3.11+
    return random.sample(list(nx.node_boundary(self.G, graph.nodes())), 1)

def cheeger(G, k):
    return min(len(nx.node_boundary(G, nn)) / k for nn in combinations(G, k))

def cheeger(G, k):
    return min(float(len(nx.node_boundary(G, nn))) / k
               for nn in combinations(G, k))

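# --- Hedged sanity check for the exhaustive cheeger variants above (assumes one
# of them is in scope and that `from itertools import combinations` was done).
import networkx as nx

print(cheeger(nx.cycle_graph(6), 3))   # -> 0.666...: a contiguous 3-node arc is worst
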
print(f"Graph has {nx.number_of_nodes(G)} nodes with {nx.number_of_edges(G)} edges")
print(f"{nx.number_connected_components(G)} connected components")

for (source, target) in [("chaos", "order"), ("nodes", "graph"), ("pound", "marks")]:
    print(f"Shortest path between {source} and {target} is")
    try:
        shortest_path = nx.shortest_path(G, source, target)
        for n in shortest_path:
            print(n)
    except nx.NetworkXNoPath:
        print("None")

# draw a subset of the graph
boundary = list(nx.node_boundary(G, shortest_path))
G.add_nodes_from(shortest_path, color="red")
G.add_nodes_from(boundary, color="blue")
H = G.subgraph(shortest_path + boundary)
colors = nx.get_node_attributes(H, "color")
options = {
    "node_size": 1500,
    "alpha": 0.3,
    "node_color": colors.values(),
}
pos = nx.kamada_kawai_layout(H)
nx.draw(H, pos, **options)
nx.draw_networkx_labels(H, pos, font_weight="bold")
plt.show()

def unified_network_drawer(
        G,
        correlation_table,
        names,
        filename=None,
        low_threshold=0.5,
        hi_threshold=0.9,
        cols=None,
        label_fontsize=8,
        edge_alpha=1.0,
        trim_isolated_nodes=False,
        max_links=9999999,
        labels=True,
        node_size=100,
        edges=True,
        save_gml=False,
        layout="neato",
        mark_clusters=False,
        cluster_alpha_back=0.8,
        cluster_node_size=3000,
        node_alpha=0.6,
        nodes=True,
        cluster_alpha_back2=1.0,
        mark_path=None,
        mark_paths=None,
        path_color='red',
        title=None,
        edge_pad=0.03,
        title_font_size=12,
        traversal_weight=0.0,
        draw_node_boundary=False,
        node_boundary=None,
        width_adjuster=20,  # default for MDSquish
        layout_data=None,  # preexisting layout data
        **kargs):
    """
    In the kargs:
        expected_branches
        edge_color
        edge_width

    Unified network draw system. In use by:
        network.conditions()
        network.genes()
        mdsquish.network()

    TODO: The edge_width is a bit messy. There are three major arguments
    (edge_width, width_adjuster, traversal_weight) and they interact in
    complicated ways.

    zorder: lowest is further back, higher is further forward.
    """
    # Kargs and defaults:
    edge_color = 'grey'
    edge_width = 1.0
    if 'edge_color' in kargs and kargs['edge_color']:
        edge_color = kargs['edge_color']
    if 'edge_width' in kargs and kargs['edge_width']:
        edge_width = kargs['edge_width']

    # optional return data
    ret_groups = None
    ret_nodes = None
    ret_edges = None

    if layout_data:
        pos = layout_data
    else:
        # pos = nx.drawing.nx_agraph.graphviz_layout(G, layout)  # Bug in NX 1.11
        # A = nx.to_agraph(G)
        # pos = A.graphviz_layout(G, layout)
        # pygraphviz is no longer available ...
        pos = nx.spring_layout(G)

    # trim isolated nodes
    if trim_isolated_nodes:
        # The problem is, all the attributes are also unsynced...
        pass

    fig = gldraw.getfigure(**kargs)
    ax = fig.add_subplot(111)

    # get cols back in the node order:
    # Nice Py2.7 line put back to uglier style.
    # sam_map = {cond: cols[idx] for idx, cond in enumerate(self.getConditionNames())}
    # reorder
    if cols:
        sam_map = dict((cond, cols[idx]) for idx, cond in enumerate(names))
        cols = [sam_map[cond] for cond in G]
    else:
        cols = "grey"

    if node_boundary:
        # Make the background nodes not in the node boundary more transparent
        node_alpha = 0.1

    if nodes:
        draw_nodes(G, pos, ax=ax, node_size=node_size, node_color=cols,
                   alpha=node_alpha, linewidths=0, zorder=5)

    # print 'univerted:', [2.0-(i[2]['weight']) for i in G.edges(data=True)]
    # I pad and invert the weight so that pathfinding works correctly
    elarge = [(u, v, d) for (u, v, d) in G.edges(data=True)
              if ((traversal_weight + 1.0) - d['weight']) >= hi_threshold]
    # valid as all edges must be less than 1.0-hi
    esmall = [(u, v, d) for (u, v, d) in G.edges(data=True)
              if ((traversal_weight + 1.0) - d['weight']) < hi_threshold]

    # mark clusters
    if mark_clusters:
        groups = hierarchical_clusters(G, correlation_table, names, mark_clusters)
        # get a colormap for the groups:
        colormap = cm.get_cmap("Set3", len(groups) + 1)
        colormap = colormap(numpy.arange(len(groups) + 1))
        # draw the groups by size?
        gsizes = {g: len(groups[g]) for g in groups}  # Assume py2.7 now...
        for g in groups:
            node_color = utils.rgba_to_hex(
                colormap[int(g.replace("cluster_", "")) - 1])
            draw_nodes(G, pos, ax=ax, nodelist=groups[g],
                       node_size=cluster_node_size,
                       node_col_override=node_color, node_color=node_color,
                       alpha=cluster_alpha_back2, linewidths=0,
                       zorder=-gsizes[g])
        # Draw an alpha box over the entire network to fade out the groups
        # This could be replaced by imshow for nicer effect.
        xl = ax.get_xlim()
        yl = ax.get_ylim()
        if cluster_alpha_back:
            ax.add_patch(matplotlib.patches.Rectangle(
                (xl[0], yl[0]), xl[1] - xl[0], yl[1] - yl[0],
                facecolor="white", edgecolor='none', zorder=0,
                alpha=cluster_alpha_back))
        ret_groups = groups

    # edges
    if edges:
        draw_edges(G, pos, ax, edgelist=elarge, width=edge_width,
                   width_adjuster=width_adjuster, alpha=edge_alpha,
                   edge_color='#666666', traversal_weight=traversal_weight,
                   zorder=4)
        draw_edges(G, pos, ax, edgelist=esmall, width=edge_width,
                   width_adjuster=width_adjuster, alpha=edge_alpha / 2.0,
                   edge_color='#bbbbbb', traversal_weight=traversal_weight,
                   zorder=3)

    # labels
    if labels:
        draw_node_labels(G, pos, ax=ax, font_size=label_fontsize,
                         font_family='sans-serif', zorder=5)

    if mark_path:
        if isinstance(mark_path, list):
            # You are probably sending your own path
            draw_edges(G, pos, ax, edgelist=mark_path, width=5.0, alpha=1.0,
                       edge_color=path_color,
                       width_adjuster=width_adjuster * 2.0,
                       traversal_weight=traversal_weight,
                       zorder=6)  # in front of nodes
        else:
            # call the appropriate function
            ret_nodes, ret_edges = path_func_mapper[mark_path](G, **kargs)
            cmap = cm.get_cmap("gist_ncar", len(ret_edges))
            cmap = cmap(numpy.arange(len(ret_edges)))
            color = [utils.rgba_to_hex(cmap[i]) for i, e in enumerate(ret_edges)]
            for i, e in enumerate(ret_edges):
                draw_edges(G, pos, ax, edgelist=e, width=3.0, alpha=1.0,
                           edge_color=color[i],
                           traversal_weight=traversal_weight, zorder=6)
            # Don't draw the nodes, you also would need to get out the node name
            # and properly reorder the node_size
            mark_path = ret_nodes  # For compatibility with network_boundary
    elif mark_paths:
        for p in mark_paths:
            draw_edges(G, pos, ax, edgelist=p,  # draw each path in turn
                       width=5.0, alpha=0.5, edge_color=path_color,
                       width_adjuster=300, traversal_weight=traversal_weight,
                       zorder=6)  # in front of nodes

    # node_boundary
    if draw_node_boundary:
        if node_boundary:
            boundary = node_boundary  # I assume it is already a boundary
        elif mark_path:  # use a path if no network_boundary sent
            boundary = nx.node_boundary(G, mark_path)
        else:
            raise AssertionError(
                'asked to draw a boundary, but no network_boundary or path available')
        draw_nodes(G, pos, ax=ax, nodelist=boundary, node_size=node_size * 1.2,
                   # don't use node_color, see if draw_nodes can pick it up from the attributes
                   alpha=0.9, linewidths=0.0, zorder=3)

    # clean up matplotlib gubbins:
    ax.set_position([0, 0, 1, 1])
    ax.set_frame_on(False)
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)

    # make nice edges (by default it chooses far too generous borders):
    xy = numpy.asarray([pos[v] for v in G.nodes()])
    x_min, x_max = min(xy[:, 0]), max(xy[:, 0])
    y_min, y_max = min(xy[:, 1]), max(xy[:, 1])
    x_pad = (x_max - x_min) * edge_pad
    y_pad = (y_max - y_min) * edge_pad
    ax.set_xlim(x_min - x_pad, x_max + x_pad)
    ax.set_ylim(y_min - y_pad, y_max + y_pad)

    if title:
        # ax.set_title(title)
        ax.text(x_min - (x_pad // 2), y_min - (y_pad // 2), title,
                ha='left', size=title_font_size)

    if save_gml:
        nx.write_gml(G, save_gml)
        config.log.info("network_drawer: saved GML '%s'" % save_gml)

    actual_filename = gldraw.savefigure(fig, filename)

    # Load the return data:
    ret = {"actual_filename": actual_filename}
    if ret_groups:
        ret["groups"] = ret_groups
    if mark_path:
        ret["nodes"] = ret_nodes
        ret["edges"] = ret_edges

    return ret