def test_directed(self):
    """Tests the edge boundary of a directed graph."""
    G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)])
    S = {0, 1}
    boundary = list(nx.edge_boundary(G, S))
    expected = [(1, 2)]
    assert_equal(boundary, expected)
def test_null_graph(self):
    null = nx.null_graph()
    assert_equal(list(nx.edge_boundary(null, [])), [])
    assert_equal(list(nx.edge_boundary(null, [], [])), [])
    assert_equal(list(nx.edge_boundary(null, [1, 2, 3])), [])
    assert_equal(list(nx.edge_boundary(null, [1, 2, 3], [4, 5, 6])), [])
    assert_equal(list(nx.edge_boundary(null, [1, 2, 3], [3, 4, 5])), [])
def cut_size(G, S, T=None, weight=None):
    """Returns the size of the cut between two sets of nodes.

    A *cut* is a partition of the nodes of a graph into two sets. The
    *cut size* is the sum of the weights of the edges "between" the two
    sets of nodes.

    Parameters
    ----------
    G : NetworkX graph

    S : sequence
        A sequence of nodes in `G`.

    T : sequence
        A sequence of nodes in `G`. If not specified, this is taken to
        be the set complement of `S`.

    weight : object
        Edge attribute key to use as weight. If not specified, edges
        have weight one.

    Returns
    -------
    number
        Total weight of all edges from nodes in set `S` to nodes in
        set `T` (and, in the case of directed graphs, all edges from
        nodes in `T` to nodes in `S`).

    Examples
    --------
    In the graph with two cliques joined by a single edge, the natural
    bipartition of the graph into two blocks, one for each clique,
    yields a cut of weight one::

        >>> G = nx.barbell_graph(3, 0)
        >>> S = {0, 1, 2}
        >>> T = {3, 4, 5}
        >>> nx.cut_size(G, S, T)
        1

    Each parallel edge in a multigraph is counted when determining the
    cut size::

        >>> G = nx.MultiGraph(["ab", "ab"])
        >>> S = {"a"}
        >>> T = {"b"}
        >>> nx.cut_size(G, S, T)
        2

    Notes
    -----
    In a multigraph, the cut size is the total weight of edges
    including multiplicity.

    """
    edges = nx.edge_boundary(G, S, T, data=weight, default=1)
    if G.is_directed():
        edges = chain(edges, nx.edge_boundary(G, T, S, data=weight, default=1))
    return sum(weight for u, v, weight in edges)
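# Sketch (added for illustration): the docstring above notes that for
# directed graphs cut_size counts boundary edges in both directions; a
# minimal self-contained check with the released nx.cut_size:
import networkx as nx

G = nx.DiGraph([("a", "b"), ("b", "a")])
assert nx.cut_size(G, {"a"}, {"b"}) == 2  # (a, b) and (b, a) both cross the cut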
def updatebandd(edge, node, old_district, new_district):
    # Update boundary and districts
    global boundary_edges
    global DList
    K = nx.edge_boundary(DList[old_district], [node])
    for x in K:
        boundary_edges = boundary_edges.union(set([x]))
        # adding x adds the directed edge x
        boundary_edges = boundary_edges.union(set([(x[1], x[0])]))
    new_oldnodes = set(DList[old_district].nodes())
    new_oldnodes.remove(node)
    new_newnodes = set(DList[new_district].nodes())
    new_newnodes.add(node)
    G.node[node]["district"] = new_district  # networkx 1.x node-attribute API
    DList[old_district] = G.subgraph(new_oldnodes)
    DList[new_district] = G.subgraph(new_newnodes)
    # print(nx.edge_boundary(DList[new_district], [node]))
    K = nx.edge_boundary(DList[new_district], [node])
    for x in K:
        boundary_edges.discard(x)
        boundary_edges.discard((x[1], x[0]))
def mincut_fanout(g, s, T, tau, k, num_samples):
    '''
    Generates warm starts for the SNARES algorithm (a list of strategies
    for the defender and attacker)
    '''
    import random
    g = nx.DiGraph(g)
    for u, v in g.edges():
        g[u][v]['capacity'] = 1
    for v in g.successors(s):
        g[s][v]['capacity'] = np.inf
    best_t = T[np.argmax([tau[t] for t in T])]
    # min_cut = nx.minimum_edge_cut(g, s, best_t)
    part1, part2 = nx.minimum_cut(g, s, best_t)[1]
    # materialize the boundary: edge_boundary returns an iterator in
    # networkx >= 2, so len() and random.sample() below need a list
    if s in part1:
        min_cut = list(nx.edge_boundary(g, part1))
    else:
        min_cut = list(nx.edge_boundary(g, part2))
    defender_strats = []
    attacker_strats = []
    if len(min_cut) < k:
        defender_strats.append(min_cut)
        attacker_strats.append(attacker_br_pure(g, min_cut, s, T, tau))
        return defender_strats, attacker_strats
    for i in range(num_samples):
        defender_strats.append(random.sample(min_cut, k))
        attacker_strats.append(attacker_br_pure(g, defender_strats[-1], s, T, tau))
    return defender_strats, attacker_strats
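# Sketch (added for illustration, networkx >= 2): the pattern used by
# mincut_fanout above -- recover the actual min-cut edges by taking the
# edge boundary of the source-side partition returned by nx.minimum_cut:
import networkx as nx

g = nx.DiGraph([(0, 1), (1, 2), (0, 2), (2, 3)])
for u, v in g.edges():
    g[u][v]["capacity"] = 1
cut_value, (source_side, sink_side) = nx.minimum_cut(g, 0, 3)
cut_edges = list(nx.edge_boundary(g, source_side))
assert cut_value == len(cut_edges) == 1  # the single bottleneck edge (2, 3)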
def backward_step():
    '''
    Poiseuille flow with a boundary asymmetry
    '''
    m = 22
    n = 67
    step_height = 12
    step_width = 10
    obstacle = gds.utils.flatten([[(j, i) for i in range(step_height)] for j in range(step_width)])
    G, (l, r, t, b) = gds.triangular_lattice(m, n, with_boundaries=True)
    for g in [G, l, r, t, b]:
        g.remove_nodes_from(obstacle)
    for i in range(step_width + 1):
        b.add_node((i, step_height))
        if i > 0:
            b.add_edge((i - 1, step_height), (i, step_height))
    for j in range(step_height + 1):
        b.add_node((step_width, j))
        if j > 0:  # fixed: was `if i > 0`, which tested the wrong loop variable
            b.add_edge((step_width, j - 1), (step_width, j))
    G.remove_edges_from(list(nx.edge_boundary(G, l, l)))
    G.remove_edges_from(
        list(
            nx.edge_boundary(G,
                             [(0, 2 * i + 1) for i in range(m // 2)],
                             [(1, 2 * i) for i in range(m // 2 + 1)])))
    G.remove_edges_from(list(nx.edge_boundary(G, r, r)))
    G.remove_edges_from(
        list(
            nx.edge_boundary(G,
                             [(n // 2, 2 * i + 1) for i in range(m // 2)],
                             [(n // 2, 2 * i) for i in range(m // 2 + 1)])))
    weight = 1.
    nx.set_edge_attributes(G, weight, name='w')
    inlet_v = 1.0
    # outlet_v = 2*(m - step_height - 2)*inlet_v / (m - 2)
    outlet_p = 0.0
    # ref_p = 0.0
    # grad_p = 100.0
    velocity, pressure = navier_stokes(G, viscosity=100., density=1.0,
                                       inlets=l.nodes, outlets=r.nodes, w_key='w')
    pressure.set_constraints(dirichlet=gds.combine_bcs(
        # {n: grad_p/2 for n in l.nodes},
        # {n: -grad_p/2 for n in r.nodes if n[1] > step_height//2}
        {(n // 2 + 1, j): outlet_p for j in range(n)}
        # {(n//2+1, m): ref_p}
    ))
    velocity.set_constraints(dirichlet=gds.combine_bcs(
        {((0, i), (1, i)): inlet_v for i in range(step_height + 1, m)},
        # {((n//2, i), (n//2+1, i)): outlet_v for i in range(1, m)},
        # {((n//2-1, 2*i+1), (n//2, 2*i+1)): outlet_v for i in range(0, m//2)},
        gds.zero_edge_bc(t),
        gds.zero_edge_bc(b),
    ))
    return velocity, pressure
def test_null_edge_boundary(self):
    """null graph has empty edge boundaries"""
    null = self.null
    assert_equal(nx.edge_boundary(null, []), [])
    assert_equal(nx.edge_boundary(null, [], []), [])
    assert_equal(nx.edge_boundary(null, [1, 2, 3]), [])
    assert_equal(nx.edge_boundary(null, [1, 2, 3], [4, 5, 6]), [])
    assert_equal(nx.edge_boundary(null, [1, 2, 3], [3, 4, 5]), [])
def boundary_edges(G, zones):
    """
    return set of boundary edges between the zones

    zones should be a list of subgraphs of G
    """
    edges = set()
    for z in zones:
        edges.update(nx.edge_boundary(G, z.nodes()))
    nodes = set()
    n2n = {}
    for u, v in edges:
        nodes.update({u})
        nodes.update({v})
        # try:
        #     n2n[u] += [v]
        # except KeyError:
        #     n2n[u] = [v]
        #
        # try:
        #     n2n[v] += [u]
        # except KeyError:
        #     n2n[v] = [u]
    Gbound = nx.Graph(G.subgraph(nodes))
    for comp in nx.connected_components(Gbound):
        for nn in comp:
            n2n[nn] = comp.difference({nn})
    return edges, n2n
def stopgap_nodes(g, department='', k=20):
    department_node_ids = [node for node in g.nodes if department == get_prefix(node)]
    res = []
    for node_id in department_node_ids:
        print(node_id)
        # out degree leading into the department
        out_degree_internal = sum([
            i[2] for i in nx.edge_boundary(g, [node_id], department_node_ids, data='weight')
        ])
        sample_edges = [edge for edge in g.edges if edge[1] == node_id]
        if len(sample_edges) == 0:
            continue
        sample_edge = sample_edges[0]
        count = g.get_edge_data(sample_edge[0], sample_edge[1])['count_course']
        if out_degree_internal == 0:
            out_degree_internal = 1
        print(
            f"{int(out_degree_internal)} total {department} courses enrolled by "
            f"{int(count)} students after course {node_id}"
        )
        coeff = out_degree_internal / count
        res.append((node_id, coeff))
    res = sorted(res, key=lambda x: x[1], reverse=True)[:k]
    return res
def edges_beetween(G, a, b):
    """Return the number of edges between two sets of nodes.

    WARNING: a and b should have no element in common.
    """
    return len(list(nx.edge_boundary(G, a, b)))
def InitializeBoundary():
    global boundary_edges
    m = 9
    for i in range(m):
        Y = nx.edge_boundary(G, DList[i].nodes())
        X = set(Y)
        boundary_edges = boundary_edges.union(X)
def conductance(G, S):
    # S is a subgraph of G; written against the networkx 1.x API, where
    # edge_boundary returns a list and degree() returns a dict
    if len(S.nodes()) == 0 or len(S.nodes()) == 1:
        return 0
    else:
        num_cut_edges = len(nx.edge_boundary(G, S)) * 1.0
        volume_S = sum(G.degree(S).values()) * 1.0
        return num_cut_edges / volume_S
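# Sketch (added for illustration, networkx >= 2 API): the same quantity as
# conductance above, cross-checked against the library's nx.conductance.
# The two agree here because the split is symmetric; note nx.conductance
# divides by min(volume(S), volume(complement)) rather than by volume(S):
import networkx as nx

G = nx.barbell_graph(3, 0)  # two triangles joined by a single edge
S = {0, 1, 2}
num_cut_edges = len(list(nx.edge_boundary(G, S)))
volume_S = sum(d for _, d in G.degree(S))
assert num_cut_edges / volume_S == nx.conductance(G, S)  # 1/7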
def cal_modularity_change(node, group, community_dict, graph, m):
    node_in = community_dict[node]
    k_i_in = len(nx.edge_boundary(graph, [node], node_in))
    k_i = graph.degree(node)
    total_weight_in = sum(graph.degree(i) for i in node_in)
    modu_change = (k_i_in / m) + (total_weight_in * k_i) / (2 * m * m)
    return modu_change
def test_multigraph(self):
    """Tests the edge boundary of a multigraph."""
    G = nx.MultiGraph(list(nx.cycle_graph(5).edges()) * 2)
    S = {0, 1}
    boundary = list(nx.edge_boundary(G, S))
    expected = [(0, 4), (0, 4), (1, 2), (1, 2)]
    assert_equal(boundary, expected)
def _get_nncf_graph_pattern_input_output(self, match: List[str]) -> NNCFGraphPatternIO:
    out_edge_boundary = list(nx.edge_boundary(self._nx_graph, match, data=True))
    complement = list(filter(lambda x: x not in match, self._nx_graph.nodes.keys()))
    in_edge_boundary = list(nx.edge_boundary(self._nx_graph, complement, data=True))
    boundary = in_edge_boundary + out_edge_boundary
    input_nncf_edges = []
    output_nncf_edges = []
    input_nncf_nodes = []
    output_nncf_nodes = []

    for key in match:
        # Currently we treat the nodes without incoming edges as "input" and
        # the nodes without outgoing edges as "output". A proper way to find
        # the input nodes would be to mark the tensors arriving at
        # NNCFNetwork's "forward" as input, then drop the marking once the
        # first operation with an input tensor has been done; the node
        # corresponding to this operation would be "input" by definition.
        # Same with output nodes - should check the model output for
        # TracedTensors and mark the nodes from which such tensors originated
        # as "output".
        # TODO: implement the functionality above.
        if not list(self._nx_graph.successors(key)):
            output_nncf_nodes.append(self._nx_node_to_nncf_node(self._nx_graph.nodes[key]))
        if not list(self._nx_graph.predecessors(key)):
            input_nncf_nodes.append(self._nx_node_to_nncf_node(self._nx_graph.nodes[key]))

    for nx_edge in boundary:
        from_node_key = nx_edge[0]
        to_node_key = nx_edge[1]
        data = nx_edge[2]
        nncf_edge = NNCFGraphEdge(
            self._nx_node_to_nncf_node(self._nx_graph.nodes[from_node_key]),
            self._nx_node_to_nncf_node(self._nx_graph.nodes[to_node_key]),
            data[NNCFGraph.ACTIVATION_SHAPE_EDGE_ATTR])
        if from_node_key in match:
            output_nncf_edges.append(nncf_edge)
        elif to_node_key in match:
            input_nncf_edges.append(nncf_edge)
        else:
            raise RuntimeError("Invalid graph expression supplied!")

    return NNCFGraphPatternIO(input_nncf_edges, output_nncf_edges,
                              input_nncf_nodes, output_nncf_nodes)
def check_state(self, min_size=None):
    # check nodes of G are all in partition assignment
    V = set(self.g.nodes())
    nodes = set(self.nodes.keys())
    assert V == nodes
    # check each node's partition contains the node
    assigned_partitions = set([])
    for u in V:
        i = self.nodes[u]
        assert u in set(self.partition[i])
        assigned_partitions.add(i)
    assert assigned_partitions == set(self.partition.keys())
    # check that the set of nodes in partitioning is exactly V
    tmp = reduce(lambda x, y: x + y, self.partition.values())
    nodes = set(tmp)
    assert len(tmp) == len(nodes)
    assert V == nodes
    # check min size of partitions
    partition_sizes = map(len, self.partition.values())
    if min_size != None:
        assert min(partition_sizes) >= min_size
    assert sum(partition_sizes) == len(V)
    assert set(self.partition_graph.nodes()) == set(self.partition.keys())
    for i in self.partition_graph:
        for j in self.partition_graph:
            if i < j:
                continue
            if i == j:
                nodes_i = set(self.partition[i])
                neighbors = []
                for u in nodes_i:
                    neighbors += self.g.neighbors(u)
                count = 0
                for nbr in neighbors:
                    count += nbr in nodes_i
                count /= 2  # each edge is double counted
            else:
                nodes_i = self.partition[i]
                nodes_j = self.partition[j]
                count = len(networkx.edge_boundary(self.g, nodes_i, nodes_j))
            if self.partition_graph.has_edge(i, j):
                stored_count = self.partition_graph.get_edge_data(i, j, key=0)["count"]
                assert count == stored_count, \
                    "Mismatch: edges(%d,%d)=%d stored count=%d" % (i, j, count, stored_count)
            else:
                assert count == 0, \
                    "Mismatch: edges(%d,%d)=%d but no count stored" % (i, j, count)
def island_update(topology, new_policy):
    """
    precondition: Assumes that only one island update is performed, and no
    subspace updates have been performed. This assumption is forced by our
    use of VLAN tags instead of MPLS labels, which provides per-packet
    consistency.
    """
    inst.stats.tally_update(new_policy)
    log.info("Island update")
    old_policy = inst.current_abstract_policy
    # Switches which didn't change in new policy
    nops = set(s1 for s1, c1 in old_policy
               if switch_covered(c1, new_policy[s1]))
    # Everything else
    new = set(topology.switches()) - nops
    old = set()
    fixpoint = island_fixpoint(topology, new_policy)
    while new:
        additions = fixpoint(new, old)
        old |= new
        new = additions
    mods = old
    subpolicy = restrict_policy(mods, new_policy)
    boundary = nx.edge_boundary(topology, mods)
    fake_edge_ports = \
        [topology.node[x]['ports'][y] for (x, y) in boundary
         if topology.node[y]['isSwitch']]
    # retrieve current data from inst
    current_internal_policy = inst.current_internal_policy
    current_edge_policy = inst.current_edge_policy
    current_version = inst.current_version
    current_priority = inst.current_priority
    # calculate new version and priority
    new_version = current_version + 1
    new_priority = current_priority - 1
    # Have to manually construct the correct edge policies by
    # distinguishing between "true" edge ports to hosts and "fake"
    # edge ports to other switches running the old version.
    internal_policy, edge_policy = \
        mk_versioned_policies(subpolicy, new_version, new_priority, topology,
                              old_version=current_version,
                              fake_edge_ports=fake_edge_ports)
    old_internal_policy = restrict_policy(mods, current_internal_policy)
    old_edge_policy = restrict_policy(mods, current_edge_policy)
    return UpdateObject(internal_policy, edge_policy,
                        old_internal_policy, old_edge_policy,
                        new_priority, new_version)
def fraction_of_internal_edges(network, vertices):
    vertices = [v for v in vertices if network.has_node(v)]
    if len(vertices) == 0:
        return np.nan
    internal_edges = len(network.subgraph(vertices).copy().edges)
    total_edges = len(list(networkx.edge_boundary(network, vertices))) + internal_edges
    return internal_edges / float(total_edges)
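# Usage sketch (added for illustration; assumes fraction_of_internal_edges
# above is in scope): on a triangle with one pendant edge, the candidate set
# {0, 1, 2} has 3 internal edges and 1 boundary edge, so the fraction is 3/4:
import networkx

network = networkx.Graph([(0, 1), (1, 2), (2, 0), (2, 3)])
assert fraction_of_internal_edges(network, [0, 1, 2]) == 0.75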
def find_bridge_edges(raw, squelched, groupsize):
    groups = build_affinity_groups(squelched, groupsize)
    for (left, right) in itertools.combinations(groups, 2):
        left_name = name_subgraph(left)
        right_name = name_subgraph(right)
        # materialize: under networkx >= 2, edge_boundary returns a generator,
        # which is always truthy, so the emptiness test below needs a list
        boundary = list(nx.edge_boundary(raw, left.nodes(), right.nodes()))
        if boundary:
            yield left_name, right_name, boundary
def get_boundary_intersections(self, selected) -> Dict[int, Dict[int, frozenset]]:
    intersections = {bag_id: dict() for bag_id in selected}
    for bag_id, nbr_id in nx.edge_boundary(self.decomp, selected):
        assert bag_id in selected, "edge boundary pattern assumption failed"
        intersections[bag_id][nbr_id] = self.bags[bag_id] & self.bags[nbr_id]
    return intersections
def cheeger_constant(G):
    num_nodes = len(G.nodes())
    node_sublists = suff_small_sublists(G.nodes(), num_nodes // 2)
    node_sublists.remove([])
    # An upper bound on the minimum value of the edge boundary, but it's
    # possible that no node attains this value. In the for loop, this value
    # will be used for comparison.
    min_val = num_nodes - 1
    for sl in node_sublists:
        min_val = min(min_val, len(nx.edge_boundary(G, sl)) / len(sl))
    return min_val
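# Sketch (added for illustration): a self-contained Python 3 variant of
# cheeger_constant that replaces the external suff_small_sublists helper
# with itertools.combinations. On the 4-cycle every balanced split cuts
# two edges, so h(G) = 2/2 = 1:
import itertools
import networkx as nx

def cheeger_brute_force(G):
    nodes = list(G.nodes())
    best = float("inf")
    for size in range(1, len(nodes) // 2 + 1):  # subsets of at most half the nodes
        for sl in itertools.combinations(nodes, size):
            best = min(best, len(list(nx.edge_boundary(G, sl))) / len(sl))
    return best

assert cheeger_brute_force(nx.cycle_graph(4)) == 1.0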
def test_multidigraph(self):
    """Tests the edge boundary of a multidigraph."""
    edges = [(0, 1), (1, 2), (2, 3), (3, 4), (4, 0)]
    G = nx.MultiDiGraph(edges * 2)
    S = {0, 1}
    boundary = list(nx.edge_boundary(G, S))
    expected = [(1, 2), (1, 2)]
    assert_equal(boundary, expected)
def test_path_graph(self):
    P10 = cnlti(nx.path_graph(10), first_label=1)
    assert list(nx.edge_boundary(P10, [])) == []
    assert list(nx.edge_boundary(P10, [], [])) == []
    assert list(nx.edge_boundary(P10, [1, 2, 3])) == [(3, 4)]
    assert sorted(nx.edge_boundary(P10, [4, 5, 6])) == [(4, 3), (6, 7)]
    assert sorted(nx.edge_boundary(P10, [3, 4, 5, 6, 7])) == [(3, 2), (7, 8)]
    assert list(nx.edge_boundary(P10, [8, 9, 10])) == [(8, 7)]
    assert sorted(nx.edge_boundary(P10, [4, 5, 6], [9, 10])) == []
    assert list(nx.edge_boundary(P10, [1, 2, 3], [3, 4, 5])) == [(2, 3), (3, 4)]
def test_path_edge_boundary(self):
    """Check edge boundaries in path graph."""
    P10 = self.P10
    assert_equal(nx.edge_boundary(P10, []), [])
    assert_equal(nx.edge_boundary(P10, [], []), [])
    assert_equal(nx.edge_boundary(P10, [1, 2, 3]), [(3, 4)])
    assert_equal(sorted(nx.edge_boundary(P10, [4, 5, 6])), [(4, 3), (6, 7)])
    assert_equal(sorted(nx.edge_boundary(P10, [3, 4, 5, 6, 7])), [(3, 2), (7, 8)])
    assert_equal(nx.edge_boundary(P10, [8, 9, 10]), [(8, 7)])
    assert_equal(sorted(nx.edge_boundary(P10, [4, 5, 6], [9, 10])), [])
    assert_equal(nx.edge_boundary(P10, [1, 2, 3], [3, 4, 5]), [(2, 3), (3, 4)])
def modularity(g, comms):
    """Compute modularity: community-centric version."""
    Q = 0.0
    M = float(g.number_of_edges())
    for c, nodes in comms.iteritems():
        E_in = len(networkx.edge_boundary(g, nodes, nodes)) / 2
        assert E_in / 2 == E_in // 2
        K_in = sum(g.degree(n) for n in nodes)
        Q += E_in / (M * 1) - (K_in / (2 * M)) ** 2
    return Q
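# Sketch (added for illustration, networkx >= 2 / Python 3): the same
# community-centric modularity, cross-checked against the library's
# nx.algorithms.community.modularity. Note a version difference: under
# networkx 1.x, edge_boundary(g, S, S) yielded both orientations of each
# internal edge (hence the /2 in the original above); under 2.x each
# internal edge appears once:
import networkx as nx

def modularity_by_boundary(g, comms):
    Q = 0.0
    M = float(g.number_of_edges())
    for c, nodes in comms.items():
        E_in = len(list(nx.edge_boundary(g, nodes, nodes)))
        K_in = sum(d for _, d in g.degree(nodes))
        Q += E_in / M - (K_in / (2 * M)) ** 2
    return Q

g = nx.barbell_graph(3, 0)
comms = {0: {0, 1, 2}, 1: {3, 4, 5}}
expected = nx.algorithms.community.modularity(g, list(comms.values()))
assert abs(modularity_by_boundary(g, comms) - expected) < 1e-12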
def cut_size(grid):
    district_zero = []
    district_one = []
    for x in grid.nodes():
        if grid.node[x]["district"] == 0:
            district_zero.append(x)
        else:
            district_one.append(x)
    return len(list(nx.edge_boundary(grid, district_zero, district_one)))
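# Sketch (added for illustration, networkx >= 2 API): for an unweighted
# graph, nx.cut_size counts exactly the boundary edges tallied above, so
# the two agree on a 2x2 grid split into left and right columns:
import networkx as nx

grid = nx.grid_2d_graph(2, 2)
district_zero = [x for x in grid.nodes() if x[0] == 0]
district_one = [x for x in grid.nodes() if x[0] == 1]
assert len(list(nx.edge_boundary(grid, district_zero, district_one))) == 2
assert nx.cut_size(grid, district_zero, district_one) == 2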
def _pre_init(self, pa_name, group, dgraph, fd, boundary_params):
    """Return a tuple of the form (pa_inputs, pa_outputs, renames) for the
    PseudoAssembly that would be created given the nodes in group and the
    given graph.
    """
    # First, find our group boundary
    self._orig_group_nodes = list(group) + list(boundary_params)
    allnodes = dgraph.find_prefixed_nodes(self._orig_group_nodes)
    out_edges = nx.edge_boundary(dgraph, allnodes)
    in_edges = nx.edge_boundary(dgraph, set(dgraph.nodes()).difference(allnodes))
    solver_states = []
    if fd is False:
        for comp in group:
            # Keep any node marked 'solver_state'. Note, only inputs can
            # be solver_states.
            solver_states.extend([node for node in dgraph.find_prefixed_nodes([comp])
                                  if 'solver_state' in dgraph.node[node]])
    pa_inputs = edges_to_dict(in_edges).values()
    pa_inputs.extend(solver_states)
    pa_outputs = set([a[0] for a in out_edges])
    renames = {}
    # Add pseudoassy inputs
    for varpath in list(flatten_list_of_iters(pa_inputs)) + \
                   list(pa_outputs):
        varname = to_PA_var(varpath, pa_name)
        if varpath in dgraph:
            renames[varpath] = varname
            old = dgraph.base_var(varpath)
            if old != varpath:
                renames[old] = to_PA_var(old, pa_name)
    # make boundary params outputs of the PA
    pa_outputs.update(boundary_params)
    return pa_inputs, pa_outputs, renames
def isoperimetricscore():
    total = 0  # renamed from `sum`, which shadowed the builtin
    for g in DList:
        p = len(list(nx.edge_boundary(G, g.nodes()))) ** 2
        q = len(g.nodes())
        if q != 0:
            total = total + float(p) / float(q)
        else:
            total += 10000000
    return total
def sample_graph(self, enforce_min_degree=False, enforce_connected_comps=False):
    g2 = networkx.Graph()
    for i in self.state.partition.keys():
        edges_within = self.state.edge_count(i)
        nodes_i = self.state.get_nodes(i)
        g2.add_nodes_from(nodes_i)
        assert edges_within <= len(nodes_i) * (len(nodes_i) - 1) / 2 and edges_within >= 0
        while edges_within > 0:
            u = random.choice(nodes_i)
            v = random.choice(nodes_i)
            if u == v or g2.has_edge(u, v):
                continue
            else:
                g2.add_edge(u, v)
                edges_within -= 1
        for j in self.state.neighbors(i):
            if i <= j:
                continue
            edges_between = self.state.edge_count(i, j)
            nodes_j = self.state.get_nodes(j)
            assert edges_between <= len(nodes_i) * len(nodes_j) and edges_between >= 0
            while edges_between > 0:
                u = random.choice(nodes_i)
                v = random.choice(nodes_j)
                if g2.has_edge(u, v):
                    continue
                else:
                    g2.add_edge(u, v)
                    edges_between -= 1
    if enforce_min_degree:
        self.min_degree(g2)
    if enforce_connected_comps:
        self.conn_comps(g2)
    if debug_sampling:
        #import pdb
        print "Checking sampled graph"
        for i in self.state.partition:
            nodes_i = self.state.get_nodes(i)
            assert self.state.edge_count(i) == len(networkx.subgraph(g2, nodes_i).edges())
            for j in self.state.partition:
                if i <= j:
                    continue
                nodes_j = self.state.get_nodes(j)
                assert self.state.edge_count(i, j) == \
                    len(networkx.edge_boundary(g2, nodes_i, nodes_j))
    return g2
def test_path_graph(self):
    P10 = cnlti(nx.path_graph(10), first_label=1)
    assert_equal(list(nx.edge_boundary(P10, [])), [])
    assert_equal(list(nx.edge_boundary(P10, [], [])), [])
    assert_equal(list(nx.edge_boundary(P10, [1, 2, 3])), [(3, 4)])
    assert_equal(sorted(nx.edge_boundary(P10, [4, 5, 6])), [(4, 3), (6, 7)])
    assert_equal(sorted(nx.edge_boundary(P10, [3, 4, 5, 6, 7])), [(3, 2), (7, 8)])
    assert_equal(list(nx.edge_boundary(P10, [8, 9, 10])), [(8, 7)])
    assert_equal(sorted(nx.edge_boundary(P10, [4, 5, 6], [9, 10])), [])
    assert_equal(list(nx.edge_boundary(P10, [1, 2, 3], [3, 4, 5])), [(2, 3), (3, 4)])
def set_boundary_degrees(g, sg):
    # TODO: test this!!
    boundary_degree = {n: 0 for n in sg.nodes()}  # by default every boundary degree is 0
    for u, v in nx.edge_boundary(g, sg.nodes()):
        if sg.has_node(u):
            boundary_degree[u] += g.number_of_edges(u, v)
        else:
            boundary_degree[v] += g.number_of_edges(u, v)
    nx.set_node_attributes(sg, values=boundary_degree, name='b_deg')
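# Usage sketch (added for illustration; assumes set_boundary_degrees above
# is in scope): on a path 0-1-2-3, the subgraph on {1, 2} has one boundary
# edge leaving through each of its nodes:
import networkx as nx

g = nx.Graph([(0, 1), (1, 2), (2, 3)])
sg = g.subgraph([1, 2]).copy()  # copy so the attribute write targets sg only
set_boundary_degrees(g, sg)
assert sg.nodes[1]['b_deg'] == 1  # edge (0, 1) crosses at node 1
assert sg.nodes[2]['b_deg'] == 1  # edge (2, 3) crosses at node 2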
def test_complete_graph(self):
    K10 = cnlti(nx.complete_graph(10), first_label=1)
    ilen = lambda iterable: sum(1 for i in iterable)
    assert_equal(list(nx.edge_boundary(K10, [])), [])
    assert_equal(list(nx.edge_boundary(K10, [], [])), [])
    assert_equal(ilen(nx.edge_boundary(K10, [1, 2, 3])), 21)
    assert_equal(ilen(nx.edge_boundary(K10, [4, 5, 6, 7])), 24)
    assert_equal(ilen(nx.edge_boundary(K10, [3, 4, 5, 6, 7])), 25)
    assert_equal(ilen(nx.edge_boundary(K10, [8, 9, 10])), 21)
    assert_equal(sorted(nx.edge_boundary(K10, [4, 5, 6], [9, 10])),
                 [(4, 9), (4, 10), (5, 9), (5, 10), (6, 9), (6, 10)])
    assert_equal(sorted(nx.edge_boundary(K10, [1, 2, 3], [3, 4, 5])),
                 [(1, 3), (1, 4), (1, 5), (2, 3), (2, 4), (2, 5), (3, 4), (3, 5)])
def boundary_nodes(graph, nodes):
    # TODO: move to utils
    # TODO: use networkx boundary nodes directly: does the same thing
    """
    returns nodes at boundary of G

    based on edge_boundary from networkx
    """
    graph = unwrap_graph(graph)
    nodes = list(nodes)
    nbunch = list(unwrap_nodes(nodes))
    # find boundary
    b_edges = nx.edge_boundary(graph, nbunch)  # boundary edges
    internal_nodes = [s for (s, t) in b_edges]
    assert all(n in nbunch for n in internal_nodes)  # check internal
    return wrap_nodes(graph, internal_nodes)
def test_k10_edge_boundary(self):
    """Check edge boundaries in K10"""
    K10 = self.K10
    assert_equal(nx.edge_boundary(K10, []), [])
    assert_equal(nx.edge_boundary(K10, [], []), [])
    assert_equal(len(nx.edge_boundary(K10, [1, 2, 3])), 21)
    assert_equal(len(nx.edge_boundary(K10, [4, 5, 6, 7])), 24)
    assert_equal(len(nx.edge_boundary(K10, [3, 4, 5, 6, 7])), 25)
    assert_equal(len(nx.edge_boundary(K10, [8, 9, 10])), 21)
    assert_equal(sorted(nx.edge_boundary(K10, [4, 5, 6], [9, 10])),
                 [(4, 9), (4, 10), (5, 9), (5, 10), (6, 9), (6, 10)])
    assert_equal(nx.edge_boundary(K10, [1, 2, 3], [3, 4, 5]),
                 [(1, 3), (1, 4), (1, 5), (2, 3), (2, 4), (2, 5), (3, 4), (3, 5)])
def boundary_nodes(G, nodes):
    # TODO: move to utils
    """
    returns nodes at boundary of G

    TODO: check works for both directed and undirected graphs

    based on edge_boundary from networkx
    """
    import autonetkit.ank as ank_utils
    graph = ank_utils.unwrap_graph(G)
    nodes = list(nodes)
    nbunch = list(ank_utils.unwrap_nodes(nodes))
    # find boundary
    b_edges = nx.edge_boundary(graph, nbunch)  # boundary edges
    internal_nodes = [s for (s, t) in b_edges]
    assert all(n in nbunch for n in internal_nodes)  # check internal
    return ank_utils.wrap_nodes(G, internal_nodes)
def best_edge(component):
    """Returns the optimum (minimum or maximum) edge on the edge
    boundary of the given set of nodes.

    A return value of ``None`` indicates an empty boundary.

    """
    # TODO In Python 3.4 and later, we can just do
    #
    #     boundary = nx.edge_boundary(G, component, data=weight)
    #     return opt(boundary, key=lambda e: e[-1][weight], default=None)
    #
    # which is better because it doesn't require creating a list.
    boundary = list(nx.edge_boundary(G, component, data=True))
    if not boundary:
        return None
    return opt(boundary, key=lambda e: e[-1][weight])
def deltaQ(g, c1, c2):
    """Change of modularity if c1 and c2 were merged"""
    # The formula for modularity change in the paper is deceiving.
    # Derive it yourself:
    #     deltaQ = E12/M - 2*K1*K2/(2M)^2
    M = float(g.number_of_edges())
    E12 = len(networkx.edge_boundary(g, comms[c1], comms[c2]))
    #E1 = g.subgraph(comms[c1]).number_of_edges()
    #E2 = g.subgraph(comms[c2]).number_of_edges()
    K1 = sum(g.degree(n) for n in comms[c1])
    K2 = sum(g.degree(n) for n in comms[c2])
    #dQ = 2*(eij - ai*aj)
    dQ = E12 / M - 2 * K1 * K2 / (2 * M) ** 2
    return dQ
    # Newman method (unreachable; kept for reference, and note E1/E2 are
    # commented out above):
    e12 = E12 / M
    a1 = (K1 - E1) / M
    a2 = (K2 - E2) / M
    return e12 - a1 * a2
def best_edge(component):
    """Returns the optimum (minimum or maximum) edge on the edge
    boundary of the given set of nodes.

    A return value of ``None`` indicates an empty boundary.

    """
    sign = 1 if minimum else -1
    minwt = float('inf')
    boundary = None
    for e in nx.edge_boundary(G, component, data=True):
        wt = e[-1].get(weight, 1) * sign
        if isnan(wt):
            if ignore_nan:
                continue
            msg = "NaN found as an edge weight. Edge %s"
            raise ValueError(msg % (e,))
        if wt < minwt:
            minwt = wt
            boundary = e
    return boundary
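# Sketch (added for illustration): the Python >= 3.4 form described in the
# TODO of the first best_edge above -- min() with `default` consumes the
# edge_boundary generator directly, with no intermediate list. Here `weight`
# is fixed to "weight" and `opt` to min:
import networkx as nx

G = nx.Graph()
G.add_edge(0, 1, weight=3)
G.add_edge(0, 2, weight=1)
edge = min(nx.edge_boundary(G, {0}, data=True),
           key=lambda e: e[-1].get("weight", 1),
           default=None)
assert edge == (0, 2, {"weight": 1})
# an empty boundary falls through to the default:
assert min(nx.edge_boundary(G, {0, 1, 2}, data=True),
           key=lambda e: e[-1].get("weight", 1),
           default=None) is None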
def aggregate_nodes(overlay_graph, nodes, retain=[]):
    """Combines connected nodes into a single node"""
    try:
        retain.lower()
        retain = [retain]  # was a string, put into list
    except AttributeError:
        pass  # already a list

    nodes = list(unwrap_nodes(nodes))
    graph = unwrap_graph(overlay_graph)
    subgraph = graph.subgraph(nodes)
    if not len(subgraph.edges()):
        #print "Nothing to aggregate for %s: no edges in subgraph"
        pass
    total_added_edges = []
    for component_nodes in nx.connected_components(subgraph):
        if len(component_nodes) > 1:
            base = component_nodes.pop()  # choose one base device to retain
            nodes_to_remove = set(component_nodes)  # remaining nodes, set for fast membership test
            external_edges = nx.edge_boundary(graph, component_nodes)
            edges_to_add = []
            for src, dst in external_edges:
                # src is the internal node to remove
                if src == base or dst == base:
                    continue  # don't alter edges from base
                else:
                    if src in nodes_to_remove:
                        # edge from component to outside
                        data = dict((key, graph[src][dst][key]) for key in retain)
                        edges_to_add.append((base, dst, data))
                    else:
                        # edge from outside into component
                        data = dict((key, graph[dst][src][key]) for key in retain)
                        edges_to_add.append((base, src, data))
            graph.add_edges_from(edges_to_add)
            total_added_edges += edges_to_add
            graph.remove_nodes_from(nodes_to_remove)
    return wrap_edges(overlay_graph, total_added_edges)
def main(argv):
    # Partitionfile
    partitionfile = "data/partitions/final_partitions_p100_200_0.2.csv"
    project = "584"
    reverse = False

    # Read in Networks
    FF_all = nx.read_edgelist("data/networks/%s_FF.edgelist" % project, nodetype=str,
                              data=(("weight", float),), create_using=nx.DiGraph())
    AT_all = nx.read_edgelist("data/networks/%s_solr_AT.edgelist" % project, nodetype=str,
                              data=(("weight", float),), create_using=nx.DiGraph())
    RT_all = nx.read_edgelist("data/networks/%s_solr_RT.edgelist" % project, nodetype=str,
                              data=(("weight", float),), create_using=nx.DiGraph())

    try:
        opts, args = getopt.getopt(argv, "r")
    except getopt.GetoptError:
        print "edges.py -r [if you want to reverse the AT<-->RT tie direction ]"
    for opt, arg in opts:
        if opt in ("-r"):
            print "Calculating the influence of outgoing AT ties on incoming RT ties"
            reverse = True

    # Output
    summary_csv_writer = csv.writer(open("results/spss/edges/%s_edges_summary.csv" % project, "wb"))
    summary_csv_writer.writerow(["Community", "Retweets Inside Community", "Retweets between Communities"])
    if reverse:
        bridging_csv_writer = csv.writer(open("results/spss/edges/%s_reverse_bridging_edges.csv" % project, "wb"))
        bonding_csv_writer = csv.writer(open("results/spss/edges/%s_reverse_bonding_edges.csv" % project, "wb"))
    else:
        bridging_csv_writer = csv.writer(open("results/spss/edges/%s_bridging_edges.csv" % project, "wb"))
        bonding_csv_writer = csv.writer(open("results/spss/edges/%s_bonding_edges.csv" % project, "wb"))

    # Read in the partitions
    tmp = hp.get_partition(partitionfile)
    partitions = tmp[0]
    groups = tmp[1]

    ff_bridging_edges = defaultdict(dict)
    ff_bonding_edges = defaultdict(dict)
    at_bridging_edges = defaultdict(dict)
    at_bonding_edges = defaultdict(dict)
    rt_bridging_edges = defaultdict(list)
    rt_bonding_edges = defaultdict(list)
    total_bridging_edges = 0
    total_bonding_edges = 0

    i = 0
    for partition in partitions:
        ################ FF Edges ######################
        # Collect the FF edges between groups
        for edge in nx.edge_boundary(FF_all, partition):
            if FF_all.has_edge(edge[1], edge[0]):
                ff_bridging_edges[edge[0]][edge[1]] = "ff_recip"
            else:
                ff_bridging_edges[edge[0]][edge[1]] = "ff_non_recip"
        # Collect the FF edges inside the group
        for edge in FF_all.subgraph(partition).edges():
            if FF_all.has_edge(edge[1], edge[0]):
                ff_bonding_edges[edge[0]][edge[1]] = "ff_recip"
            else:
                ff_bonding_edges[edge[0]][edge[1]] = "ff_non_recip"

        ################ AT Edges ######################
        # TODO its missing the reciprocated edges that have a weight > 1
        # Idea 1: We might simply add up the incoming and outgoing edges to a total weight
        # Collect the AT edges that are between groups
        for edge in nx.edge_boundary(AT_all, partition):
            if AT_all.has_edge(edge[1], edge[0]):
                if AT_all.get_edge_data(*edge)["weight"] == 1:
                    at_bridging_edges[edge[0]][edge[1]] = "at_recip"
            else:
                if AT_all.get_edge_data(*edge)["weight"] == 1:
                    at_bridging_edges[edge[0]][edge[1]] = "at_non_recip_w1"
                else:
                    at_bridging_edges[edge[0]][edge[1]] = AT_all.get_edge_data(*edge)["weight"]
        # Collect the AT edges that are inside the group
        for edge in AT_all.subgraph(partition).edges():
            if AT_all.has_edge(edge[1], edge[0]):
                if AT_all.get_edge_data(*edge)["weight"] == 1:
                    at_bonding_edges[edge[0]][edge[1]] = "at_recip"
            else:
                if AT_all.get_edge_data(*edge)["weight"] == 1:
                    at_bonding_edges[edge[0]][edge[1]] = "at_non_recip_w1"
                else:
                    at_bonding_edges[edge[0]][edge[1]] = AT_all.get_edge_data(*edge)["weight"]

        ################ RT Edges ######################
        # Collect the RT edges between groups
        tmp_rt_bridging_edges = 0
        for edge in nx.edge_boundary(RT_all, partition):
            tmp_rt_bridging_edges += RT_all.get_edge_data(*edge)["weight"]
            rt_bridging_edges[RT_all.get_edge_data(*edge)["weight"]].append((edge[0], edge[1]))
        total_bridging_edges += tmp_rt_bridging_edges
        # Collect the RT edges inside group
        tmp_rt_bonding_edges = 0
        for edge in RT_all.subgraph(partition).edges():
            tmp_rt_bonding_edges += RT_all.get_edge_data(*edge)["weight"]
            rt_bonding_edges[RT_all.get_edge_data(*edge)["weight"]].append((edge[0], edge[1]))
        total_bonding_edges += tmp_rt_bonding_edges

        summary_csv_writer.writerow([groups[i], tmp_rt_bonding_edges, tmp_rt_bridging_edges])
        print "Community %s, Total Retweets inside: %s, Total Retweets between %s" % (
            groups[i], tmp_rt_bonding_edges, tmp_rt_bridging_edges)
        i += 1

    print "Total Bonding Edges %s" % total_bonding_edges
    print "Total Bridging Edges %s" % total_bridging_edges

    ################## BONDING: Influence of AT strengths on bonding retweets ##################
    bonding_flow = defaultdict(list)
    for rt_strength, retweets in rt_bonding_edges.iteritems():
        for retweet in retweets:
            value = None
            try:
                if reverse:
                    value = at_bonding_edges[retweet[1]][retweet[0]]  # Reverse
                    del at_bonding_edges[retweet[1]][retweet[0]]  # delete that entry, reverse
                else:
                    value = at_bonding_edges[retweet[0]][retweet[1]]  # Same direction
                    del at_bonding_edges[retweet[0]][retweet[1]]  # delete that entry, same direction
            except KeyError:
                pass  # tie not present (was a bare `except: ""`)
            if value == None:
                # If the AT Network led to no diffusion ONLY then check the FF network
                try:
                    if reverse:
                        value = ff_bonding_edges[retweet[1]][retweet[0]]  # Reverse
                        del ff_bonding_edges[retweet[1]][retweet[0]]  # delete that entry, reverse
                    else:
                        value = ff_bonding_edges[retweet[0]][retweet[1]]  # Same direction
                        del ff_bonding_edges[retweet[0]][retweet[1]]  # delete that entry, same direction
                except KeyError:
                    pass
            if value == None:
                # A retweet happened despite there being no ties at all
                value = "no_tie"
            bonding_flow[value].append(rt_strength)

    bonding_no_flow = {}
    # Count the AT ties that led to no diffusion
    for k, v1 in at_bonding_edges.iteritems():
        for k, value in v1.iteritems():
            if bonding_no_flow.has_key(value):
                bonding_no_flow[value] += 1
            else:
                bonding_no_flow[value] = 0
    # Count the FF ties that led to no diffusion
    for k, v1 in ff_bonding_edges.iteritems():
        for k, value in v1.iteritems():
            if bonding_no_flow.has_key(value):
                bonding_no_flow[value] += 1
            else:
                bonding_no_flow[value] = 0

    ################## BRIDGING: Influence of AT strengths on bridging retweets ##################
    bridging_flow = defaultdict(list)
    for rt_strength, retweets in rt_bridging_edges.iteritems():
        for retweet in retweets:
            value = None
            try:
                if reverse:
                    value = at_bridging_edges[retweet[1]][retweet[0]]  # Reverse
                    del at_bridging_edges[retweet[1]][retweet[0]]  # delete that entry, reverse
                else:
                    value = at_bridging_edges[retweet[0]][retweet[1]]  # Same direction
                    del at_bridging_edges[retweet[0]][retweet[1]]  # delete that entry, same direction
            except KeyError:
                pass
            if value == None:
                # If the AT Network led to no diffusion ONLY then check the FF network
                try:
                    if reverse:
                        value = ff_bridging_edges[retweet[1]][retweet[0]]  # Reverse
                        del ff_bridging_edges[retweet[1]][retweet[0]]  # delete that entry, reverse
                    else:
                        value = ff_bridging_edges[retweet[0]][retweet[1]]  # Same direction
                        del ff_bridging_edges[retweet[0]][retweet[1]]  # delete that entry, same direction
                except KeyError:
                    pass
            if value == None:
                # A retweet happened despite there being no ties at all
                value = "no_tie"
            bridging_flow[value].append(rt_strength)

    bridging_no_flow = {}
    # Count the AT ties that led to no diffusion
    for k, v1 in at_bridging_edges.iteritems():
        for k, value in v1.iteritems():
            if bridging_no_flow.has_key(value):
                bridging_no_flow[value] += 1
            else:
                bridging_no_flow[value] = 0
    # Count the FF ties that led to no diffusion
    for k, v1 in ff_bridging_edges.iteritems():
        for k, value in v1.iteritems():
            if bridging_no_flow.has_key(value):
                bridging_no_flow[value] += 1
            else:
                bridging_no_flow[value] = 0

    ########################### Output ###########################
    bridging_csv_writer.writerow(["bridging_tie_type", "#_ties_w_retweets", "#_ties_w_o_retweets",
                                  "#_retweets", "%_of_total", "retweets/#_ties_w_o_retweets",
                                  "retweets/#_ties_w_retweets", "std"])
    bonding_csv_writer.writerow(["bonding_tie_type", "#_ties_w_retweets", "#_ties_w_o_retweets",
                                 "#_retweets", "%_of_total", "retweets/#_ties_w_o_retweets",
                                 "retweets/#_ties_w_retweets", "std"])

    # BRIDGING TIES
    bridging_total = [val for subl in bridging_flow.values() for val in subl]
    bridging_noflow_total = sum(bridging_no_flow.values())
    for k, v in bridging_flow.iteritems():
        if bridging_no_flow.has_key(k) and bridging_no_flow[k] != 0 and len(bridging_flow[k]) > 5:
            ratio = sum(bridging_flow[k]) / bridging_no_flow[k]
            of_total = sum(bridging_flow[k]) / float(sum(bridging_total))
            std = np.std(bridging_flow[k])
            average = np.average(bridging_flow[k])
            bridging_csv_writer.writerow([k, len(bridging_flow[k]), bridging_no_flow[k],
                                          sum(bridging_flow[k]), of_total, ratio, average, std])
        if k == "no_tie":
            std = np.std(bridging_flow[k])
            average = np.average(bridging_flow[k])
            bridging_csv_writer.writerow([k, len(bridging_flow[k]), 0, sum(bridging_flow[k]),
                                          0, 0, average, std])
    std = np.std(bridging_total)
    average = np.average(bridging_total)
    bridging_csv_writer.writerow(["total", len(bridging_total), bridging_noflow_total,
                                  sum(bridging_total), 1,
                                  sum(bridging_total) / float(bridging_noflow_total),
                                  average, std])

    # BONDING TIES
    bonding_total = [val for subl in bonding_flow.values() for val in subl]
    bonding_noflow_total = sum(bonding_no_flow.values())
    for k, v in bonding_flow.iteritems():
        if bonding_no_flow.has_key(k) and bonding_no_flow[k] != 0 and len(bonding_flow[k]) > 5:
            ratio = sum(bonding_flow[k]) / bonding_no_flow[k]
            of_total = sum(bonding_flow[k]) / float(sum(bonding_total))  # fixed: was bridging_flow[k]
            std = np.std(bonding_flow[k])
            average = np.average(bonding_flow[k])
            bonding_csv_writer.writerow([k, len(bonding_flow[k]), bonding_no_flow[k],
                                         sum(bonding_flow[k]), of_total, ratio, average, std])
        if k == "no_tie":
            std = np.std(bonding_flow[k])
            average = np.average(bonding_flow[k])
            bonding_csv_writer.writerow([k, len(bonding_flow[k]), 0, sum(bonding_flow[k]),
                                         0, 0, average, std])
    std = np.std(bonding_total)
    average = np.average(bonding_total)
    bonding_csv_writer.writerow(["total", len(bonding_total), bonding_noflow_total,
                                 sum(bonding_total), 1,
                                 sum(bonding_total) / float(bonding_noflow_total),  # fixed NameError: was bonding_no_flow_total
                                 average, std])
def draw_cut_graph(G, partition_dict=None, pos=None, node_size=1000,
                   edge_width=3, font_size=12, node_label=True, title=''):
    """Draw a cut graph G using Matplotlib."""
    if partition_dict:
        nx.set_node_attributes(G, gc.PARTITION, partition_dict)
    if not pos:
        pos = nx.circular_layout(G, scale=20)
    blue_nodes, black_nodes, undecided_nodes, marked_nodes = gc.get_partitions(G)
    # Draw nodes and edges of the first partition
    nx.draw_networkx_nodes(G, pos, blue_nodes, node_size=node_size, node_color='blue')
    nx.draw_networkx_edges(G, pos, nx.edge_boundary(G, blue_nodes, blue_nodes),
                           width=edge_width, edge_color='blue')
    # Draw nodes and edges of the second partition
    nx.draw_networkx_nodes(G, pos, black_nodes, node_size=node_size, node_color='black')
    nx.draw_networkx_edges(G, pos, nx.edge_boundary(G, black_nodes, black_nodes),
                           width=edge_width, edge_color='black')
    # Draw undecided nodes and edges
    nx.draw_networkx_nodes(G, pos, undecided_nodes, node_size=node_size, node_color='magenta')
    nx.draw_networkx_edges(G, pos, nx.edge_boundary(G, undecided_nodes, undecided_nodes),
                           width=edge_width, edge_color='magenta')
    nx.draw_networkx_edges(G, pos, nx.edge_boundary(G, blue_nodes, undecided_nodes),
                           width=edge_width, style='dotted', edge_color='magenta')
    nx.draw_networkx_edges(G, pos, nx.edge_boundary(G, undecided_nodes, black_nodes),
                           width=edge_width, style='dotted', edge_color='magenta')
    # Draw marked nodes and edges
    nx.draw_networkx_nodes(G, pos, marked_nodes, node_size=node_size, node_color='red')
    nx.draw_networkx_edges(G, pos, nx.edge_boundary(G, marked_nodes, marked_nodes),
                           width=edge_width, edge_color='red')
    # Draw edges between marked and unmarked
    nx.draw_networkx_edges(G, pos, nx.edge_boundary(G, marked_nodes, blue_nodes),
                           width=edge_width, edge_color='orange')
    nx.draw_networkx_edges(G, pos, nx.edge_boundary(G, marked_nodes, black_nodes),
                           width=edge_width, edge_color='orange')
    nx.draw_networkx_edges(G, pos, nx.edge_boundary(G, marked_nodes, undecided_nodes),
                           width=edge_width, edge_color='orange')
    # Draw cut edges
    nx.draw_networkx_edges(G, pos, nx.edge_boundary(G, blue_nodes, black_nodes),
                           width=edge_width, style='dashed', edge_color='gray')
    if node_label:
        nx.draw_networkx_labels(G, pos, font_color='white', font_size=font_size,
                                font_weight='bold')
    plt.title(title)
    plt.axis('off')
# Do a clustering based on positive/negative Laplacian values:
print "Laplacian based clustering:"
print "Zeroth eigenvalue (should be zero):", ev[ev_ranks[0]]
print "First eigenvalue:", ev[ev_ranks[1]]
evec1 = evec[:, ev_ranks[1]]
group1 = [nodes[i] for i in range(len(evec1)) if evec1[i] >= 0]
group2 = [nodes[i] for i in range(len(evec1)) if evec1[i] < 0]
print "Group sizes, laplacian positive/negative split:", len(group1), len(group2)
#
# Do a check to ensure that all nodes were partitioned
assert set(group1) | set(group2) == set(g.nodes())

# Compute the boundary edges:
boundary_edges = networkx.edge_boundary(g, group1)
print "Laplacian cut size is:", len(boundary_edges)
# This should be reversible - boundary of group1 should equal boundary
# of group 2. Do a test, to ensure that we used our tools correctly.
assert len(boundary_edges) == len(networkx.edge_boundary(g, group2))
print "group1:", print_group(group1)
print "group2:", print_group(group2)
print

# Do a clustering where the lowest 16 eigenvalues are in one cluster,
# and then below, a clustering where the lowest 18 eigenvalues are in
# one cluster.
ranks = numpy.argsort(evec1)
print "Clustering based on 16 lowest values of eigenvalue 1"
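# Sketch (added for illustration): a self-contained Python 3 version of the
# Fiedler-vector split above, on a graph with an obvious two-cluster
# structure. numpy.linalg.eigh returns eigenvalues in ascending order, so
# column 1 of the eigenvector matrix is the Fiedler vector:
import networkx
import numpy

g = networkx.barbell_graph(5, 0)  # two K5s joined by one bridge edge
nodes = list(g.nodes())
L = networkx.laplacian_matrix(g, nodelist=nodes).toarray()
ev, evec = numpy.linalg.eigh(L)
fiedler = numpy.asarray(evec[:, 1]).ravel()
group1 = [nodes[i] for i in range(len(fiedler)) if fiedler[i] >= 0]
group2 = [nodes[i] for i in range(len(fiedler)) if fiedler[i] < 0]
# the sign split recovers the two bells, cutting only the bridge edge
assert len(list(networkx.edge_boundary(g, group1))) == 1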
def boundary_edges(self, nbunch, nbunch2=None):
    nbunch = (n.node_id for n in nbunch)  # only store the id in overlay
    return iter(overlay_edge(self, src, dst)
                for (src, dst) in nx.edge_boundary(self._graph, nbunch, nbunch2))
def aggregate_nodes(NmGraph, nodes, retain=[]):
    """Combines connected nodes into a single node"""
    try:
        retain.lower()
        retain = [retain]  # was a string, put into list
    except AttributeError:
        pass  # already a list

    nodes = list(unwrap_nodes(nodes))
    graph = unwrap_graph(NmGraph)
    subgraph = graph.subgraph(nodes)
    if not len(subgraph.edges()):
        #print "Nothing to aggregate for %s: no edges in subgraph"
        pass
    total_added_edges = []
    if graph.is_directed():
        component_nodes_list = nx.strongly_connected_components(subgraph)
    else:
        component_nodes_list = nx.connected_components(subgraph)
    for component_nodes in component_nodes_list:
        if len(component_nodes) > 1:
            base = component_nodes.pop()  # choose one base device to retain
            nodes_to_remove = set(component_nodes)  # remaining nodes, set for fast membership test
            external_edges = nx.edge_boundary(graph, component_nodes)
            edges_to_add = []
            for src, dst in external_edges:
                # src is the internal node to remove
                if src == base or dst == base:
                    continue  # don't alter edges from base
                else:
                    if src in nodes_to_remove:
                        # edge from component to outside
                        interfaces = graph[src][dst]["_interfaces"]
                        dst_int_id = interfaces[dst]
                        data = dict((key, graph[src][dst][key]) for key in retain)
                        data['_interfaces'] = {dst: dst_int_id}
                        edges_to_add.append((base, dst, data))
                        if graph.is_directed():
                            # other direction
                            # TODO: check which data should be copied
                            dst_data = dict((key, graph[src][dst][key]) for key in retain)
                            dst_data['_interfaces'] = {dst: dst_int_id}
                            edges_to_add.append((dst, base, dst_data))
                    else:
                        # edge from outside into component
                        interfaces = graph[dst][src]["_interfaces"]
                        src_int_id = interfaces[src]
                        data = dict((key, graph[dst][src][key]) for key in retain)
                        data['_interfaces'] = {src: src_int_id}
                        edges_to_add.append((base, src, data))
                        if graph.is_directed():
                            # other direction
                            # TODO: check which data should be copied
                            dst_data = dict((key, graph[src][dst][key]) for key in retain)
                            dst_data['_interfaces'] = {src: src_int_id}
                            edges_to_add.append((src, base, dst_data))
            graph.add_edges_from(edges_to_add)
            total_added_edges += edges_to_add
            graph.remove_nodes_from(nodes_to_remove)
    return wrap_edges(NmGraph, total_added_edges)