def _assert_graphs_nx_equal(g1: nx.Graph, g2: nx.Graph):
    # Check number of nodes and edges
    assert g1.number_of_nodes() == g2.number_of_nodes()
    assert g1.number_of_edges() == g2.number_of_edges()

    # Check node features
    for (node_id_1, node_features_1), (node_id_2, node_features_2) in \
            zip(g1.nodes(data='features'), g2.nodes(data='features')):
        assert node_id_1 == node_id_2
        assert (node_features_1 is not None) == (node_features_2 is not None)
        if node_features_1 is not None and node_features_2 is not None:
            torch.testing.assert_allclose(node_features_1, node_features_2)

    # Check edge features
    for (sender_id_1, receiver_id_1, edge_features_1), \
            (sender_id_2, receiver_id_2, edge_features_2) in \
            zip(g1.edges(data='features'), g2.edges(data='features')):
        assert sender_id_1 == sender_id_2
        assert receiver_id_1 == receiver_id_2
        assert (edge_features_1 is not None) == (edge_features_2 is not None)
        if edge_features_1 is not None and edge_features_2 is not None:
            torch.testing.assert_allclose(edge_features_1, edge_features_2)

    # Check graph features
    assert has_global_features(g1) == has_global_features(g2)
    if has_global_features(g1) and has_global_features(g2):
        torch.testing.assert_allclose(g1.graph['features'], g2.graph['features'])
def minimum_vertex_cover(graph: nx.Graph) -> set[int]:
    """Solves the minimum vertex cover problem for the given graph 'graph'.

    That is, finds a minimum subset of the nodes in 'graph' such that every
    edge in 'graph' has at least one endpoint in the subset.

    Args:
        graph (nx.Graph): A graph

    Returns:
        A set of nodes constituting the minimum vertex cover.
    """
    bqm = generate_minimum_vertex_cover_bqm(graph)
    sample_label = "Minimum vertex cover (" + str(
        graph.number_of_nodes()) + " nodes, " + str(
            graph.number_of_edges()) + " edges)"
    minimum_vertex_cover_info = sampler.sample_dwave(bqm, sample_label)
    assert (
        graph.number_of_nodes() == len(minimum_vertex_cover_info)
    ), "Something went wrong... the minimum vertex cover info doesn't match the input graph."
    minimum_vertex_cover: set[int] = set()
    for node in graph.nodes:
        if minimum_vertex_cover_info[node]:
            minimum_vertex_cover.add(node)
    assert is_vertex_cover(graph, minimum_vertex_cover), \
        "The subset of nodes does not constitute a cover"
    return minimum_vertex_cover
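# The `is_vertex_cover` helper used in the assert above is not shown in this
# snippet; a minimal sketch of what it presumably checks (every edge has at
# least one endpoint in the candidate set):
def is_vertex_cover(graph: nx.Graph, cover: set) -> bool:
    return all(u in cover or v in cover for u, v in graph.edges)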
def generate_minimum_vertex_cover_bqm(
        graph: nx.Graph) -> dimod.BinaryQuadraticModel:
    """Generates an instance of BinaryQuadraticModel with a QUBO formulation
    of the minimum vertex cover problem for the graph given as argument.

    The implementation is based on Fred Glover et al.: "Quantum Bridge
    Analytics I: A Tutorial on Formulating and Using QUBO Models",
    arXiv:1811.11538

    Args:
        graph (nx.Graph): A graph

    Returns:
        An instance of BinaryQuadraticModel with a QUBO formulation of the
        minimum vertex cover problem for 'graph'.
    """
    linear = np.zeros(shape=graph.number_of_nodes())
    quadratic = np.zeros(shape=(graph.number_of_nodes(),
                                graph.number_of_nodes()))
    penalty = math.ceil(1.5 * graph.number_of_nodes())
    for (i, j) in graph.edges:
        linear[i] += -penalty
        linear[j] += -penalty
        quadratic[i, j] = penalty
    for i in graph.nodes:
        linear[i] += 1
    offset = 0.0
    vartype = dimod.BINARY
    return dimod.BinaryQuadraticModel(linear, quadratic, offset, vartype)
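# A hypothetical usage sketch: solving the BQM with dimod's brute-force
# ExactSolver, which enumerates all 2^n states and is therefore only viable
# for tiny graphs. The function and variable names below are illustrative.
def example_minimum_vertex_cover_bqm():
    triangle = nx.cycle_graph(3)
    bqm = generate_minimum_vertex_cover_bqm(triangle)
    best = dimod.ExactSolver().sample(bqm).first.sample
    # Nodes assigned 1 form the cover; any two of the three nodes suffice
    return {node for node, chosen in best.items() if chosen}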
def __init__(self, graph: nx.Graph, dimension: int, seed: int,
             iterations: int, learning_rate: float, alpha: float,
             beta: float, gamma: float, batch_size: int,
             layers: t.List, dropouts: t.List) -> None:
    super().__init__(graph, dimension)
    self._batch_size = batch_size
    self._dimension = dimension
    self._iterations = iterations
    self._adj_matrix = nx.to_scipy_sparse_matrix(graph, dtype=float)
    self._network = sdn.SdneNetwork(
        seed, dimension, layers, dropouts, graph.number_of_nodes(), gamma)
    self._network.construct(dimension, graph.number_of_nodes(),
                            learning_rate, alpha, beta,
                            tf.train.AdamOptimizer)
def __init__(self, G: nx.Graph, s: int):
    """Initialize the graph and the source vertex."""
    self.dist_to = [None] * G.number_of_nodes()
    self._edge_to = [None] * G.number_of_nodes()
    self.__s = s
    self.__G = G
    self.__bfs()
def getFitness(graph: nx.Graph, chromosome: list):
    numColors = len(set(chromosome))
    totalFitness = graph.number_of_nodes() * numColors
    for edge in graph.edges():
        if chromosome[edge[0]] == chromosome[edge[1]]:
            totalFitness += numColors
    # A fitness equal to the conflict-free baseline means the coloring is valid
    if totalFitness == graph.number_of_nodes() * numColors:
        return totalFitness, True
    return totalFitness, False
def genarate_dataset(graph: nx.Graph) -> tuple[pd.DataFrame, nx.Graph]:
    print("===================")
    # get all unlinked node pairs and sample from them (negative entries)
    print("generating negative entries")
    node_num = graph.number_of_nodes()
    node_1, node_2 = list(), list()
    for i in tqdm(range(node_num)):
        for j in range(i):
            if not graph.has_edge(i, j):
                node_1.append(i)
                node_2.append(j)
    unlinked = pd.DataFrame({"node_1": node_1, "node_2": node_2})
    index = random.sample(range(len(node_1)), k=int(len(node_1) / 100))
    unlinked = unlinked.loc[index]
    unlinked['link'] = 0
    print("{} negative entries".format(int(len(node_1) / 100)))

    # get all existing edges
    print("generating positive entries")
    node_1, node_2 = list(), list()
    for u, v in graph.edges():
        node_1.append(u)
        node_2.append(v)
    linked = pd.DataFrame({"node_1": node_1, "node_2": node_2})

    # get all removable edges (positive entries),
    # i.e. edges whose removal does not disconnect the graph
    temp = linked.copy()
    initial_node_count = graph.number_of_nodes()
    omissible_links = []
    for i in tqdm(linked.index.values):
        # remove a node pair and build a new graph
        G_temp = nx.from_pandas_edgelist(temp.drop(index=i),
                                         source="node_1",
                                         target="node_2",
                                         create_using=nx.Graph())
        # check whether removing this edge splits the graph
        if (nx.number_connected_components(G_temp) == 1) and \
                (len(G_temp.nodes) == initial_node_count):
            omissible_links.append(i)
            temp = temp.drop(index=i)
    print("{} positive entries".format(len(omissible_links)))

    # total dataset
    data = linked.loc[omissible_links]
    data['link'] = 1
    data = pd.concat([data, unlinked[['node_1', 'node_2', 'link']]])
    print("===================")
    print("Dataset info:")
    print(data['link'].value_counts())
    print("===================")
    data = data.reset_index().drop('index', axis=1)
    return data, G_temp
def getConvertedNodes(graph: networkx.Graph):
    convertedNodes = [0] * graph.number_of_nodes()
    convertedEdges = getConvertedEdges(graph)
    for i in range(graph.number_of_nodes()):
        convertedNodes[i] = Node.Node(
            putAsciiValues(i, graph.number_of_nodes()), convertedEdges[i])
    return convertedNodes
def get_adj_matrix(graph: nx.Graph) -> np.array:
    shape = (graph.number_of_nodes(), graph.number_of_nodes())
    adj_matrix = np.zeros(shape=shape, dtype=float)
    # materialize the NodeView so nodes can be indexed positionally
    nodes = list(graph.nodes())
    for i_index in range(len(nodes)):
        node = nodes[i_index]
        for neighbor in graph.neighbors(node):
            j_index = nodes.index(neighbor)
            adj_matrix[i_index][j_index] = graph[node][neighbor]['weight']
    return adj_matrix
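# A small sanity-check sketch for the function above; it assumes every edge
# carries a 'weight' attribute, since a missing weight would raise a KeyError.
def example_get_adj_matrix():
    g = nx.Graph()
    g.add_weighted_edges_from([(0, 1, 2.0), (1, 2, 3.0)])
    m = get_adj_matrix(g)
    assert m[0][1] == 2.0 and m[1][0] == 2.0  # symmetric
    assert m[0][2] == 0.0  # absent edges stay zero
    return m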
def __init__(self, G: nx.Graph):
    """Initialize the marker arrays and start the search."""
    self.__G = G
    self.__marked = [False] * G.number_of_nodes()
    self.__color = [False] * G.number_of_nodes()
    self.__is_two_colorable = True
    for v in G.nodes:
        if not self.__marked[v]:
            self.__dfs(v)
def create_rand_sol(route: nx.Graph, max_num_of_bus=5, min_route_length=2):
    sol = []
    num_of_bus = np.random.randint(1, max_num_of_bus + 1)
    for bus in range(num_of_bus):
        sol.append([])
        route_length = np.random.randint(min_route_length,
                                         route.number_of_nodes() + 1)
        actual_node = np.random.randint(1, route.number_of_nodes())
        for node in range(route_length):
            temp_neighbors = list(route.neighbors(actual_node))
            actual_node = temp_neighbors[np.random.randint(0, len(temp_neighbors))]
            sol[bus].append(actual_node)
    return sol
def write_metis_graph(path: Union[str, Path], graph: nx.Graph,
                      costs: Optional[np.ndarray] = None, *,
                      comments: Optional[List[str]] = None) -> None:
    """
    Writes graph to file.

    Either writes an unweighted instance (fmt = 0) or a weighted instance
    (fmt = 1) when `costs` is given.

    For weighted instances the output is a fully connected graph. Only the
    upper triangular adjacency matrix is written, i.e. all vertex pairs where
    u < v. The edge weight is negative if no edge is present in the graph.

    For unweighted instances the output is the graph directly.

    Parameters
    ----------
    path : Path
    graph : Graph
    costs : symmetric cost matrix, optional
    comments : list of comments, optional

    References
    ----------
    [1]: https://people.sc.fsu.edu/~jburkardt/data/metis_graph/metis_graph.html
    """
    with Path(path).open('w') as file:
        if comments is not None:
            for comment in comments:
                file.write(f"% {comment}\n")
        if costs is None:
            n = graph.number_of_nodes()
            m = graph.number_of_edges()
            fmt = 0
            file.write(f"{n} {m} {fmt}\n")
            for u in range(n):
                file.write(" ".join([f"{v + 1}" for v in graph.neighbors(u)]) + "\n")
        else:
            n = graph.number_of_nodes()
            m = n * (n - 1) // 2
            fmt = 1
            file.write(f"{n} {m} {fmt}\n")
            for u in range(n):
                file.write(" ".join([
                    f"{v + 1} {costs[u][v] if graph.has_edge(u, v) else -costs[u][v]}"
                    for v in range(u + 1, n)
                ]) + "\n")
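# Hypothetical usage: write a 4-cycle as an unweighted METIS instance. Note
# that the writer iterates `range(n)`, so it assumes nodes labeled 0..n-1.
def example_write_metis_graph():
    write_metis_graph("cycle.graph", nx.cycle_graph(4),
                      comments=["4-cycle, unweighted"])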
def is_tree(g: Graph) -> bool:
    # A tree has exactly n - 1 edges and is connected
    if g.number_of_nodes() - 1 != g.number_of_edges():
        return False
    visited = set()
    queue = {next(iter(g.nodes))}
    while len(queue):
        current = queue.pop()
        visited.add(current)
        for v in g.neighbors(current):
            if v not in visited:
                visited.add(v)
                queue.add(v)
    return len(visited) == g.number_of_nodes()
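# Quick sanity checks for is_tree (assuming networkx is imported as nx): a
# path passes, a cycle fails on the edge count, and a cyclic graph with an
# isolated node fails the reachability check.
def example_is_tree():
    assert is_tree(nx.path_graph(4))
    assert not is_tree(nx.cycle_graph(4))
    g = nx.Graph([(0, 1), (1, 2), (0, 2)])
    g.add_node(3)  # n - 1 edges, but disconnected and cyclic
    assert not is_tree(g)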
def bellman_ford(graph: nx.Graph, start):
    vertex_mark = [math.inf for i in range(graph.number_of_nodes())]
    parent_node = [None for i in range(graph.number_of_nodes())]
    vertex_mark[start] = 0
    for _ in range(graph.number_of_nodes() - 1):
        for s, d, w in graph.edges.data('weight'):
            # the graph is undirected, so relax each edge in both directions
            if vertex_mark[s] + w < vertex_mark[d]:
                vertex_mark[d] = vertex_mark[s] + w
                parent_node[d] = s
            if vertex_mark[d] + w < vertex_mark[s]:
                vertex_mark[s] = vertex_mark[d] + w
                parent_node[s] = d
    return vertex_mark, parent_node
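# A usage sketch on a small weighted graph; node labels are assumed to be
# consecutive integers starting at 0, matching the list-based tables above.
def example_bellman_ford():
    g = nx.Graph()
    g.add_weighted_edges_from([(0, 1, 4), (1, 2, 1), (0, 2, 7)])
    dist, parent = bellman_ford(g, 0)
    assert dist == [0, 4, 5]  # 0 -> 1 -> 2 is cheaper than the direct edge
    return parent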
def divide(g: Graph):
    if g.number_of_nodes() == 0:
        return {}
    node = next(iter(g.nodes))
    queue = [node]
    division = {node: True}
    while len(queue):
        u = queue.pop()
        for v in g.neighbors(u):
            if v not in division:
                division[v] = not division[u]
                queue.append(v)
    assert len(division) == g.number_of_nodes()
    return division
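# Usage sketch (assuming networkx is imported as nx): 2-coloring the sides of
# an even cycle. The assert in `divide` means the input is assumed connected;
# note the function does not verify the division is a proper 2-coloring, so an
# odd cycle still returns a division with some same-colored edge.
def example_divide():
    sides = divide(nx.cycle_graph(4))
    assert sides[0] != sides[1] and sides[0] == sides[2]
    return sides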
def getConvertedEdges(graph: networkx.Graph):
    # One fresh list per row, so the rows don't all share the same reference
    # --> https://stackoverflow.com/questions/240178/list-of-lists-changes-reflected-across-sublists-unexpectedly
    convertedEdges = [[] for i in range(graph.number_of_nodes())]
    for i in range(len(graph.nodes)):
        for u, v, data in graph.edges(data=True):
            if u > i:
                # reached the end of the edge listing for node i
                break
            elif i == u:
                if i == v:
                    itemArgs = {Utils.INITIALPOINT: putAsciiValues(u, graph.number_of_nodes()),
                                Utils.FINALPOINT: putAsciiValues(v, graph.number_of_nodes()),
                                Utils.DISTANCE: data['weight']}
                    convertedEdges[i].append(Edge.Edge(**itemArgs))
                if i != v:
                    itemArgs = {Utils.INITIALPOINT: putAsciiValues(u, graph.number_of_nodes()),
                                Utils.FINALPOINT: putAsciiValues(v, graph.number_of_nodes()),
                                Utils.DISTANCE: data['weight']}
                    convertedEdges[i].append(Edge.Edge(**itemArgs))
                    itemArgs = {Utils.INITIALPOINT: putAsciiValues(v, graph.number_of_nodes()),
                                Utils.FINALPOINT: putAsciiValues(u, graph.number_of_nodes()),
                                Utils.DISTANCE: data['weight']}
                    convertedEdges[v].append(Edge.Edge(**itemArgs))
            else:
                continue
    return convertedEdges
def __init__(self, G: nx.Graph):
    """Build the connected-component information for the graph."""
    self.__marked = [False] * G.number_of_nodes()
    self.__G = G
    self.__component = [None] * G.number_of_nodes()
    self.__count = 0
    for v in G.nodes:
        if not self.__marked[v]:
            self.__dfs(v)
            self.__count += 1
def graph_stats(G: nx.Graph) -> Dict[str, Any]:
    stats = dict()

    n, m = G.number_of_nodes(), G.number_of_edges()
    stats['number_of_vertices'] = n
    stats['number_of_edges'] = m
    stats['complexity'] = n * m
    stats['density'] = 2 * m / (n * (n - 1))

    stats['connected_components'] = []
    for G_hat in (G.subgraph(c) for c in nx.connected_components(G)):
        component_stats = dict()
        component_stats['number_of_vertices'] = G_hat.number_of_nodes()
        component_stats['number_of_edges'] = G_hat.number_of_edges()
        component_stats['diameter'] = nx.diameter(G_hat, usebounds=True)
        component_stats['radius'] = nx.radius(G_hat, usebounds=True)
        component_stats['center_size'] = len(nx.center(G_hat, usebounds=True))
        component_stats['periphery_size'] = len(
            nx.periphery(G_hat, usebounds=True))
        stats['connected_components'] += [component_stats]
    stats['number_of_connected_components'] = len(
        stats['connected_components'])

    stats['average_clustering_coefficient'] = nx.average_clustering(G)

    return stats
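# Usage sketch: statistics for two disjoint triangles. Note that the density
# formula divides by n * (n - 1), so graphs with fewer than two nodes raise.
def example_graph_stats():
    g = nx.disjoint_union(nx.complete_graph(3), nx.complete_graph(3))
    stats = graph_stats(g)
    assert stats['number_of_connected_components'] == 2
    assert stats['connected_components'][0]['diameter'] == 1
    return stats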
def branch_and_bound(g: nx.Graph, sub_cycle: list = None,
                     current_min: float = float("inf")) -> int:
    if sub_cycle is None:
        sub_cycle = [0]

    # A complete cycle: return its total weight, including the closing edge
    if len(sub_cycle) == g.number_of_nodes():
        weight = sum([g[sub_cycle[i]][sub_cycle[i + 1]]['weight']
                      for i in range(len(sub_cycle) - 1)])
        weight = weight + g[sub_cycle[-1]][sub_cycle[0]]['weight']
        return weight

    # Try the unused nodes in order of increasing distance from the last node
    unused_nodes = list()
    for v in g.nodes():
        if v not in sub_cycle:
            unused_nodes.append((g[sub_cycle[-1]][v]['weight'], v))
    unused_nodes = sorted(unused_nodes)

    for (d, v) in unused_nodes:
        assert v not in sub_cycle
        extended_subcycle = sub_cycle[:]
        extended_subcycle.append(v)
        # Only branch if the extension can still beat the best cycle so far
        if lower_bound(g, extended_subcycle) < current_min:
            new_min = branch_and_bound(g, extended_subcycle, current_min)
            if new_min < current_min:
                current_min = new_min

    return current_min
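# The `lower_bound` helper used for pruning is not shown in this snippet. A
# minimal admissible sketch: the weight of the partial cycle so far, plus the
# cheapest incident edge for every node not yet visited. Each unvisited node
# is incident to two edges of any completion, and each completion edge touches
# at most two unvisited nodes, so this sum never exceeds the completion cost.
def lower_bound(g: nx.Graph, sub_cycle: list) -> float:
    weight = sum(g[sub_cycle[i]][sub_cycle[i + 1]]['weight']
                 for i in range(len(sub_cycle) - 1))
    for v in g.nodes():
        if v not in sub_cycle:
            weight += min(g[v][u]['weight'] for u in g.neighbors(v))
    return weight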
def route_circuit(circuit: circuits.Circuit,
                  device_graph: nx.Graph,
                  *,
                  algo_name: Optional[str] = None,
                  router: Optional[Callable[..., SwapNetwork]] = None,
                  **kwargs) -> SwapNetwork:
    """Routes a circuit on a given device.

    Args:
        circuit: The circuit to route.
        device_graph: The device's graph, in which each vertex is a qubit and
            each edge indicates the ability to do an operation on those qubits.
        algo_name: The name of a routing algorithm. Must be in ROUTERS.
        router: The function that actually does the routing.
        **kwargs: Arguments to pass to the routing algorithm.

    Exactly one of algo_name and router must be specified.
    """
    if any(protocols.num_qubits(op) > 2 for op in circuit.all_operations()):
        raise ValueError('Can only route circuits with operations that act on'
                         ' at most 2 qubits.')
    if len(list(circuit.all_qubits())) > device_graph.number_of_nodes():
        raise ValueError('Number of logical qubits is greater than number'
                         ' of physical qubits.')

    if not (algo_name is None or router is None):
        raise ValueError(
            'At most one of algo_name or router can be specified.')
    if algo_name is not None:
        router = ROUTERS[algo_name]
    elif router is None:
        raise ValueError('No routing algorithm specified.')

    return router(circuit, device_graph, **kwargs)
def layout(self, graph: nx.Graph) -> np.ndarray:
    """
    Use OpenOrd to compute a graph layout

    Remarks:
        Requires OpenOrd to be installed and available in the $PATH
    """
    tempdir = tempfile.mkdtemp()

    # Save the graph in .int format
    rootfile = os.path.join(tempdir, "graph")
    with open(rootfile + ".int", 'w') as f:
        for (x, y, w) in graph.edges(data='weight'):
            f.write("%d\t%d\t%f\n" % (x, y, w))

    try:
        _ = check_output([
            os.path.join(self.openord_path, "layout"),
            "-c", str(self.edge_cutting),
            rootfile
        ])
    except CalledProcessError as e:
        shutil.rmtree(tempdir)
        raise e

    # Read back the coordinates
    with open(rootfile + ".icoord", 'r') as f:
        coords = np.zeros((graph.number_of_nodes(), 2))
        for line in f:
            items = line.split("\t")
            node = int(items[0])
            coords[node] = (float(items[1]), float(items[2]))

    shutil.rmtree(tempdir)
    return coords
def report_general_statistics(graph: networkx.Graph):
    nodes = graph.number_of_nodes()
    edges = graph.number_of_edges()
    density = edges / (nodes * (nodes - 1) / 2)
    print("=" * 80)
    print("""BASIC STATISTICS:
    Number of nodes: {nodescnt}
    Number of edges: {edgescnt}
    Density: {density}
    Number of components: {components}""".format(
        nodescnt=nodes,
        edgescnt=edges,
        density=density,
        components=networkx.number_connected_components(graph)))
    print("=" * 80)
def relative_stability(g1: nx.Graph, g2: nx.Graph, e1, e2):
    a1 = np.array(nx.adjacency_matrix(g1).todense())
    a2 = np.array(nx.adjacency_matrix(g2).todense())
    g1_length = g1.number_of_nodes()
    sub_node_set = set(g1.nodes).intersection(g2.nodes)
    f_numerator = 0
    s_numerator = 0
    s_denominator = 0
    f_denominator = 0
    for node in sub_node_set:
        f_numerator += np.sum(np.square(e2[node] - e1[node]))
        s_numerator += np.sum(np.square(a2[node][:g1_length] - a1[node]))
        f_denominator += np.square(e1[node])
        s_denominator += np.square(a1[node])
    f_numerator = np.sqrt(np.sum(f_numerator))
    s_numerator = np.sqrt(np.sum(s_numerator))
    f_denominator = np.sqrt(np.sum(f_denominator))
    s_denominator = np.sqrt(np.sum(s_denominator))
    res = (f_numerator / f_denominator) / (s_numerator / s_denominator)
    return res
def cycle_length(g: nx.Graph, cycle: list) -> int:
    assert len(cycle) == g.number_of_nodes()
    weights = 0
    for i in range(len(cycle) - 1):
        weights += g[cycle[i]][cycle[i + 1]]['weight']
    weights += g[cycle[-1]][cycle[0]]['weight']
    return weights
def refine_kpt(g: nx.Graph, k, kpt_star, epsilon_p, r_p):
    n = g.number_of_nodes()
    l = 1
    s_p = select_k_max(g.graph["free"], k, r_p)
    lambda_p = (2 + epsilon_p) * l * n * np.log(n) / (epsilon_p * epsilon_p)
    theta_p = lambda_p / kpt_star
    count = 0
    s_p = set(s_p)
    for i in range(int(theta_p)):
        r = create_rr_set(g)
        if s_p.intersection(r):
            count += 1
    f = count / int(theta_p)
    kpt_p = f * n / (1 + epsilon_p)
    return max(kpt_p, kpt_star)
def kpt_estimation(g: nx.Graph, k):
    n = g.number_of_nodes() * 1.0
    m = g.number_of_edges() * 1.0
    # l = 1.1282757460454809
    l = 1
    for i in range(1, int(np.log2(n))):
        rr_sets = []
        c = ((6.0 * l * np.log(n)) + 6 * np.log(np.log2(n))) * np.power(2.0, i * 1.0)
        sum_ = 0
        for j in range(int(c)):
            r = create_rr_set(g)
            # r = create_rr(g)
            rr_sets.append(r)
            k_r = 1.0 - np.power((1.0 - rr_set_weight(r, g) / m), k)
            sum_ += k_r
        if sum_ / c > 1.0 / np.power(2.0, i):
            return n * sum_ / (2.0 * c), rr_sets
    return 1, []
def _assert_graph_and_graph_nx_equals(graph: tg.Graph, graph_nx: nx.Graph):
    # Check number of nodes and edges
    assert graph_nx.number_of_nodes() == graph.num_nodes
    assert graph_nx.number_of_edges() == graph.num_edges

    # Check node features
    assert has_node_features(graph) == has_node_features(graph_nx)
    if has_node_features(graph) and has_node_features(graph_nx):
        for node_features_nx, node_features in \
                zip(graph_nx.nodes(data='features'), graph.node_features):
            torch.testing.assert_allclose(node_features_nx[1], node_features)

    # Check edge indexes
    for (sender_id_nx, receiver_id_nx, *_), sender_id, receiver_id in \
            zip(graph_nx.edges, graph.senders, graph.receivers):
        assert sender_id_nx == sender_id
        assert receiver_id_nx == receiver_id

    # Check edge features
    assert has_edge_features(graph) == has_edge_features(graph_nx)
    if has_edge_features(graph) and has_edge_features(graph_nx):
        for (*_, edge_features_nx), edge_features in \
                zip(graph_nx.edges(data='features'), graph.edge_features):
            torch.testing.assert_allclose(edge_features_nx, edge_features)

    # Check graph features
    assert has_global_features(graph) == has_global_features(graph_nx)
    if has_global_features(graph) and has_global_features(graph_nx):
        torch.testing.assert_allclose(graph_nx.graph['features'],
                                      graph.global_features)
def avg_shortest_path(G: nx.Graph):
    '''
    Calculate the average shortest path length between all pairs of vertices
    of the graph.

    G: Graph to calculate the average shortest path of
    '''
    # Algorithm
    # 1 For each node v
    #   1.1 Find the dijkstra cost from the node v
    #   1.2 For every path to u where u >= v, add the path length to the
    #       running total, weighted so the result is the mean over all pairs
    # 2 Return the total average

    # Check if graph is connected
    if not nx.is_connected(G):
        print("Graph is not connected.")
        return math.inf

    n = G.number_of_nodes()
    vertices = set(sorted(G.nodes))
    totalPathLength = 0
    for i in G.nodes:  # V * (E + V log V)
        pathLength = 0
        result = dijkstra(G, i, 'all')  # E V log V
        vertices.remove(i)
        for j in vertices:  # O(V)
            pathLength += result['cost'][j]
        # there are n * (n - 1) / 2 unordered pairs in total
        totalPathLength += (2 / (n * (n - 1))) * pathLength
    return totalPathLength
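# A hypothetical cross-check against networkx's built-in average (the custom
# `dijkstra` helper is not shown here and is assumed to return a dict with a
# 'cost' table of shortest-path costs keyed by target node).
def example_avg_shortest_path():
    g = nx.path_graph(4)
    nx.set_edge_attributes(g, 1, 'weight')
    assert math.isclose(avg_shortest_path(g),
                        nx.average_shortest_path_length(g, weight='weight'))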
def hcsw(graph: nx.Graph, multiplier_threshold: float = 2) -> nx.Graph:
    """Clusters a connected undirected weighted graph.

    Returns a graph with the same nodes but not necessarily connected
    """
    number_of_nodes = graph.number_of_nodes()
    logger.debug(f'Clustering graph with {number_of_nodes} nodes')

    # single-node graphs are already clustered
    if number_of_nodes < 2:
        logger.debug('Graph too small, exiting')
        return graph

    cut_weight, partitions = nx.algorithms.connectivity.stoer_wagner(graph)
    if not highly_connected(graph, cut_weight, multiplier_threshold):
        logger.debug('Graph not dense, performing cut')
        sub_graphs = [graph.subgraph(v).copy() for v in partitions]
        component_1 = hcsw(sub_graphs[0], multiplier_threshold)
        component_2 = hcsw(sub_graphs[1], multiplier_threshold)
        graph = nx.compose(component_1, component_2)
    else:
        logger.debug('Graph is dense, skipping cut')

    return graph
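# The `highly_connected` predicate is not shown in this snippet. A plausible
# sketch based on the Hartuv-Shamir HCS criterion (the min-cut weight must
# exceed half the number of nodes), with the multiplier relaxing the bar:
def highly_connected(graph: nx.Graph, cut_weight: float,
                     multiplier_threshold: float) -> bool:
    return cut_weight > graph.number_of_nodes() / multiplier_threshold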
def CreatTree_forOneGraph(root_nodes, G: nx.Graph, add_super_nodes=False,
                          max_depth=3):
    '''
    Create trees for one graph.

    :param G:
    :param root_nodes:
    :return: k trees for the graph
    '''
    # here we could use a super node to connect all the trees for one graph
    gtrees = []
    N = G.number_of_nodes()
    for rt in root_nodes:
        node_flag = np.zeros((N))
        # build an adjacency list; G.adjacency_list() was removed in networkx 2.x
        adj = G.adj
        adj_list = []
        for key, val in adj.items():
            nei = [k for k in val.keys()]
            adj_list.append(nei)
        one_tree = creat_onetree(rt, adj_list, node_flag, depth=0,
                                 max_depth=max_depth)
        gtrees.append(one_tree)
    if add_super_nodes:
        # TODO: connecting the trees with a super node is not implemented yet
        return
    return gtrees
def get_fbvs(self, graph: Graph):
    if is_acyclic(graph):
        return set()

    if type(graph) is not MultiGraph:
        graph = MultiGraph(graph)

    for i in range(1, graph.number_of_nodes()):
        result = self.get_fbvs_max_size(graph, i)
        if result is not None:
            return result  # in the worst case, result is n - 2 nodes
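# `is_acyclic` is not defined in this snippet; a minimal sketch using
# networkx's cycle search (assuming networkx is imported as nx; a
# NetworkXNoCycle exception means no cycle exists):
def is_acyclic(graph: Graph) -> bool:
    try:
        nx.find_cycle(graph)
        return False
    except nx.NetworkXNoCycle:
        return True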
def test_algorithms(algorithms, graph: nx.Graph):
    print()
    print("Testing graph with {0} nodes and {1} edges".format(
        graph.number_of_nodes(), graph.number_of_edges()))
    results = []
    for algorithm, name in algorithms:
        # make a copy of the graph in case the algorithm mutates it
        graph_copy = graph.copy()
        start_time = time.time()
        result = len(algorithm.get_fbvs(graph_copy))
        print("{0}: {1}, time: {2}".format(name, result, time.time() - start_time))
        results.append(result)
    assert results.count(results[0]) == len(results), \
        "The algorithms' results are not the same!"
def test_algorithms(algorithms, graph: Graph, k):
    print()
    print("Testing graph with {0} nodes and {1} edges, expected result: {2}"
          .format(graph.number_of_nodes(), graph.number_of_edges(), k))
    for algorithm, name in algorithms:
        start_time = time.time()
        args = inspect.getfullargspec(algorithm)[0]
        if len(args) == 2:
            result = len(algorithm(graph))
        else:
            result = len(algorithm(graph, k))
        print("{0}: {1}, time: {2}".format(name, result, time.time() - start_time))
        assert k == result, "Wrong result!"
            graph.add_edge(last, node, type=tags['highway'])
            # edges += 1
            last = node
    elif isinstance(item, Node):
        pos = utm.from_latlon(item.lat, item.lon,
                              force_zone_number=utm_zone_number)
        if not utm_zone_number:
            utm_zone_number = pos[2]
        graph.add_node(item.id, lat=item.lat, lon=item.lon, pos=pos[:2])
        # nodes += 1
    items += 1
    print('{0} items processed'.format(items), end='\r')
print('{0} items processed'.format(items))

if shape_file:
    n = graph.number_of_nodes()
    i = 0
    print('Apply shapefile')
    # iterate over a snapshot of the nodes, since some are removed in the loop
    for node in list(graph.nodes()):
        p = Point(graph.nodes[node]['lon'], graph.nodes[node]['lat'])
        if not shape_file.contains(p):
            graph.remove_node(node)
        i += 1
        print('{0}/{1} nodes processed'.format(i, n), end='\r')
    print('{0}/{1} nodes processed'.format(i, n))

print('Search for orphaned nodes')
orphaned = set()
n = graph.number_of_nodes()
i = 0
for node in graph.nodes():
ips = {}
# filter all relays in this consensus to those that
# have a descriptor, are running, and are fast
for relay in consensus.relays:
    if relay in descriptors:
        sd = descriptors[relay]  # server descriptor
        rse = consensus.relays[relay]  # router status entry
        if "Running" in rse.flags and "Fast" in rse.flags:
            if relay not in ips:
                ips[relay] = []
            ips[relay].append(sd.address)

# build edges between every relay that could have been
# selected in a path together
for r1 in ips:
    for r2 in ips:
        if r1 == r2:
            continue
        g.add_edges_from(product(ips[r1], ips[r2]))

nsf_i += 1

# check if we should do a checkpoint and save our progress
if nsf_i == nsf_len or "01-00-00-00" in fname:
    chkpntstart = fname[0:10]
    with open("relaypairs.{0}--{1}.json".format(chkpntstart, chkpntend), 'w') as f:
        json.dump(list(g.edges()), f)

print("")
print('Num addresses: {0}'.format(g.number_of_nodes()))
print('Num unique pairs: {0}'.format(g.number_of_edges()))

# write final graph to disk
with open(out_file, 'w') as f:
    json.dump(list(g.edges()), f)

##########
__author__ = 'zplin'
import sys
import json
import csv
from os import path
import numpy as np
from networkx import Graph, transitivity, clustering, average_shortest_path_length, connected_components
from networkx.readwrite import json_graph

if __name__ == '__main__':
    with open(sys.argv[1]) as g_file:
        data = json.load(g_file)
    g = Graph(json_graph.node_link_graph(data))
    print('Number of nodes:', g.number_of_nodes())
    print('Average degree:', 2 * g.number_of_edges() / g.number_of_nodes())
    print('Transitivity:', transitivity(g))
    cc = clustering(g)
    print('Average clustering coefficient:', np.mean(list(cc.values())))
    # connected_component_subgraphs was removed in networkx 2.4; build the
    # component subgraphs directly instead
    for component in connected_components(g):
        subgraph = g.subgraph(component)
        if subgraph.number_of_nodes() > 1:
            print('Average shortest path length for subgraph of',
                  subgraph.number_of_nodes(), ':',
                  average_shortest_path_length(subgraph))
    # Calculating average clustering coefficient for different degrees
    degree_cc = {}
    for node, degree in g.degree():
        if degree not in degree_cc:
            degree_cc[degree] = []
        degree_cc[degree].append(cc[node])
    with open(path.join(path.dirname(sys.argv[1]), 'clustering.csv'), 'w',
              newline='') as cc_file: