import os

import networkit as nk


def loadings(path, verbose=True):
    if verbose:
        print("__LOADINGS__")
    # load the graph
    G = nk.graphio.readGraph(os.path.join(path, "network.dat"),
                             weighted=True, fileformat=nk.Format.EdgeListTabOne)
    # collect zero-weight edges before removing them
    removed = []
    for u, v in G.edges():
        if G.weight(u, v) == 0:
            removed.append((u, v))
    res = dict(numberOfNodes=G.numberOfNodes(),
               numberOfEdges=G.numberOfEdges(),
               percentOfNulWeight=len(removed) / G.numberOfEdges())
    for (u, v) in removed:
        G.removeEdge(u, v)
    # graph properties
    if verbose:
        nk.overview(G)
    # load the ground-truth communities
    gt_partition = nk.community.readCommunities(
        os.path.join(path, "community.dat"), format="edgelist-t1")
    # community properties
    res["numberOfComGroundtruth"] = gt_partition.numberOfSubsets()
    if verbose:
        nk.community.inspectCommunities(gt_partition, G)
        print(f"{gt_partition.numberOfSubsets()} communities detected")
    return G, gt_partition, res
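# A minimal usage sketch, assuming `path` points at an LFR-style benchmark
# directory containing the network.dat and community.dat files the function
# expects; the directory name below is hypothetical.
path = "benchmarks/lfr_mu_0.3"
G, gt_partition, res = loadings(path, verbose=False)
print(res["numberOfNodes"], res["percentOfNulWeight"], res["numberOfComGroundtruth"])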
def gather_real_features(sample_count, node_count, start_position):
    collection = gateway.real_features()
    loader = MongoDBLoader()
    features = FeatureVector()
    for number in range(start_position, sample_count):
        print(number)
        component = get_giant_component(
            loader.load_real_graph_part(node_count, number + 1))
        overview(component)
        collection.insert_one(features.build_vector_for_graph(component))
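# get_giant_component() is used throughout this file but not defined in this
# excerpt. A minimal sketch of what it could look like with NetworKit's
# components module; the helper name comes from this file, the body is an
# assumption.
def get_giant_component(G):
    # extract the largest connected component as a compacted graph
    # (node IDs renumbered to 0..n-1)
    return nk.components.ConnectedComponents.extractLargestConnectedComponent(
        G, compactGraph=True)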
def gather_features(sample_count, start_position, generator):
    collection_features = gateway.get_collection(
        generator.get_name() + feature_collection_suffix)
    collection_graph = gateway.get_collection(generator.get_name() + '_graphs')
    feature_vector = FeatureVector()
    for number in range(start_position, sample_count):
        component = get_giant_component(generator.generate())
        overview(component)
        MongoDBStorage().storeGraph(collection_graph, component)
        collection_features.insert_one(
            feature_vector.build_vector_for_graph(component))
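# A minimal sketch of a generator object satisfying the interface that
# gather_features() relies on (get_name() and generate()); the wrapper class is
# hypothetical, only the interface is taken from this file.
class ERGenerator:
    def __init__(self, n, p):
        self.n, self.p = n, p

    def get_name(self):
        return f"ER_{self.n}"

    def generate(self):
        return nk.generators.ErdosRenyiGenerator(self.n, self.p).generate()

# gather_features(sample_count=10, start_position=0, generator=ERGenerator(30000, 0.001))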
def gather_br_features(sample_count, start_position, nodes_count=79999):
    features_collection = gateway.get_collection("BR_features")
    br_collection = gateway.get_collection("bollobas_riordan_30000")
    loader = MongoDBLoader()
    features = FeatureVector()
    for number in range(start_position, sample_count):
        graph = loader.load_one_from_collection(number, br_collection)
        component = get_giant_component(graph)
        component.removeSelfLoops()
        overview(component)
        features_collection.insert_one(
            features.build_vector_for_graph(component))
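# Bollobás-Riordan preferential-attachment graphs contain self-loops (and can
# contain parallel edges) by construction, which is why the loop above calls
# removeSelfLoops(). A minimal sketch of the same cleanup on an arbitrary
# NetworKit graph; removeMultiEdges() is an extra, optional step not used above.
def clean_multigraph(G):
    # drop self-loops and parallel edges so degree-based features
    # are computed on a simple graph
    G.removeSelfLoops()
    G.removeMultiEdges()
    return G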
def check_graphs(G1, G2):
    assert (G1.numberOfNodes() == G2.numberOfNodes())
    assert (G1.numberOfEdges() == G2.numberOfEdges())
    failed = False
    for i in range(G1.numberOfNodes()):
        if G1.degree(i) != G2.degree(i):
            print("Degree mismatch of node %d (%d != %d)"
                  % (i, G1.degree(i), G2.degree(i)))
            failed = True
    if failed:
        nk.overview(G1)
        nk.overview(G2)
        raise RuntimeError("Degree mismatch")
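# A minimal usage sketch: a copy made with NetworKit's Graph copy constructor
# should always pass the check.
g1 = nk.generators.ErdosRenyiGenerator(1000, 0.01).generate()
g2 = nk.Graph(g1)  # copy constructor yields an identical graph
check_graphs(g1, g2)  # passes silently; raises RuntimeError on degree mismatch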
def gather_cl_features(sample_count, node_count, start_position):
    # collect the degree sequence of every real graph sample
    d = []
    loader = MongoDBLoader()
    for number in range(sample_count):
        graph = loader.load_real_graph_part(node_count, number + 1)
        d.append([graph.degree(v) for v in graph.nodes()])
    feature_vector = FeatureVector()
    for number in range(start_position, sample_count):
        generator = CLGenerator(d[number])
        collection = gateway.get_collection(
            generator.get_name() + feature_collection_suffix)
        collection_graph = gateway.get_collection(generator.get_name() + '_graphs')
        component = get_giant_component(generator.generate())
        overview(component)
        MongoDBStorage().storeGraph(collection_graph, component)
        collection.insert_one(feature_vector.build_vector_for_graph(component))
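# A minimal sketch of what CLGenerator could wrap: NetworKit's built-in Chung-Lu
# generator, which produces a random graph with a given expected degree sequence.
# The wrapper class is an assumption; only its interface is taken from above.
class CLGenerator:
    def __init__(self, degree_sequence):
        self.degree_sequence = degree_sequence

    def get_name(self):
        return "CL"

    def generate(self):
        return nk.generators.ChungLuGenerator(self.degree_sequence).generate()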
from os import path

import networkit as nkit
from yaml import safe_load


def create_weighted_graph(nx_graph):
    """Create a weighted and directed NetworKit graph.

    The weight of each edge is the travel time of the road:
    length in km divided by the speed limit.

    :param nx_graph: NetworkX graph with road type and road length attributes
    :return: weighted and directed NetworKit graph
    """
    # convert to a NetworKit graph, then build a weighted, directed copy
    nkit_graph = nkit.nxadapter.nx2nk(nx_graph)
    graph_weighted_directed = nkit.Graph(nkit_graph.numberOfNodes(),
                                         weighted=True, directed=True)
    # openrouteservice YAML file with speed limits defined for each road type
    speed_limit = safe_load(open(path.join(BASEDIR, SETTINGS['speed_limits'])))
    for edge, highway, length in zip(
            nkit_graph.iterEdges(),
            [w[2] for w in nx_graph.edges.data('highway')],
            [float(w[2]) for w in nx_graph.edges.data('length')]):
        try:
            if '[' in highway:
                # some roads carry two road types -> take the first one
                weight = (length / 1000) / speed_limit[highway.strip('][').split('\'')[1]]
            else:
                weight = (length / 1000) / speed_limit[highway]
        except KeyError:
            # for an undefined speed limit, fall back to a default of 50 km/h
            weight = (length / 1000) / 50
        # add the edge with its travel-time weight to the new, empty graph
        graph_weighted_directed.addEdge(edge[0], edge[1], weight)
    nkit.overview(graph_weighted_directed)
    print('Created weighted NetworKit graph.')
    return graph_weighted_directed
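# A minimal usage sketch, assuming BASEDIR and SETTINGS['speed_limits'] (module-level
# config not shown in this excerpt) point at an openrouteservice-style YAML mapping
# road types to speed limits; the toy edge attributes mirror the OSM
# 'highway'/'length' convention the function expects.
import networkx as nx

toy = nx.DiGraph()
toy.add_edge(0, 1, highway='residential', length='300')  # 300 m residential road
toy.add_edge(1, 2, highway='motorway', length='5000')    # 5 km motorway
weighted = create_weighted_graph(toy)
print(weighted.weight(0, 1))  # travel time in hours: 0.3 km / speed limit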
import networkit as nk

g = nk.generators.ErdosRenyiGenerator(10000, 0.1).generate()
nk.overview(g)
diameter_algo = nk.distance.Diameter(g)
diameter_algo.run()
diam = diameter_algo.getDiameter()  # a pair of lower/upper diameter bounds
print(diam)
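# For large graphs an exact diameter can be expensive; NetworKit can instead
# estimate a bound range. A minimal sketch using the EstimatedRange algorithm
# with a 10% relative error.
estimated = nk.distance.Diameter(g, algo=nk.distance.DiameterAlgo.EstimatedRange,
                                 error=0.1)
estimated.run()
print(estimated.getDiameter())  # (lower bound, upper bound)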
G, gt_partition, res = loadings(path)
tot = G.totalEdgeWeight()

# %%
# Classic methods
print("__CLASSIC_METHODS__")
for evalname, fdetection in classic_methods:
    print(f"__{evalname}__")
    detected = fdetection(G)
    res.update(partitionRes(G, gt_partition, detected, evalname, ""))

# %%
# Normalization
print("__NORMALIZATION__")
for normname, functor in norma.items():
    Gn = functor(G)
    nk.overview(Gn)
    print("tot: ", Gn.totalEdgeWeight())
    # the normalization must not modify the original graph
    assert tot == G.totalEdgeWeight()
    for evalname, fdetection in [
            ("Louvain", nk.community.detectCommunities),
            ("PLP", lambda G: nk.community.detectCommunities(G, nk.community.PLP(G)))]:
        if Gn.totalEdgeWeight() != 0:
            detected = fdetection(Gn)
            res.update(partitionRes(G, gt_partition, detected, evalname, normname))
        else:
            ARI, NMI = 1, 1
            print("1 community detected because the total edge weight equals 0")
            print(f"NMI:{NMI}")
            print(f"ARI:{ARI}")
            res[f"numberOfCom_{evalname}_{normname}"] = 1
            res[f"NMI_{evalname}_{normname}"] = NMI
            res[f"ARI_{evalname}_{normname}"] = ARI

print("NMI ranking:")
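# The norma dict of normalization functors is not defined in this excerpt. A
# minimal sketch of one entry, assuming each functor must return a new graph
# (the assert above requires the original to stay untouched): rescale every
# weight by the maximum edge weight.
def max_normalize(G):
    Gn = nk.Graph(G)  # work on a copy, never on the original
    wmax = max((Gn.weight(u, v) for u, v in Gn.edges()), default=1)
    for u, v in Gn.edges():
        Gn.setWeight(u, v, Gn.weight(u, v) / wmax)
    return Gn

norma = {"max": max_normalize}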