def random_binary_dgm(n=10, p=0.2):
    """Build a DGM over a random GNR topology with random binary CPDs.

    Parameters
    ----------
    n : int
        Number of nodes in the generated graph.
    p : float
        Redirection probability passed to ``nx.gnr_graph``.

    Returns
    -------
    DGM
        The populated directed graphical model.
    """
    skeleton = nx.gnr_graph(n, p)
    dgm = DGM()
    dgm.add_nodes_from(skeleton.nodes())
    dgm.add_edges_from(skeleton.edges())
    # One CPD per node: a random table whose scope is the node's parents
    # followed by the node itself.
    cpds = {}
    for node in dgm.nodes():
        scope = list(dgm.predecessors(node)) + [node]
        cpds[node] = TableFactor(
            random_table_factor(dgm.in_degree(node) + 1), scope)
    # NOTE(review): (graph, name, values) is the networkx 1.x argument order
    # for set_node_attributes — confirm against the pinned networkx version.
    nx.set_node_attributes(dgm, 'CPD', cpds)
    return dgm
def named_subgraph_responses(client):
    """Generator fixture: POST a fast and a slow subgraph request and
    yield the responses keyed by name.

    Parameters
    ----------
    client :
        An HTTP test client exposing ``post``/``get`` — presumably a
        TestClient supplied by another fixture; confirm against conftest.

    Yields
    ------
    dict
        Mapping of response name -> response object.
    """
    route = API_LATEST + "/network/subgraph"
    responses = {}
    # A 200-node random graph makes the "slow" request do real work server-side.
    slow_graph = clean_graph_dict(nx.gnr_graph(200, p=0.05, seed=42))
    nodes = [{"id": "3"}, {"id": "29"}, {"id": "18"}]
    init_post_requests = [
        # name, file or object, is-fast
        ("subgraph_response_fast",
         get_payload("network_subgraph_request.json"), True),
        (
            "subgraph_response_slow",
            json.dumps(dict(graph=slow_graph, nodes=nodes)),
            False,
        ),
    ]
    for name, payload, isfast in init_post_requests:
        response = client.post(route, data=payload)
        responses[name] = response
        # Each POST answers with a route where the result can be fetched later.
        result_route = response.json()["result_route"]
        if isfast:
            # trigger the svg render here so it's ready to get later.
            client.get(result_route + "/img?media_type=svg")
            # Brief pause so the render has time to complete before tests run.
            time.sleep(0.5)
    yield responses
def generate_random_watershed_solve_request(context, n_nodes=55, pct_tmnt=0.5, seed=42):
    """Build a watershed solve request from a random GNR tree.

    Node labels are converted to strings before the request is built.
    """
    tree = nx.gnr_graph(n=n_nodes, p=0.0, seed=seed)
    labeled = nx.relabel_nodes(tree, str)
    return generate_random_watershed_solve_request_from_graph(
        labeled, context, pct_tmnt=pct_tmnt)
def graph_sel(graphType, n, p):
    '''
    Generate and return a graph of the chosen type.

    Invoked only when the repository initialization specifies a graph
    type rather than a concrete graph.  (Docstring translated from
    Italian.)

    If ``graphType`` is not one of the known keys, the ``.get`` fallback
    assumes it is itself a zero-argument callable and invokes it.
    '''
    # NOTE(review): ``self`` is not in scope here — the 'nSCC_graph' branch
    # would raise NameError if selected. This may have been extracted from a
    # method without its ``self`` parameter; confirm against the original class.
    # The lambdas defer construction, so only the selected generator runs.
    return {
        'gn': lambda: nx.gn_graph(n),
        'gnr': lambda: nx.gnr_graph(n, p),
        'gnc': lambda: nx.gnc_graph(n),
        'scale_free': lambda: nx.scale_free_graph(n),
        'erdos_renyi': lambda: nx.erdos_renyi_graph(n, p, directed=True),
        'nSCC_graph': lambda: self.nSCC_graph(n)
    }.get(graphType, graphType)()
def generate_n_random_valid_watershed_graphs(
    n_graphs: int = 3,
    min_graph_nodes: int = 20,
    max_graph_nodes: int = 50,
    seed: int = 42,
):
    """Concatenate several random GNR trees into one disjoint DiGraph.

    Each component's node labels are offset by the running node count so
    the components never share nodes.  ``seed`` fixes the per-component
    node counts; component ``i`` uses ``seed=i`` for its topology.
    """
    combined = nx.DiGraph()
    numpy.random.seed(seed)
    for idx in range(n_graphs):
        size = numpy.random.randint(min_graph_nodes, max_graph_nodes)
        # Offset by the nodes already present so labels stay disjoint.
        base = len(combined.nodes())
        component = nx.gnr_graph(size, 0.0, seed=idx)
        shifted = [(base + src, base + dst) for src, dst in component.edges]
        combined.add_edges_from(shifted)
    return combined
def generate_random_graph(n, p):
    """
    Randomly generate a GNR (growing network with redirection) digraph.

    Parameters
    ----------
    n: int
        The number of nodes
    p: float
        The redirection probability

    Returns
    -------
    DiGraph
    """
    DG = nx.gnr_graph(n, p)
    return DG
def directed_graphs():
    """Demo: build several directed random graphs and render each one
    (plus an undirected version where applicable) with ``draw_graph``."""
    print("Directed graphs")
    print("Growing network")
    digraph = nx.gn_graph(10)  # the GN graph
    draw_graph(digraph)
    undirected = digraph.to_undirected()  # the undirected version
    draw_graph(undirected)
    # attachment kernel A_k = k^1.5
    digraph = nx.gn_graph(10, kernel=lambda x: x**1.5)
    draw_graph(digraph)
    print("Growing network graph")
    digraph = nx.gnr_graph(n=11, p=0.3)
    draw_graph(digraph)
    undirected = digraph.to_undirected()
    draw_graph(undirected)
    print("Growing network with copying graph")
    digraph = nx.gnc_graph(n=7)
    draw_graph(digraph)
    undirected = digraph.to_undirected()
    draw_graph(undirected)
    print("Scale-free graph")
    sf_graph = nx.scale_free_graph(10)
    draw_graph(sf_graph)
def named_validation_responses(client):
    """Generator fixture: POST a set of valid/invalid, fast/slow graph
    validation payloads and yield the responses keyed by name."""
    route = API_LATEST + "/network/validate"
    # Large graphs (15000 nodes) make the "slow" requests take real time;
    # the gnc graph contains cycles, so it is the invalid case.
    slow_valid = json.dumps(
        clean_graph_dict(nx.gnr_graph(15000, p=0.05, seed=42)))
    slow_invalid = json.dumps(clean_graph_dict(nx.gnc_graph(15000, seed=42)))
    cases = [
        ("valid_graph_response_fast",
         get_payload("network_validate_is_valid.json")),
        (
            "invalid_graph_response_fast",
            get_payload("network_validate_is_invalid_cycle.json"),
        ),
        ("valid_graph_response_slow", slow_valid),
        ("invalid_graph_response_slow", slow_invalid),
    ]
    responses = {name: client.post(route, data=payload)
                 for name, payload in cases}
    yield responses
Created on Mon Jan 25 19:40:10 2016 @author: hp """ import csv import re import matplotlib.pyplot as plt import networkx as nx from operator import itemgetter from networkx.algorithms import bipartite from networkx.utils import (powerlaw_sequence, create_degree_sequence) #from igraph import Graph, mean import numpy as nm #G=nx.gnm_random_graph(2939,30501,directed=True) D = nx.gnr_graph(49, 0.09083) # the GNR graph G = D.to_undirected() # the undirected version #nx.draw_random(G1) print(bipartite.is_bipartite(G)) d = nx.degree(G) nx.draw(G, nodelist=d.keys()) #nx.draw(G, nodelist=d.keys(), node_size=[v * 20 for v in d.values()]) #plt.savefig("./random/sameNodesize.png") plt.show() print('diameter ',nx.diameter(G, e=None))# graph not connected print('debsity', nx.density(G)) print("clustering coefficient",nx.average_clustering(G)) #print("average degree ", nm.mean(G.degree()))
def watershed_graph():
    """Small deterministic GNR tree (13 nodes) with string node labels."""
    graph = nx.gnr_graph(n=13, p=0.0, seed=0)
    # relabel_nodes with copy=False relabels in place and returns the graph.
    return nx.relabel_nodes(graph, str, copy=False)
def generate_random_graph_request(n_nodes, seed=0):  # pragma: no cover
    """Build a request payload containing a random GNR tree as a dict."""
    tree = nx.gnr_graph(n=n_nodes, p=0.0, seed=seed)
    return {"graph": clean_graph_dict(tree)}
# draw networkx graph nx.draw_networkx_nodes(G, pos, node_size = 200) nx.draw_networkx_edges(G, pos) nx.draw_networkx_labels(G, pos, labels = labels, font_size = 12) if (weighted): edge_labels = nx.get_edge_attributes(G, "weight") nx.draw_networkx_edge_labels(G, pos, edge_labels = edge_labels) # resize graph fig = plt.gcf() fig.set_size_inches((14, 14), forward = False) plt.savefig(fname + "_original.png") # clear previous graph plt.clf() fname = "data/directed_weighted_gnr_graph" # path and filename # Generate directed, weighted GNR graph G = nx.gnr_graph(n=20, p=0.35, seed=1234) for (u,v) in G.edges(): G[u][v]["weight"] = round(random.uniform(0,1),3) # Plots G using base spring layout labels = {} for node in G.nodes(): labels[node] = node pos = nx.spring_layout(G) draw_graph(G, pos, fname, labels, weighted=True) # Embeds G, plots and saves results spectral_embedder(G, fname, directed=nx.is_directed(G), weighted=True, plot=True, symmetric=False)
def random_gnr(n_var, p=0.2):
    """Thin wrapper: a GNR digraph with ``n_var`` nodes and redirection
    probability ``p``."""
    graph = nx.gnr_graph(n_var, p)
    return graph
def generate_gnr(params=None):
    """Generate a GNR (growing network with redirection) digraph.

    Parameters
    ----------
    params : dict, optional
        Keys ``'n'`` (node count) and ``'p'`` (redirection probability).
        Defaults to ``{'n': 20, 'p': 0.2}``.

    Returns
    -------
    tuple
        ``(G, None)`` — the second slot preserves the (graph, extra)
        return shape the original used.
    """
    # Avoid the shared-mutable-default-argument pitfall: build the default
    # dict per call instead of once at definition time.
    if params is None:
        params = {'n': 20, 'p': 0.2}
    G = nx.gnr_graph(params['n'], params['p'])
    return G, None
def generateRandomGraph(n, p):
    """Return a random GNR digraph with ``n`` nodes and redirection
    probability ``p``."""
    return nx.gnr_graph(n, p)
def test_facility_load_reduction(contexts, tmnt_facility):
    """Integration test: a treatment facility reduces watershed loads.

    Builds a deterministic 3-node watershed graph, attaches precomputed
    land-surface loading to node "2" and the facility under test to node
    "1", solves it, then asserts mass balance and load reduction.
    (Presumably node "0" is the outfall of the gnr tree — confirm.)
    """
    context = contexts["default"]
    # Deterministic 3-node tree with string node ids ("0", "1", "2").
    g = nx.relabel_nodes(nx.gnr_graph(n=3, p=0.0, seed=0), lambda x: str(x))
    # Precomputed land-surface loading fixture for node "2".
    data = {
        "2": {
            "area_acres": 9.58071049103565,
            "imp_area_acres": 5.593145122640718,
            "perv_area_acres": 3.9875653683949315,
            "imp_ro_volume_cuft": 228016.14562485245,
            "perv_ro_volume_cuft": 55378.354666523395,
            "runoff_volume_cuft": 283394.50029137585,
            "eff_area_acres": 6.461638142128291,
            "developed_area_acres": 9.58071049103565,
            "TSS_load_lbs": 2258.8814515144954,
            "TCu_load_lbs": 0.9702150595320715,
            "FC_load_mpn": 4140816712319.9717,
            "winter_dwTSS_load_lbs": 251.83974023768664,
            "summer_dwTSS_load_lbs": 330.06583891090344,
            "winter_dwTCu_load_lbs": 0.10816800872990859,
            "summer_dwTCu_load_lbs": 0.14176700035928835,
            "winter_dwFC_load_mpn": 461654242414.25323,
            "summer_dwFC_load_mpn": 605052620628.5996,
            "winter_dry_weather_flow_cuft_psecond": 0.002874213147310695,
            "winter_dry_weather_flow_cuft": 31595.282386474148,
            "summer_dry_weather_flow_cuft_psecond": 0.002874213147310695,
            "summer_dry_weather_flow_cuft": 41409.36365593464,
            "land_surfaces_count": 1,
            "imp_pct": 58.37923114234624,
            "ro_coeff": 0.6744424798321826,
            "TSS_conc_mg/l": 127.68000000000005,
            "TCu_conc_ug/l": 54.84000000000001,
            "FC_conc_mpn/100ml": 51600.0,
            "winter_dwTSS_conc_mg/l": 127.68000000000008,
            "winter_dwTCu_conc_ug/l": 54.84,
            "winter_dwFC_conc_mpn/100ml": 51600.0,
            "summer_dwTSS_conc_mg/l": 127.68000000000005,
            "summer_dwTCu_conc_ug/l": 54.84,
            "summer_dwFC_conc_mpn/100ml": 51599.99999999999,
        },
    }
    # Node "1" holds the treatment facility under test (fixture-supplied).
    data["1"] = tmnt_facility
    nx.set_node_attributes(g, data)
    solve_watershed_loading(g, context)
    # No node may report solver errors.
    assert all([len(dct["node_errors"]) == 0 for n, dct in g.nodes(data=True)])
    assert len(g.nodes["0"]
               ["node_warnings"]) >= 1  # there is no node_id for this node.

    # Mass balance: total inflow == retained + discharged at the outfall.
    sum_ret = sum(
        nx.get_node_attributes(g, "runoff_volume_cuft_retained").values())
    sum_inflow = sum(nx.get_node_attributes(g, "runoff_volume_cuft").values())
    outflow = g.nodes["0"]["runoff_volume_cuft_total_discharged"]
    assert abs(sum_inflow - sum_ret - outflow) / sum_inflow < 1e-15

    # (per-node attribute, outfall running-total attribute) pairs whose sums
    # must match exactly.
    scalers = [
        ("summer_dwTSS_load_lbs_removed", "summer_dwTSS_load_lbs_total_removed"),
        ("runoff_volume_cuft_retained", "runoff_volume_cuft_total_retained"),
        (
            "summer_dry_weather_flow_cuft_retained",
            "summer_dry_weather_flow_cuft_total_retained",
        ),
        (
            "summer_dry_weather_flow_cuft_psecond_retained",
            "summer_dry_weather_flow_cuft_psecond_total_retained",
        ),
    ]

    for s, t in scalers:
        outfall_total = g.nodes["0"][t]
        sum_individual = sum(nx.get_node_attributes(g, s).values())
        # assert that these add up
        assert abs(sum_individual - outfall_total) < 1e-6, (s, t)

    tmnt_node = g.nodes["1"]
    params = [
        ("summer_dwTSS_load_lbs", "summer_dwTSS_load_lbs_total_discharged"),
    ]
    # Stricter capture/removal checks apply only to non-diversion facilities.
    if "diversion" not in tmnt_facility.get("facility_type", ""):
        assert tmnt_node["captured_pct"] > 0
        assert tmnt_node["TSS_load_lbs_removed"] > 0
        assert tmnt_node["runoff_volume_cuft_captured"] > 0
        assert tmnt_node["winter_dry_weather_flow_cuft_captured_pct"] > 0
        assert tmnt_node["TSS_load_lbs_inflow"] > tmnt_node[
            "TSS_load_lbs_discharged"]
        assert (tmnt_node["winter_dwTSS_load_lbs_inflow"] >
                tmnt_node["winter_dwTSS_load_lbs_discharged"])

        params += [
            ("TSS_load_lbs", "TSS_load_lbs_total_discharged"),
            ("winter_dwTSS_load_lbs", "winter_dwTSS_load_lbs_total_discharged"),
        ]

    for s, t in params:
        outfall_total = g.nodes["0"][t]
        sum_individual = sum(nx.get_node_attributes(g, s).values())
        # assert that load reduction occurred
        assert outfall_total < sum_individual, (s, t)

    assert tmnt_node["summer_dry_weather_flow_cuft_captured_pct"] > 0
    assert (tmnt_node["summer_dwTSS_load_lbs_inflow"] >
            tmnt_node["summer_dwTSS_load_lbs_discharged"])

    # Any node that recorded a nomograph solution must have solved cleanly.
    for n, dct in g.nodes(data=True):
        if "_nomograph_solution_status" in dct:
            assert "successful" in dct["_nomograph_solution_status"]