def dump_graph(fileobj, n, p, g, **kwargs):
    """Creates and dumps a graph to the open pickle file.

    NOTE: It is important to create the graph here, so that it can be
          garbage collected upon function exit.
    """
    adj_list, _ = graph.random_graph(n=n, p_0=p / 10, random_seed=g, **kwargs)
    pickle.dump(adj_list, fileobj)
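# A minimal usage sketch (not part of the original module): builds a small
# pickle dataset with dump_graph(). The filename, graph size and parameter
# values are illustrative assumptions, not project defaults.
def _example_build_dataset(path="graphs_dataset.pickle", num_graphs=5):
    with open(path, 'wb') as f:
        for g in range(num_graphs):
            # p is divided by 10 inside dump_graph, so p=3 means p_0=0.3.
            dump_graph(f, n=100, p=3, g=g)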
def graph_density_contour(n,
                          graphs_per_measure=10,
                          directed=True,
                          weights_on="edges-and-nodes"):
    """Plots density vs sigmoid_center & initial_probability: d(c, p_0)."""
    import numpy as np

    densities = [[0] * 10 for _ in range(10)]
    # The density of each graph type is taken as the mean density of the
    # <graphs_per_measure> graphs.
    densities_per_graph_type = []
    for c in range(1, 11):
        for p in range(1, 11):
            for g in range(graphs_per_measure):
                print((f"Graph batch: {(c - 1) * 10 + p}/100"
                       f" Instance: {g + 1}/{graphs_per_measure}"),
                      end='\r')
                # Vary the seed, so that each instance is a distinct graph.
                G, probs, edge_lengths, edge_lengths_true = \
                    graph.random_graph(n,
                                       directed=directed,
                                       weights_on=weights_on,
                                       random_seed=g,
                                       center_portion=c / 10,
                                       gradient=1,
                                       p_0=p / 10,
                                       get_probability_distribution=True)
                densities_per_graph_type.append(
                    graph.density(n, len(edge_lengths_true), directed)
                )
            densities[c - 1][p - 1] = mean(densities_per_graph_type)
            densities_per_graph_type.clear()

    plt.figure(figsize=(10, 10), dpi=200)
    x = y = np.arange(0.1, 1.1, 0.1)
    X, Y = np.meshgrid(x, y)
    cm = plt.get_cmap('viridis')
    levels = np.arange(0, 1.1, 0.1)
    cp = plt.contour(X, Y, densities, levels=levels,
                     colors="gainsboro", linewidths=0.2)
    plt.clabel(cp, fmt="%1.1f", fontsize=10, colors="gainsboro",
               rightside_up=False, manual=True)
    plt.contourf(X, Y, densities, cmap=cm, levels=levels)
    plt.xlabel("${p_0}$", fontsize=10)
    plt.ylabel("sigmoid center portion", fontsize=10)
    plt.tick_params(axis='both', which='major', labelsize=10)
    plot_title = ("Graph density\n"
                  f"n: {n} random-graphs/measure: {graphs_per_measure}")
    plt.title(plot_title, fontsize=12)
    plt.show()
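# A minimal usage sketch (not part of the original module); the values below
# are illustrative. Note that clabel(..., manual=True) above asks for
# interactive label placement, so this is best run in an interactive
# matplotlib session:
#
#     graph_density_contour(n=50, graphs_per_measure=3, directed=True)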
def measure(n, i, modes, problem, k, times, failing, online, **kwargs):
    """Times the <problem> search on the <i>th graph, for all the <modes>.

    NOTE: It is important that, upon function exit, the graphs are garbage
          collected.
    """
    adj_list, _ = graph.random_graph(n=n,
                                     random_seed=i,
                                     gradient=0.3,
                                     center_portion=0.15,
                                     p_0=0.3,
                                     **kwargs)
    init_config = {
        "adj_list": adj_list,
        "adj_list_reverse": graph.adj_list_reversed(adj_list),
        "source": 1,
        "sink": n
    }

    for solver, m in modes.items():
        mode = {
            "bidirectional": m[0],
            "parallel": m[1],
            "dynamic": m[2],
            "failing": failing,
            "online": online,
            "verbose": 0
        }
        start = timer()
        if problem == "k-shortest-paths":
            paths = core.k_shortest_paths(k, mode, init_config)
        else:
            paths = core.replacement_paths(mode, init_config)
        end = timer()
        times[solver].append(end - start)
    return times
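# A minimal usage sketch (not part of the original module): times the
# replacement-paths solvers on a handful of random graph instances. The solver
# names and their (bidirectional, parallel, dynamic) flag triples are
# illustrative assumptions, not a project-defined set; any <problem> other
# than "k-shortest-paths" is timed as replacement paths.
def _example_measure(n=500, num_graphs=5):
    modes = {
        "unidirectional": (False, False, False),
        "bidirectional": (True, False, False),
        "dynamic": (True, False, True),
    }
    times = {solver: [] for solver in modes}
    for i in range(num_graphs):
        times = measure(n, i, modes, problem="replacement-paths", k=None,
                        times=times, failing="edges", online=True)
    return times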
def main(ctx, path, nodetype, comments, delimiter, encoding, source, target,
         n, weighted, directed, weights_on, max_edge_weight, max_node_weight,
         K, yen, lawler, bidirectional, parallel, dynamic, random_seed,
         layout_seed, show_graph, save_graph, verbose):
    # 1. Preprocessing
    if path is None:
        decoder = None
        source = int(source) if source else 1
        target = int(target) if target else n
        adj_list, G = graph.random_graph(n=n,
                                         weighted=weighted,
                                         directed=directed,
                                         weights_on=weights_on,
                                         max_edge_weight=max_edge_weight,
                                         max_node_weight=max_node_weight,
                                         random_seed=random_seed)
    else:
        if (source is None) or (target is None):
            raise Exception("Both source and target should be defined via the"
                            " -s and -t options.")
        nodetype = utils.str_to_type(nodetype)
        read_graph_config = {
            "path": path,
            "nodetype": nodetype,
            "comments": comments,
            "delimiter": delimiter,
            "encoding": encoding
        }
        if directed:
            read_graph_config["create_using"] = nx.DiGraph
        else:
            read_graph_config["create_using"] = nx.Graph
        adj_list, G, encoder, decoder = io.read_graph(read_graph_config,
                                                      weighted)
        source = encoder[nodetype(source)]
        target = encoder[nodetype(target)]

    if dynamic:
        bidirectional = True
    if bidirectional:
        adj_list_reverse = graph.adj_list_reversed(adj_list)
    else:
        adj_list_reverse = None

    init_config = {
        "adj_list": adj_list,
        "adj_list_reverse": adj_list_reverse,
        "source": source,
        "sink": target
    }
    mode = {
        "bidirectional": bidirectional,
        "parallel": parallel,
        "dynamic": dynamic,
        "failing": "edges",
        "online": True,
        "verbose": verbose
    }
    ctx_config = {
        "init_config": init_config,
        "mode": mode,
        "G": G,
        "layout_seed": layout_seed,
        "show_graph": show_graph,
        "save_graph": save_graph,
        "decoder": decoder
    }

    if ctx.invoked_subcommand is not None:
        if ctx.invoked_subcommand == "dynamic_graph_demo":
            ctx_config["random_seed"] = random_seed
        ctx.ensure_object(dict)
        ctx.obj.update(ctx_config)
        return

    mode.update({"yen_": yen, "lawler": lawler})

    # 2. Paths generation
    k_paths = core.k_shortest_paths(K, mode, init_config)
    if decoder:
        k_paths = graph.decode_path_nodes(k_paths, decoder)

    # 3. Post-processing
    if verbose:
        post.print_paths(k_paths)
    if save_graph or show_graph:
        ctx_config.pop("init_config")
        ctx_config.pop("decoder")
        post.plot_paths(paths_data=k_paths, **ctx_config)
def main():
    n = 100
    directed = False
    weights_on = "edges-and-nodes"
    c = 0.3
    gradient = 0.5
    random_seed = 1

    if directed:
        m_max = n * (n - 1)
    else:
        m_max = n * (n - 1) // 2
    center = c * (n - 1)

    G_1, probs_1, edge_lengths_1, edge_lengths_true_1 = random_graph(
        n, directed=directed, weights_on=weights_on, random_seed=random_seed,
        gradient=gradient, center_portion=c, p_0=1,
        get_probability_distribution=True)
    m_1 = G_1.number_of_edges()

    G_2, probs_2, edge_lengths_2, edge_lengths_true_2 = random_graph(
        n, directed=directed, weights_on=weights_on, random_seed=random_seed,
        gradient=gradient, center_portion=c, p_0=0.7,
        get_probability_distribution=True)
    m_2 = G_2.number_of_edges()

    axes_title_fontsize = 12

    fig1, ax1 = plt.subplots(1, 3, figsize=(10, 5), dpi=200)
    # a
    ax1[0].scatter(edge_lengths_1, probs_1, s=10)
    ax1[0].set_ylim(0, 1.1)
    ax1[0].set_xlim(0, 110)
    # ax1[0].set_ylabel("p(x)", rotation=False, fontsize=10)
    ax1[0].set_title(
        r"a) $\mathregular{p_0}$: 1.0" + f", λ: {gradient}, c: {c}",
        fontsize=axes_title_fontsize)
    # b
    ax1[1].hist(edge_lengths_1, bins=range(1, 102))
    ax1[1].set_ylim(0, 110)
    ax1[1].set_xlim(0, 110)
    # complete graph edge distance frequency
    ax1[1].set_title("b) f'(x) = 100 - x", fontsize=axes_title_fontsize)
    # c
    ax1[2].hist(edge_lengths_true_1, bins=range(1, 102))
    ax1[2].set_ylim(0, 110)
    ax1[2].set_xlim(0, 110)
    ax1[2].set_title(f"c) {m_1}/{m_max}", fontsize=axes_title_fontsize)

    fig2, ax2 = plt.subplots(1, 3, figsize=(10, 5), dpi=200)
    # d
    ax2[0].scatter(edge_lengths_2, probs_2, s=10)
    ax2[0].set_ylim(0, 1.1)
    ax2[0].set_xlim(0, 110)
    # ax2[0].set_ylabel("p(x)", rotation=False, fontsize=10)
    ax2[0].set_title(
        r"d) $\mathregular{p_0}$: 0.7" + f", λ: {gradient}, c: {c}",
        fontsize=axes_title_fontsize)
    # e
    expected_edge_weight_freq = [
        0.7 * (1 - 1 / (1 + math.exp(-gradient * (i - center))))
        * edge_lengths_2.count(i)
        for i in range(1, 102)
    ]
    ax2[1].bar(range(1, 102), expected_edge_weight_freq, width=1)
    ax2[1].set_ylim(0, 110)
    ax2[1].set_xlim(0, 110)
    ax2[1].set_xlabel("x = abs(head - tail)", fontsize=axes_title_fontsize)
    ax2[1].set_title((f"e) {int(sum(expected_edge_weight_freq))}/{m_max}"
                      "\nf(x) = p(x)f'(x)"),
                     fontsize=axes_title_fontsize, y=0.93, linespacing=1.8)
    # f
    ax2[2].hist(edge_lengths_true_2, bins=range(1, 102))
    ax2[2].set_ylim(0, 110)
    ax2[2].set_xlim(0, 110)
    ax2[2].set_title(f"f) {m_2}/{m_max}", fontsize=axes_title_fontsize)

    for ax in [ax1, ax2]:
        for j in [0, 1, 2]:
            ax[j].tick_params(axis='both', which='major', labelsize=12)

    fig1.savefig("graph_model_vis_1.png")
    fig2.savefig("graph_model_vis_2.png")
def main(n, tail_idx, random_seed, layout_seed, center_portion, gradient, p_0,
         online, show_graph, save_graph):
    directed = False
    weights_on = "edges-and-nodes"
    failing = "edges"
    source = 1
    sink = n

    adj_list, G = graph.random_graph(n=n,
                                     directed=directed,
                                     weights_on=weights_on,
                                     random_seed=random_seed,
                                     center_portion=center_portion,
                                     gradient=gradient,
                                     p_0=p_0)
    adj_list_reverse = graph.adj_list_reversed(adj_list)
    init_config = {
        "adj_list": adj_list,
        "adj_list_reverse": adj_list_reverse,
        "source": source,
        "sink": sink
    }
    mode = {
        "bidirectional": True,
        "dynamic": True,
        "failing": failing,
        "online": online
    }

    path_data, tapes = core.first_shortest_path(mode, init_config)
    if online:
        r_paths, visited_reverse_me, visited_reverse_query, failed_head = \
            core.replacement_paths(mode, init_config)
    else:
        r_paths = core.replacement_paths(mode, init_config)

    paths_data_me = [path_data]
    paths_data = [path_data]
    base_path = path_data[0]
    meeting_edge_head = path_data[4]
    meeting_edge_head_idx = path_data[0].index(meeting_edge_head)
    query_tail = base_path[tail_idx]

    # Keep the replacement path of the meeting edge and the one of the
    # query edge.
    num_paths = 0
    for path in r_paths[1:]:
        if path[3][1] == meeting_edge_head:
            paths_data_me.append(path)
            num_paths += 1
        if path[3][0] == query_tail:
            paths_data.append(path)
            num_paths += 1
        if num_paths == 2:
            break

    if online:
        # Get the state that corresponds to the meeting edge.
        visited_nodes_reverse_me = post.visited_nodes(visited_reverse_me, sink)
        # Get the state that corresponds to the query edge.
        if tail_idx > meeting_edge_head_idx:
            visited_nodes_reverse = post.visited_nodes(visited_reverse_query,
                                                       sink)
        else:
            visited_nodes_reverse = visited_nodes_reverse_me
        visited_nodes_forward_me = visited_nodes_forward = set()
    else:
        # Get the state that corresponds to the meeting edge.
        visited_nodes_forward_me, visited_nodes_reverse_me = nodes_retrieved(
            meeting_edge_head_idx - 1, failing, tapes, base_path, source, sink)
        # Get the state that corresponds to the query edge.
        visited_nodes_forward, visited_nodes_reverse = nodes_retrieved(
            tail_idx, failing, tapes, base_path, source, sink)

    visited_after_retrieval_me, visited_after_retrieval = \
        visited_after_retrieved(random_seed, tail_idx, online)

    # 1. State retrieval vis for the meeting edge
    #    (all the visited nodes are retrieved)
    if not online:
        post.state_retrieval_vis(
            G, paths_data_me, visited_nodes_forward_me,
            visited_nodes_reverse_me, visited_nodes_forward_me,
            visited_nodes_reverse_me, visited_after_retrieval_me,
            (base_path[meeting_edge_head_idx - 1], meeting_edge_head),
            mode, random_seed, layout_seed, show_graph, save_graph)

    # 2. State retrieval vis for the query edge
    #    (all the visited nodes are retrieved)
    post.state_retrieval_vis(
        G, paths_data, visited_nodes_forward_me, visited_nodes_reverse_me,
        visited_nodes_forward, visited_nodes_reverse, visited_after_retrieval,
        (base_path[meeting_edge_head_idx - 1], meeting_edge_head),
        mode, random_seed, layout_seed, show_graph, save_graph)
def main(n, bidirectional, random_seed, layout_seed, save_graph):
    # source, sink = 100, n - 100
    source, sink = 1, n

    adj_list, G = graph.random_graph(n,
                                     weighted=True,
                                     directed=False,
                                     weights_on="edges-and-nodes",
                                     max_edge_weight=1000,
                                     max_node_weight=50,
                                     random_seed=random_seed)
    if bidirectional:
        adj_list_reverse = graph.adj_list_reversed(adj_list)
    else:
        adj_list_reverse = None

    forward_config, reverse_config = dijkstra.dijkstra_init(
        source, sink, adj_list, adj_list_reverse)

    if bidirectional:
        prospect = [0, 0, 0, 0]
        top_reverse = 0

        while forward_config["to_visit"] and reverse_config["to_visit"]:
            # Forward step
            visited, top_forward, prospect = dijkstra._dijkstra_step(
                **forward_config,
                opposite_visited=reverse_config["visited"],
                prospect=prospect,
                is_forward=True)
            # Stop when the two frontiers can no longer improve the best
            # prospect path (prospect[0] == 0 means no prospect path yet).
            if top_forward + top_reverse > prospect[0] != 0:
                break
            # Reverse step
            visited_reverse, top_reverse, prospect = dijkstra._dijkstra_step(
                **reverse_config,
                opposite_visited=forward_config["visited"],
                prospect=prospect,
                is_forward=False)
            if top_forward + top_reverse > prospect[0] != 0:
                break

        path, _ = dijkstra.extract_bidirectional_path(
            source, sink, prospect, {},
            visited=visited,
            visited_reverse=visited_reverse)
        meeting_edge_head = prospect[2]
    else:
        visited = dijkstra.dijkstra(**forward_config)
        path, _ = dijkstra.extract_path(source, sink, visited, {})
        visited_reverse = None
        meeting_edge_head = None

    # post.state_vis(forward_config["to_visit"],
    #                visited,
    #                source,
    #                sink,
    #                G=G)
    post.plot_search_sphere(G,
                            visited,
                            path,
                            show_graph=True,
                            save_graph=save_graph,
                            layout_seed=layout_seed,
                            visited_reverse=visited_reverse,
                            meeting_edge_head=meeting_edge_head)
def measure(n,
            g,
            p,
            problems,
            k=None,
            failing=None,
            measurements_per_graph=3,
            ds_fileobj=None,
            dataset=None,
            **kwargs):
    """A measurement corresponds to a single, unique graph and incorporates
    all the specified problems, on all the specified search types (solvers).

    NOTE: It is important that, upon function exit, the graphs are garbage
          collected.

    Returns:
        timings (list) : Each row corresponds to a problem and holds one
                         timing per solver.
    """
    if dataset is None:
        adj_list, _ = graph.random_graph(n=n,
                                         random_seed=g,
                                         p_0=p / 10,
                                         **kwargs)
        if ds_fileobj:
            pickle.dump(adj_list, ds_fileobj)
    else:
        adj_list = next(dataset)
    adj_list_reverse = graph.adj_list_reversed(adj_list)
    init_config = {
        "adj_list": adj_list,
        "adj_list_reverse": None,
        "source": 1,
        "sink": n
    }

    timings = []

    for i, probl in enumerate(problems):
        if probl == "k-shortest-paths":
            solvers = {
                'yen_': [False, False],
                'lawler': [False, False],
                'bidirectional': [True, False],
                'dynamic': [True, True]
            }
            mode = {"failing": "edges", "online": True}
        else:
            solvers = {
                'unidirectional': [False, False],
                'bidirectional': [True, False],
                'dynamic': [True, True]
            }
            if probl.endswith("online"):
                mode = {"failing": failing, "online": True}
            else:
                mode = {"failing": failing, "online": False}

        timings.append([0] * len(solvers))

        for j, (s, m) in enumerate(solvers.items()):
            # m: [bidirectional, dynamic]
            mode.update({"bidirectional": m[0], "dynamic": m[1]})
            if s == "yen_":
                mode.update({"yen_": True, "lawler": False})
            elif s == "lawler":
                mode.update({"yen_": False, "lawler": True})
            elif m[0]:
                init_config["adj_list_reverse"] = adj_list_reverse
                mode.update({"yen_": False, "lawler": False})
            timings[i][j] = time_a_run(mode, init_config, probl, k,
                                       times=measurements_per_graph)
    return timings
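# A minimal usage sketch (not part of the original module): runs one
# measurement (one random graph) over two problems. The problem labels follow
# the branches inside measure() -- "k-shortest-paths", and a replacement-paths
# label that may or may not end in "online" -- but the exact strings used by
# the rest of the project, as well as the parameter values, are assumptions.
def _example_measurement(n=400, g=1, p=3):
    problems = ["k-shortest-paths", "replacement-paths-online"]
    timings = measure(n, g, p, problems, k=10, failing="edges",
                      measurements_per_graph=3)
    # timings[i][j]: problem i, solver j (solver order as defined in measure).
    return timings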