def iterative_deepening_dfs(g: nx.DiGraph, source: str, destination: str):
    """Search for a path from source to destination via iterative deepening.

    Rebuilds g's adjacency as plain lists, computes per-node levels with
    get_level(), then runs a depth-limited dfs() with an increasing depth
    bound until it reports success, printing the found path or a not-found
    message.

    NOTE(review): this calls a five-argument dfs(adj, level, src, dst, depth)
    helper — a different signature from any two-argument dfs in this module;
    confirm which definition is actually in scope at the call site.
    """
    neighbors = {node: list(adj) for node, adj in dict(g.adjacency()).items()}
    levels = get_level(g, source)
    deepest = max(int(level) for level in levels.values())

    for limit in range(deepest + 1):
        found, path = dfs(neighbors, levels, source, destination, limit)
        if found:
            print("\n ~: Path Found :~ \n")
            print(" -> ".join(path))
            break
    else:
        # No depth bound produced a path (loop completed without break).
        print("\n ~: Path Not Found :~ ")
def bfs(g: "nx.DiGraph", start_node: str):
    """Breadth-first traversal of g starting at start_node.

    Prints the visit order joined with " -> " and returns it as a list
    (the original only printed; returning the order is backward compatible).

    Fix: nodes are now marked visited when they are *enqueued*. The original
    marked a node visited only after it had been dequeued, so a node
    reachable along several paths could sit in the queue multiple times and
    appear in the printed order more than once.

    Raises KeyError if start_node is not a node of g (unchanged).
    """
    from collections import deque  # local import: file's import block is outside this view

    adjacency = {node: list(nbrs) for node, nbrs in dict(g.adjacency()).items()}
    visited = {node: False for node in adjacency}

    queue = deque([start_node])  # deque: O(1) popleft vs list.pop(0)'s O(n)
    visited[start_node] = True
    order = []
    while queue:
        current = queue.popleft()
        order.append(current)
        for neighbor in adjacency[current]:
            if not visited[neighbor]:
                visited[neighbor] = True  # mark at enqueue time — prevents duplicates
                queue.append(neighbor)

    print("\n", " -> ".join(order))
    return order
def dfs(g: "nx.DiGraph", start_node: str):
    """Depth-first traversal of g starting at start_node.

    Prints the visit order joined with " -> " and returns it as a list
    (the original computed the order but only printed it; returning it is
    backward compatible). Neighbors are pushed in adjacency order, so the
    last-listed neighbor is explored first.

    Raises KeyError if start_node is not a node of g (unchanged).
    """
    adjacency = {node: list(nbrs) for node, nbrs in dict(g.adjacency()).items()}
    visited = {node: False for node in adjacency}

    stack = [start_node]
    order = []
    while stack:
        current = stack.pop()  # idiom: single pop() replaces Stack[-1] + Stack.pop()
        if visited[current]:
            # Duplicate entry left on the stack by an earlier push. By the
            # time it resurfaces, every neighbor pushed at its first visit
            # has already been popped (LIFO) and visited, so skipping it
            # preserves the original traversal order exactly.
            continue
        visited[current] = True
        order.append(current)
        for neighbor in adjacency[current]:
            if not visited[neighbor]:
                stack.append(neighbor)

    print("\n", " -> ".join(order))
    return order
def draw(G: "DiGraph", ranks: "List[float]"):
    """Render G as a plotly figure with node size/color derived from PageRank.

    Assumes nodes are labeled 0..n-1 so they index directly into `ranks`,
    `sizes`, and `percents` — TODO confirm against callers.

    NOTE(review): uses the legacy plotly object API (Scatter/Line/Marker/
    Data/XAxis/YAxis and py.iplot), removed in plotly 4 — confirm the
    pinned plotly version before upgrading.
    """
    print("Plotting...")
    pos = nx.spring_layout(G)
    for i in range(len(G.nodes)):
        G.nodes[i]['pos'] = pos[i]
        G.nodes[i]['rank'] = ranks[i]

    # Edge trace: one segment per edge; the trailing None breaks the line.
    # Fix: 'arrows' is not a valid Scatter mode — 'lines' draws the segments.
    edge_trace = Scatter(x=[],
                         y=[],
                         line=Line(width=0.5, color='#888'),
                         hoverinfo='none',
                         mode='lines')
    for edge in G.edges():
        # Fix: G.node was removed in networkx 2.4; the rest of this function
        # already uses G.nodes — made consistent.
        x0, y0 = G.nodes[edge[0]]['pos']
        x1, y1 = G.nodes[edge[1]]['pos']
        edge_trace['x'] += [x0, x1, None]
        edge_trace['y'] += [y0, y1, None]

    sizes = helpers.ranks_to_int(ranks)
    percents = helpers.ranks_to_percent(ranks)
    node_trace = Scatter(
        x=[],
        y=[],
        text=[],
        mode='markers',
        hoverinfo='text',
        marker=Marker(
            showscale=False,
            # percents entries look like "12.3%"; strip the sign and scale.
            color=[(float(i[:-1]) * 255)**3 for i in percents],
            size=sizes,
            line=dict(width=2)))
    for node in G.nodes():
        x, y = G.nodes[node]['pos']
        node_trace['x'].append(x)
        node_trace['y'].append(y)
    # Fix: the original iterated enumerate(G.adjacency()), which bound
    # `adjacency` to the whole (node, neighbors) pair — len(adjacency) was
    # always 2 and `node` was just a counter. Unpack the pair directly.
    for node, adjacency in G.adjacency():
        node_trace['marker']['color'].append(len(adjacency))
        node_info = 'PageRank: ' + percents[node]
        node_trace['text'].append(node_info)

    # NOTE(review): x=edge[0]/y=edge[1] use node *labels* as annotation
    # coordinates rather than the spring-layout positions — looks suspect
    # but preserved as-is; confirm intent before changing.
    annot = [(dict(showarrow=True,
                   arrowhead=200,
                   arrowsize=100,
                   arrowwidth=200,
                   arrowcolor='#636363',
                   x=edge[0],
                   y=edge[1],
                   xref='x',
                   yref='y')) for edge in G.edges()]
    annot.append(
        dict(showarrow=False, xref="paper", yref="paper", x=0.005, y=-0.002))
    fig = Figure(data=Data([edge_trace, node_trace]),
                 layout=Layout(title='Regular PageRank',
                               titlefont=dict(size=16),
                               showlegend=False,
                               hovermode='closest',
                               margin=dict(b=20, l=5, r=5, t=40),
                               annotations=annot,
                               xaxis=XAxis(showgrid=False,
                                           zeroline=False,
                                           showticklabels=False),
                               yaxis=YAxis(showgrid=False,
                                           zeroline=False,
                                           showticklabels=False)))
    py.iplot(fig, filename='networkx')
if args.embeddings_file is not None:
    # Similarity embeddings provided: link each node to its k nearest
    # neighbours in the word2vec space (nodes are keyed by their str() id).
    kv = KeyedVectors.load_word2vec_format(args.embeddings_file)
    for node in trange(num_nodes):
        potential_links = [
            int(pair[0])
            for pair in kv.most_similar(positive=[str(node)], topn=args.k)
        ]
        for adj in potential_links:
            G.add_edge(node, adj)

if args.tfidf_file is not None:
    # NOTE(review): pickle.load executes arbitrary code from the file —
    # only ever point --tfidf_file at trusted data.
    with open(args.tfidf_file, "rb") as f:
        embeddings = pickle.load(f)
    dist_matrix = cosine_similarity(embeddings)
    # Fix: each row's similarity with itself is the maximum (cosine of a
    # vector with itself is 1), so the raw top-k always contained the node
    # itself — a self-loop that wasted one of the k neighbour slots.
    # Mask the diagonal so argpartition selects k *other* nodes.
    np.fill_diagonal(dist_matrix, -np.inf)
    for node in trange(num_nodes):
        potential_links = np.argpartition(dist_matrix[node],
                                          -args.k)[-args.k:]
        for adj in potential_links:
            G.add_edge(node, adj)

# Save the adjacency list: one line per node, "<node> <sorted neighbours...>".
with open(args.output_file, "w") as f:
    for node, adj in G.adjacency():
        line = " ".join([str(node)] + list(map(str, sorted(adj.keys()))))
        f.write(line + '\n')

print("Done.")