def test_predecessor(self):
    G = nx.path_graph(4)
    assert_equal(nx.predecessor(G, 0), {0: [], 1: [0], 2: [1], 3: [2]})
    assert_equal(nx.predecessor(G, 0, 3), [2])
    G = nx.grid_2d_graph(2, 2)
    assert_equal(sorted(nx.predecessor(G, (0, 0)).items()),
                 [((0, 0), []), ((0, 1), [(0, 0)]),
                  ((1, 0), [(0, 0)]), ((1, 1), [(0, 1), (1, 0)])])
def test_predecessor_target(self):
    G = nx.path_graph(4)
    p = nx.predecessor(G, 0, 3)
    assert_equal(p, [2])
    p = nx.predecessor(G, 0, 3, cutoff=2)
    assert_equal(p, [])
    p, s = nx.predecessor(G, 0, 3, return_seen=True)
    assert_equal(p, [2])
    assert_equal(s, 3)
    p, s = nx.predecessor(G, 0, 3, cutoff=2, return_seen=True)
    assert_equal(p, [])
    assert_equal(s, -1)
def test_predecessor_target(self):
    G = nx.path_graph(4)
    p = nx.predecessor(G, 0, 3)
    assert p == [2]
    p = nx.predecessor(G, 0, 3, cutoff=2)
    assert p == []
    p, s = nx.predecessor(G, 0, 3, return_seen=True)
    assert p == [2]
    assert s == 3
    p, s = nx.predecessor(G, 0, 3, cutoff=2, return_seen=True)
    assert p == []
    assert s == -1
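The tests above exercise all four call patterns of nx.predecessor. A minimal standalone sketch of the same behaviour, assuming nothing beyond networkx and a four-node path graph:

import networkx as nx

G = nx.path_graph(4)  # 0 - 1 - 2 - 3

# Full predecessor dict: every reachable node maps to its predecessors
# on the shortest paths from the source.
print(nx.predecessor(G, 0))                       # {0: [], 1: [0], 2: [1], 3: [2]}

# With a target, only that node's predecessor list is returned.
print(nx.predecessor(G, 0, 3))                    # [2]

# A cutoff stops the BFS early; a target not reached within the cutoff gives [].
print(nx.predecessor(G, 0, 3, cutoff=2))          # []

# return_seen=True also reports the BFS level at which the target was seen
# (-1 if it was not reached).
print(nx.predecessor(G, 0, 3, return_seen=True))  # ([2], 3)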
def LCA(n1, n2, GR):
    # This function was written by Evan Rees. Thanks Evan!
    # GR is a directed graph with (parent, child) edges and a node for every parent and child,
    # reversed s.t. the root is last, so max will be the lowest common ancestor of both nodes.
    preds_1 = nx.predecessor(GR, n1)  # NOTE: Assign n1 and n2 for preds_1 and preds_2
    preds_2 = nx.predecessor(GR, n2)
    common_preds = set(preds_1).intersection(set(preds_2))
    LCA = max(common_preds, key=lambda n: preds_1[n])
    if max(common_preds) == n1:
        return n1
    elif max(common_preds) == n2:
        return n2
    else:
        return LCA
def LCA(n1, n2, taxlist):
    G = nx.DiGraph()
    # construct a directed graph with (parent, child) edges and a node for every parent and child
    G.add_edges_from(taxlist)
    # reverse s.t. the root is last, so max will be the lowest common ancestor of both nodes
    GR = G.reverse()
    preds_1 = nx.predecessor(GR, n1)  # NOTE: Assign n1 and n2 for preds_1 and preds_2
    preds_2 = nx.predecessor(GR, n2)
    common_preds = set(preds_1).intersection(set(preds_2))
    LCA = max(common_preds, key=lambda n: preds_1[n])
    if max(common_preds) == n1:
        print("LCA is : %d" % n1)
    elif max(common_preds) == n2:
        print("LCA is : %d" % n2)
    else:
        print("LCA is : %d" % LCA)
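A hedged usage sketch for the helper above. The taxlist below is a made-up toy taxonomy of (parent, child) integer ids; recent NetworkX releases also ship nx.lowest_common_ancestor, which gives the same answer directly on the un-reversed DiGraph:

import networkx as nx

# Hypothetical toy taxonomy: 1 is the root.
taxlist = [(1, 2), (1, 3), (2, 4), (2, 5), (3, 6)]

# LCA(4, 5, taxlist) above would print "LCA is : 2".

# For comparison (NetworkX 2.x and later):
G = nx.DiGraph(taxlist)
print(nx.lowest_common_ancestor(G, 4, 5))  # 2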
def can_join(self, wot, idty):
    """
    Checks whether an individual can join the wot as a member according to the wot rules
    Protocol 0.2
    :param wot: Graph to analyse
    :param idty: Pubkey of the candidate
    :return: False or True
    """
    # Extract the list of all members connected to idty within steps_max via certificates (edges)
    linked = networkx.predecessor(wot.reverse(copy=True), idty, cutoff=self.steps_max)
    sentries = [m for m in self.members if len(wot.out_edges(m)) > self.ySentries(len(self.members))]
    # List all sentries connected within steps_max of idty
    linked_in_range = [l for l in linked if l in sentries and l != idty]
    # Check if idty is connected to at least xpercent of the sentries
    enough_sentries = len(linked_in_range) >= len(sentries) * self.xpercent
    if not enough_sentries:
        print("{0} : Cannot join : not enough sentries ({1}/{2})".format(
            idty, len(linked_in_range), len(sentries) * self.xpercent))
    # Check if idty has enough certifications to be a member
    enough_certs = len(wot.in_edges(idty)) >= self.sig_qty
    if not enough_certs:
        print("{0} : Cannot join : not enough certifications ({1}/{2})".format(
            idty, len(wot.in_edges(idty)), self.sig_qty))
    return enough_certs and enough_sentries
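The reachability test above runs nx.predecessor over the reversed certification graph with a cutoff, so the keys of the result are everyone linked to the candidate within steps_max certification hops. A minimal sketch of just that idea on a hypothetical four-member web of trust (node names and steps_max are made up for illustration):

import networkx as nx

# Hypothetical certifications: an edge A -> B means "A certifies B".
wot = nx.DiGraph([("alice", "bob"), ("bob", "carol"), ("carol", "dave")])

steps_max = 2
candidate = "dave"

# On the reversed graph, a BFS from the candidate follows certifications
# backwards, limited to steps_max hops.
linked = nx.predecessor(wot.reverse(copy=True), candidate, cutoff=steps_max)
print(sorted(linked))  # ['bob', 'carol', 'dave']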
def Copy_all_shortest_paths_exclude_link(G, source, target, weight=None, color=None, color_exc_inc=None):
    if weight is not None:
        pred, dist = copy_dijkstra_predecessor_and_distance(
            G, source, weight=weight, color=color, color_exc_inc=color_exc_inc)
    else:
        pred = nx.predecessor(G, source)
    if target not in pred:
        raise nx.NetworkXNoPath()
    stack = [[target, 0]]
    top = 0
    while top >= 0:
        node, i = stack[top]
        if node == source:
            yield [p for p, n in reversed(stack[:top + 1])]
        if len(pred[node]) > i:
            top += 1
            if top == len(stack):
                stack.append([pred[node][i], 0])
            else:
                stack[top] = [pred[node][i], 0]
        else:
            stack[top - 1][1] += 1
            top -= 1
def test_predecessor_cycle(self):
    G = nx.cycle_graph(4)
    pred = nx.predecessor(G, 0)
    assert pred[0] == []
    assert pred[1] == [0]
    assert pred[2] in [[1, 3], [3, 1]]
    assert pred[3] == [0]
def dijkstra_all_shortest_paths(G, source, target, weight=None):
    '''This function is NetworkX's implementation of the all-shortest-paths
    algorithm and is used as ground truth for our implementation. It uses a
    modified version of Dijkstra's algorithm that computes the shortest path
    lengths and the predecessors on shortest paths.'''
    if weight is not None:
        pred, dist = nx.dijkstra_predecessor_and_distance(G, source, weight=weight)
    else:
        pred = nx.predecessor(G, source)
    if target not in pred:
        raise nx.NetworkXNoPath()
    stack = [[target, 0]]
    top = 0
    while top >= 0:
        node, i = stack[top]
        if node == source:
            yield [p for p, n in reversed(stack[:top + 1])]
        if len(pred[node]) > i:
            top += 1
            if top == len(stack):
                stack.append([pred[node][i], 0])
            else:
                stack[top] = [pred[node][i], 0]
        else:
            stack[top - 1][1] += 1
            top -= 1
def get_number_of_bags_eventually_containing(rules):
    """Get the number of bags that will eventually contain "shiny_gold".

    This is done by creating a networkx graph. We iterate over the rules and,
    for each bag a rule is for, we get the "child" bags of that rule and
    create an edge between the "parent" bag and each "child" bag. This makes
    sure there is a node for every bag and that each bag has all of its
    connections. Then we use networkx.predecessor() to find all the bags that
    will eventually have "shiny_gold" as a child. We subtract one from the
    count because the "shiny_gold" bag itself is included.

    Args:
        rules (dict): The rules to graph.

    Returns:
        int: The number of bags that will eventually contain "shiny_gold".
    """
    graph = nx.DiGraph()
    for parent, contains in rules.items():
        for _, child in contains:
            graph.add_edge(child, parent)
    return len(nx.predecessor(graph, "shiny_gold")) - 1
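A hedged usage sketch of the same idea on a hypothetical two-rule input; the rules format (parent mapped to (count, child) pairs) is assumed from the loop above:

import networkx as nx

# Hypothetical rules: light red bags hold shiny gold bags,
# dark orange bags hold light red bags.
rules = {
    "light_red": [(1, "shiny_gold")],
    "dark_orange": [(3, "light_red")],
    "shiny_gold": [],
}

# get_number_of_bags_eventually_containing(rules) above would return 2 here.

# Equivalently, nx.predecessor on the child -> parent graph reaches every bag
# that can eventually contain "shiny_gold", plus "shiny_gold" itself.
graph = nx.DiGraph()
for parent, contains in rules.items():
    for _, child in contains:
        graph.add_edge(child, parent)
print(len(nx.predecessor(graph, "shiny_gold")) - 1)  # 2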
def check():
    to_remove = []
    for node in localState['g']:
        pre = list(nx.predecessor(localState['g'].reverse(), node).keys())
        pre.remove(node)
        prefer = []
        for p in pre:
            prefer.extend(localState['g'].nodes[p]['songs'])
        if len(prefer) >= N:
            to_remove.extend(localState['g'].nodes[node]['songs'])
    print('to_remove', to_remove)
    localState['g'].remove_nodes_from(to_remove)
    for node in to_remove:
        localState['target_list'].remove(node)
    connected = []
    for s in localState['g']:
        for t in localState['g']:
            if s == t:
                continue
            if nx.has_path(localState['g'], s, t):
                connected.append(tuple({s, t}))
    return connected
def get_k_neighbor(G, L, node):
    seed = []
    pre_node = nx.predecessor(G, node, None, L)
    for index in pre_node:
        seed += pre_node[index]
    return seed
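Here nx.predecessor is called positionally, so L is the cutoff: the keys of the returned dict are the nodes within L hops of node, and the concatenated values are their shortest-path predecessors. A small sketch of what that concatenation looks like, assuming a five-node path graph and L = 2:

import networkx as nx

G = nx.path_graph(5)                  # 0 - 1 - 2 - 3 - 4
pred = nx.predecessor(G, 0, None, 2)  # positional: target=None, cutoff=2
print(pred)                           # {0: [], 1: [0], 2: [1]}

seed = [p for lst in pred.values() for p in lst]
print(seed)                           # [0, 1] -- what get_k_neighbor(G, 2, 0) would return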
def _edge_betweenness(G, source, nodes, cutoff=False):
    """Edge betweenness helper."""
    between = {}
    # get the predecessor data
    # (pred, length) = _fast_predecessor(G, source, cutoff=cutoff)
    (pred, length) = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
    # order the nodes by path length
    onodes = [nn for dd, nn in sorted((dist, n) for n, dist in length.items())]
    # initialize betweenness; doesn't account for any edge weights
    for u, v in G.edges(nodes):
        between[(u, v)] = 1.0
        between[(v, u)] = 1.0
    while onodes:  # work through all paths
        v = onodes.pop()
        if v in pred:
            num_paths = len(pred[v])  # Discount betweenness if more than
            for w in pred[v]:         # one shortest path.
                if w in pred:
                    num_paths = len(pred[w])  # Discount betweenness, mult path
                    for x in pred[w]:
                        between[(w, x)] += between[(v, w)] / num_paths
                        between[(x, w)] += between[(w, v)] / num_paths
    return between
def _edge_betweenness(G, source, nodes=None, cutoff=False):
    """Edge betweenness helper."""
    # get the predecessor data
    (pred, length) = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
    # order the nodes by path length
    onodes = [n for n, d in sorted(length.items(), key=itemgetter(1))]
    # initialize betweenness; doesn't account for any edge weights
    between = {}
    for u, v in G.edges(nodes):
        between[(u, v)] = 1.0
        between[(v, u)] = 1.0
    while onodes:  # work through all paths
        v = onodes.pop()
        if v in pred:
            # Discount betweenness if more than one shortest path.
            num_paths = len(pred[v])
            for w in pred[v]:
                if w in pred:
                    # Discount betweenness, mult path
                    num_paths = len(pred[w])
                    for x in pred[w]:
                        between[(w, x)] += between[(v, w)] / num_paths
                        between[(x, w)] += between[(w, v)] / num_paths
    return between
def extract_levels(self, dependency_graph_between_bbl):
    self.log_handler.info("Extracting levels...")
    import networkx as nx
    longest_path = nx.dag_longest_path(dependency_graph_between_bbl)
    level = len(longest_path)
    bb2lvl = {}
    levels = {}
    if self.extractLevelInDatails:
        levels[0] = {longest_path[0]}
        bb2lvl[longest_path[0]] = 0
        for lvl in range(0, level):
            # neighbours of the current longest-path node within one hop
            children = set(nx.predecessor(dependency_graph_between_bbl, longest_path[lvl], cutoff=1))
            forbiden_child = set([])
            # drop the node itself and any later nodes on the longest path
            children = children - ({longest_path[lvl]} | set(longest_path[lvl + 2:]))
            for child in children:
                parents = nx.ancestors(dependency_graph_between_bbl, child)
                if parents & children:
                    children = children - {child}
            levels[lvl + 1] = children
            for child in children:
                self.bb2lvl[child] = lvl + 1
        restNodes = list(set(dependency_graph_between_bbl.nodes) - set(self.bb2lvl.keys()))
    else:
        for lvl in range(0, level):
            levels[lvl + 1] = -1
    return [levels, bb2lvl]
def count_predecessor(graph):
    number_of_predecessor = 0
    for node in graph.nodes():
        number_of_predecessor = number_of_predecessor + (len(nx.predecessor(graph, node)) - 1)
    return number_of_predecessor
def all_shortest_paths(G, a, b):
    """Return a list of all shortest paths in graph G between nodes a and b."""
    ret = []
    pred = nx.predecessor(G, b)
    if not pred.has_key(a):  # b is not reachable from a
        return []
    pth = [[a, 0]]
    pthlength = 1  # instead of array shortening and appending, which are relatively
    ind = 0        # slow operations, we will just overwrite array elements at position ind
    while ind >= 0:
        n, i = pth[ind]
        if n == b:
            ret.append(map(lambda x: x[0], pth[:ind + 1]))
        if len(pred[n]) > i:
            ind += 1
            if ind == pthlength:
                pth.append([pred[n][i], 0])
                pthlength += 1
            else:
                pth[ind] = [pred[n][i], 0]
        else:
            ind -= 1
            if ind >= 0:
                pth[ind][1] += 1
    return ret
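This helper (written in Python 2 style: dict.has_key and a list-returning map) reproduces what the public generator nx.all_shortest_paths now provides. A minimal sketch of the modern call on a small cycle, where two equally short routes exist:

import networkx as nx

G = nx.cycle_graph(4)  # 0 - 1 - 2 - 3 - 0
print(list(nx.all_shortest_paths(G, source=0, target=2)))
# [[0, 1, 2], [0, 3, 2]]  (order may vary)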
def test_predecessor_cycle(self):
    G = nx.cycle_graph(4)
    pred = nx.predecessor(G, 0)
    assert_equal(pred[0], [])
    assert_equal(pred[1], [0])
    assert_true(pred[2] in [[1, 3], [3, 1]])
    assert_equal(pred[3], [0])
def Copy_all_shortest_paths_avoidnode(G, source, target, weight=None, avoid_node=None):
    if weight is not None:
        pred, dist = copy_dijkstra_predecessor_and_distance(
            G, source, weight=weight, avoid_node=avoid_node)
    else:
        pred = nx.predecessor(G, source)
    if target not in pred:
        raise Exception("No Path found with Given Bandwidth Constraint")
    stack = [[target, 0]]
    top = 0
    while top >= 0:
        node, i = stack[top]
        if node == source:
            yield [p for p, n in reversed(stack[:top + 1])]
        if len(pred[node]) > i:
            top += 1
            if top == len(stack):
                stack.append([pred[node][i], 0])
            else:
                stack[top] = [pred[node][i], 0]
        else:
            stack[top - 1][1] += 1
            top -= 1
def _node_betweenness(G, source, cutoff=False, normalized=True, weighted_edges=False):
    """Node betweenness helper: see betweenness_centrality for what you probably want.

    This actually computes "load" and not betweenness.
    See https://networkx.lanl.gov/ticket/103

    This calculates the load of each node for paths from a single source.
    (The fraction of the number of shortest paths from the source that go
    through each node.)

    To get the load for a node you need to do all-pairs shortest paths.

    If weighted_edges is True then use Dijkstra for finding shortest paths.
    In this case a cutoff is not implemented and so is ignored.
    """
    # get the predecessor and path length data
    if weighted_edges:
        (pred, length) = nx.dijkstra_predecessor_and_distance(G, source)
    else:
        (pred, length) = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
    # order the nodes by path length
    onodes = [(l, vert) for (vert, l) in length.items()]
    onodes.sort()
    onodes[:] = [vert for (l, vert) in onodes if l > 0]
    # initialize betweenness
    between = {}.fromkeys(length, 1.0)
    while onodes:
        v = onodes.pop()
        if v in pred:
            num_paths = len(pred[v])  # Discount betweenness if more than
            for x in pred[v]:         # one shortest path.
                if x == source:  # stop if hit source because all remaining v
                    break        # also have pred[v]==[source]
                between[x] += between[v] / float(num_paths)
    # remove source
    for v in between:
        between[v] -= 1
    # rescale to be between 0 and 1
    if normalized:
        l = len(between)
        if l > 2:
            scale = 1.0 / float((l - 1) * (l - 2))  # 1/the number of possible paths
            for v in between:
                between[v] *= scale
    return between
def find_path(self, start=(0.0, 0.0, 0.0), end=(0.0, 0.0, 0.0)):
    '''find the shortest path between start and end nodes on the graph
    using the euclidean metric instead of the graph metric
    Output: list of nodes of the shortest path and absolute distance
    '''
    if (start == end):  # prevent lack of sensical nodes
        return [], 0.0
    #print 'find path, start, end: ', start, end
    if (len(nx.predecessor(self.graph, start, end)) > 0):  # if reachable
        #path = nx.shortest_path(self.graph, start, end, 'weight')
        path = nx.dijkstra_path(self.graph, start, end, 'weight')
        pl = nx.shortest_path_length(self.graph, start, end, 'weight')
        return path, pl
    else:  # broken graph between start and end; no shortest path
        #print 'Broken graph case'
        psdict = nx.single_source_shortest_path(self.graph, start)
        sfromstart = []
        sfromend = []
        for k, entry in psdict.iteritems():
            sfromstart.append(k)
        pedict = nx.single_source_shortest_path(self.graph, end)
        for k, entry in pedict.iteritems():
            sfromend.append(k)
        breakpoints = []
        for k in sfromstart:
            for l in sfromend:
                stnodlist = psdict[k]
                endnodlist = pedict[l]
                if len(stnodlist) > 1 and len(endnodlist) > 1:
                    for stnod in stnodlist:
                        for endnod in endnodlist:
                            w = self.dist(stnod, endnod)
                            #print 'breakpoint distance: %f' % w
                            if w < 19.0:  # radius error to arc length
                                breakpoints.append((stnod, endnod, w))
        if len(breakpoints) > 0:
            self.graph.add_weighted_edges_from(breakpoints)
        if (len(nx.predecessor(self.graph, start, end)) > 0):
            path = nx.dijkstra_path(self.graph, start, end, 'weight')
            pl = nx.shortest_path_length(self.graph, start, end, 'weight')
            return path, pl
        else:
            return [], 0.0
def _node_betweenness(G, source, cutoff=False, weight=None, destIdentifier='destinations'):
    """Node betweenness_centrality helper:
    See betweenness_centrality for what you probably want.
    This actually computes "partial centrality" and not betweenness.
    See https://networkx.lanl.gov/ticket/103

    This calculates the load of each node for paths from a single source.
    (The fraction of the number of shortest paths from the source that go
    through each node.)

    To get the load for a node you need to do all-pairs shortest paths.

    If weight is not None then use Dijkstra for finding shortest paths.
    """
    # get the predecessor and path length data
    if weight is None:
        (pred, length) = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
    else:
        (pred, length) = nx.dijkstra_predecessor_and_distance(G, source, cutoff, weight)
    # keep only the lowest-numbered predecessor of each node
    for predecessor in pred:
        newlist = []
        if len(pred[predecessor]) > 0:
            minimo = pred[predecessor][0]
            for elem in pred[predecessor]:
                if int(elem) < int(minimo):
                    minimo = elem
            newlist.append(minimo)
        pred[predecessor][:] = newlist
    # order the nodes by path length
    onodes = [(l, vert) for (vert, l) in length.items()]
    onodes.sort()
    onodes[:] = [vert for (l, vert) in onodes if l > 0]
    # initialize betweenness
    between = {}.fromkeys(length, 1.0)
    for node in G.nodes:
        if destIdentifier not in G.nodes[node]:
            between[node] = 0.0  # Non-stub nodes do not propagate any contribution
        else:
            between[node] = 1.0  # Stub nodes propagate a contribution of 1
    while onodes:
        v = onodes.pop()
        if v in pred:
            num_paths = len(pred[v])  # Discount betweenness if more than
            for x in pred[v]:         # one shortest path.
                if x == source:  # stop if hit source
                    break        # also have pred[v]==[source]
                between[x] += between[v] / float(num_paths)
    for node in G.nodes:
        if destIdentifier in G.nodes[node]:
            between[node] -= 1.0
    return between
def init():
    mesh = MonotoneSystem()
    mesh.rectangular_mesh((0, 0), (1, 1), (10, 10))
    a = 0.1
    inputs = [
        np.array([0, 0]),
        np.array([0, a]),
        np.array([a, 0]),
        np.array([-a, 0]),
        np.array([0, -a])
    ]
    fts = mesh.compute_FTS(inputs)

    init_elem = list(mesh.collision(np.array([0, 0]), np.array([0.2, 0.2])))
    avoid_elem = list(mesh.collision(np.array([0.4, 0.4]), np.array([0.6, 0.6])))
    final_elem = list(mesh.collision(np.array([0.8, 0.8]), np.array([1.0, 1.0])))

    init = random.choice(init_elem)
    fts.graph['initial'] = set([init])
    final = random.choice(final_elem)

    predecessor = nx.predecessor(fts, final)
    paths = [[k] + v for k, v in predecessor.items()]
    used_edges = list(flatten([zip(p[1:], p[0:-1]) for p in paths if len(p) > 1]))
    used_edges = list(flatten([zip(p[0:-1], p[1:]) for p in paths if len(p) > 1]))
    used_edges.append((final, final))
    fts.remove_edges_from(set(fts.edges()).difference(set(used_edges)))
    fts[final][final]['control'] = np.array([0, 0])
    fts[final][final]['label'] = str(np.array([0, 0]))
    #fts.show("lkjlkj")

    env = MonotoneEnvironment(mesh)
    for i, elem in enumerate(mesh.elements):
        if elem in init_elem:
            env.regions[i] = "i"
        if elem in avoid_elem:
            env.regions[i] = "c"
        if elem in final_elem:
            env.regions[i] = "f"
    print env.regions

    q1 = Quad("q1", "i", env)
    planner = MonotonePlanner(q1, mesh, fts)

    sim = Simulator()
    sim.add("q1", q1)
    sim.add("q1_planner", planner)
    return sim, env
def is_leaf(g=nx.DiGraph(), node=""):
    """
    A node other than the source is a leaf node if its degree is 1
    :param g:
    :param node:
    :return:
    """
    if len(nx.predecessor(g, node)) == g.degree(node) == 1:
        return True
    else:
        return False
def _node_betweenness(G, source, cutoff=False, normalized=True, weight=None):
    """Node betweenness_centrality helper:
    See betweenness_centrality for what you probably want.
    This actually computes "load" and not betweenness.
    See https://networkx.lanl.gov/ticket/103

    This calculates the load of each node for paths from a single source.
    (The fraction of the number of shortest paths from the source that go
    through each node.)

    To get the load for a node you need to do all-pairs shortest paths.

    If weight is not None then use Dijkstra for finding shortest paths.
    """
    # get the predecessor and path length data
    if weight is None:
        (pred, length) = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
    else:
        (pred, length) = nx.dijkstra_predecessor_and_distance(G, source, cutoff, weight)
    # order the nodes by path length
    onodes = [(l, vert) for (vert, l) in length.items()]
    onodes.sort()
    onodes[:] = [vert for (l, vert) in onodes if l > 0]
    # initialize betweenness
    between = {}.fromkeys(length, 1.0)
    while onodes:
        v = onodes.pop()
        if v in pred:
            num_paths = len(pred[v])  # Discount betweenness if more than
            for x in pred[v]:         # one shortest path.
                if x == source:  # stop if hit source because all remaining v
                    break        # also have pred[v]==[source]
                between[x] += between[v] / float(num_paths)
    # remove source
    for v in between:
        between[v] -= 1
    # rescale to be between 0 and 1
    if normalized:
        l = len(between)
        if l > 2:
            # scale by 1/the number of possible paths
            scale = 1.0 / float((l - 1) * (l - 2))
            for v in between:
                between[v] *= scale
    return between
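This helper computes the per-source load; summed over all sources and rescaled it corresponds to what the public NetworkX API exposes as load centrality. A quick cross-check sketch (assuming nx.load_centrality from the standard NetworkX distribution):

import networkx as nx

G = nx.path_graph(5)  # the middle node carries the most shortest paths
print(nx.load_centrality(G))
# roughly {0: 0.0, 1: 0.5, 2: 0.667, 3: 0.5, 4: 0.0}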
def one(input):
    """
    >>> one(['light red bags contain 1 bright white bag, 2 muted yellow bags.','dark orange bags contain 3 bright white bags, 4 muted yellow bags.','bright white bags contain 1 shiny gold bag.','muted yellow bags contain 2 shiny gold bags, 9 faded blue bags.','shiny gold bags contain 1 dark olive bag, 2 vibrant plum bags.','dark olive bags contain 3 faded blue bags, 4 dotted black bags.','vibrant plum bags contain 5 faded blue bags, 6 dotted black bags.','faded blue bags contain no other bags.','dotted black bags contain no other bags.'])
    4
    """
    G = nx.DiGraph()
    for row in input:
        data = [''.join(x.split(' ')[:2]) for x in re.split(r'[0-9] ', row)]
        parent = data[0]
        if len(data) > 1:
            for child in data[1:]:
                G.add_edge(child, parent)
    return len(nx.predecessor(G, 'shinygold')) - 1
def reorganizeNet(self):
    H = self.net.copy()
    self.net = nx.Graph()
    for source in H.nodes():
        if H.node[source]['type'] == 'subreddit':
            predecessors = nx.predecessor(H, source, cutoff=2)
            for dest in predecessors:
                if (H.node[dest]['type'] == 'subreddit' and source != dest):
                    for via in predecessors[dest]:
                        self.add_weighted_edge([source, dest])
            self.net.node[source]['nUsers'] = H.degree(source)
def find_path(self, start=(0.0, 0.0, 0.0), end=(0.0, 0.0, 0.0)):
    '''find the shortest path between start and end nodes on the graph
    using the euclidean metric instead of the graph metric
    Output: list of nodes of the shortest path and absolute distance
    '''
    if (start == end):  # prevent lack of sensical nodes
        return [], 0.0
    if (len(nx.predecessor(self.graph, start, end)) > 0):  # if reachable
        #path = nx.shortest_path(self.graph, start, end, 'weight')
        path = nx.dijkstra_path(self.graph, start, end, 'weight')
        pl = nx.shortest_path_length(self.graph, start, end, 'weight')
        return path, pl
    else:  # broken graph between start and end; no shortest path
        return [], 0.0
def generate_attributes():
    rpath = '/network/rit/lab/ceashpc/share_data/GraphOpt/datasets/epinions'
    wpath = '/network/rit/lab/ceashpc/share_data/GraphOpt/datasets/epinions'
    fn = 'graph.pkl'
    with open(os.path.join(rpath, fn), 'rb') as rfile:
        graph = pickle.load(rfile)

    num_nodes = graph.number_of_nodes()

    # random walk
    start_node = next_node = np.random.choice(range(num_nodes))
    subgraph = set()
    subgraph.add(start_node)
    restart = 0.1
    count = 1000
    while True:
        if len(subgraph) >= count:
            break
        successors = [node for node in nx.neighbors(graph, next_node)]
        predecessor = [node for node in nx.predecessor(graph, next_node)]
        neighbors = successors + predecessor  # note, python extend not return self
        if np.random.uniform() > restart:
            next_node = np.random.choice(neighbors)
        else:  # restart
            next_node = start_node
        subgraph.add(next_node)
        print(len(subgraph))

    mean_1 = 5.
    mean_2 = 0.
    std = 1.
    attributes = np.zeros(graph.number_of_nodes())
    for node in graph.nodes():
        if node in subgraph:
            attributes[node] = np.random.normal(mean_1, std)
        else:
            attributes[node] = np.random.normal(mean_2, std)

    fn = 'attributes.pkl'
    with open(os.path.join(wpath, fn), 'wb') as wfile:
        pickle.dump({'attributes': attributes, 'subgraph': subgraph}, wfile)
def BuildForest(self):
    """
    Using the Maximum Spanning Tree, we will now 'prune' the tree by removing
    edges whose weight (mutual information) is less than mean(all_weights).

    For each class variable we're trying to predict:
    1. Build a DAG from the Maximum Spanning Tree using the root node from
       self.SetRoots; all edges point away from the root.
    """
    MST = self.MST  ## dictionary(class: list of tuples)

    ## Step 1: Build DAG
    DAG = {}
    for key, mst in MST.items():
        root = self.Root
        pred = nx.predecessor(mst, root)
        edges = []
        weights = []
        ## U: Child
        ## V: Parent, thus Parent can be None for Roots
        for u, v in pred.items():
            if len(v) > 0:
                v = v[0]
                edge_data = mst.get_edge_data(u, v)
                w = -1 * edge_data['weight']
                weights.append(w)
            else:
                v = None
                w = 0
            edges.append((u, v, w))

        avgweight = np.mean(weights)

        ## new rule:
        ## If weight is less than avg, break conditional probs
        final_edges = []
        print(f"\nClass: {key} || Directed Graph \n(child <-- parent): ")
        print("--------------------------------")
        for u, v, w in edges:
            if w < avgweight:
                v = None
            final_edges.append((u, v))
            print(f"{u} <-- {v}")
        DAG[key] = final_edges
    return DAG
def optimal_point_option(g, gr, dest, max_length):
    """Create an option that takes all connected states to dest"""
    paths = nx.predecessor(gr, source=dest, cutoff=max_length)

    I = set(paths.keys())
    I.remove(dest)

    pi = {}
    for src, succ in paths.items():
        if src == dest:
            continue
        # Next link in the path
        succ = succ[0]
        # Choose the maximum probability action for this edge
        actions = [(attrs['action'], attrs['pr'])
                   for src, succ_, attrs in g.edges(src, data=True)
                   if succ_ == succ]
        action = max(actions, key=lambda (a, pr): pr)[0]
        pi[src] = ((action, 1.0),)

    B = {dest: 1.0}

    return Option(I, pi, B)
def BuildDAG(self):
    """
    From the MST, build a DAG by choosing a column to be the root
    """
    MST = self.MST  ## dictionary(class: list of tuples)
    #modelprobs = self.MIresults  ## dictionary {class: dataframe}

    DAG = {}
    for key, mst in MST.items():
        root = self.Roots[key]
        pred = nx.predecessor(mst, root)
        print(pred)
        edges = []
        for u, v in pred.items():
            if len(v) > 0:
                v = v[0]
            else:
                v = None
            ## U: Child
            ## V: Parent, thus Parent can be None for Roots
            edges.append((u, v))
        DAG[key] = edges
    return DAG
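The pattern here, running nx.predecessor from the chosen root over the undirected MST and reading off each node's single BFS parent, is what orients the tree. A small sketch of just that step, assuming a hypothetical four-node tree rooted at "a":

import networkx as nx

mst = nx.Graph([("a", "b"), ("b", "c"), ("b", "d")])  # hypothetical spanning tree
root = "a"

pred = nx.predecessor(mst, root)
edges = [(u, v[0] if v else None) for u, v in pred.items()]
print(edges)
# [('a', None), ('b', 'a'), ('c', 'b'), ('d', 'b')]  -- (child, parent) pairs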
def all_shortest_paths(G, source, target, weight=None):
    if weight is not None:
        pred, dist = nx.dijkstra_predecessor_and_distance(G, source, weight=weight)
    else:
        pred = nx.predecessor(G, source)
    if target not in pred:
        raise nx.NetworkXNoPath()
    stack = [[target, 0]]
    top = 0
    while top >= 0:
        node, i = stack[top]
        if node == source:
            yield [p for p, n in reversed(stack[:top + 1])]
        if len(pred[node]) > i:
            top += 1
            if top == len(stack):
                stack.append([pred[node][i], 0])
            else:
                stack[top] = [pred[node][i], 0]
        else:
            stack[top - 1][1] += 1
            top -= 1
def test_predecessor_cutoff(self):
    G = nx.path_graph(4)
    p = nx.predecessor(G, 0, 3)
    assert_false(4 in p)
def test_predecessor_path(self):
    G = nx.path_graph(4)
    assert_equal(nx.predecessor(G, 0), {0: [], 1: [0], 2: [1], 3: [2]})
    assert_equal(nx.predecessor(G, 0, 3), [2])
# isolates
nx.is_isolate(G, 1)  # False
nx.is_isolate(G, 5)  # True

# HITS
nx.hits(G, max_iter=1000)  # cannot converge?

# maximal independent set
nx.maximal_independent_set(G)

# shortest path
nx.shortest_path(G)  # need "predecessors_iter"
nx.all_pairs_shortest_path(G)
nx.all_pairs_shortest_path_length(G)

nx.predecessor(G, 1)
nx.predecessor(G, 1, 378)

nx.dijkstra_path(G, 1, 300)
nx.dijkstra_path_length(G, 1, 300)
nx.single_source_dijkstra_path(G, 1)
nx.single_source_dijkstra_path_length(G, 1)
nx.all_pairs_dijkstra_path(G)
nx.all_pairs_dijkstra_path_length(G)
nx.bellman_ford(G, 1)

# Traversal
list(nx.dfs_edges(G))
list(nx.dfs_edges(G, 1))
nx.dfs_tree(G)  # return a networkx graph
def all_shortest_paths(G, source, target, weight=None):
    """Compute all shortest paths in the graph.

    Parameters
    ----------
    G : NetworkX graph

    source : node
       Starting node for path.

    target : node
       Ending node for path.

    weight : None or string, optional (default = None)
       If None, every edge has weight/distance/cost 1.
       If a string, use this edge attribute as the edge weight.
       Any edge attribute not present defaults to 1.

    Returns
    -------
    paths : generator of lists
       A generator of all paths between source and target.

    Examples
    --------
    >>> G=nx.Graph()
    >>> G.add_path([0,1,2])
    >>> G.add_path([0,10,2])
    >>> print([p for p in nx.all_shortest_paths(G,source=0,target=2)])
    [[0, 1, 2], [0, 10, 2]]

    Notes
    -----
    There may be many shortest paths between the source and target.

    See Also
    --------
    shortest_path()
    single_source_shortest_path()
    all_pairs_shortest_path()
    """
    if weight is not None:
        pred, dist = nx.dijkstra_predecessor_and_distance(G, source, weight=weight)
    else:
        pred = nx.predecessor(G, source)
    if target not in pred:
        raise nx.NetworkXNoPath()
    stack = [[target, 0]]
    top = 0
    while top >= 0:
        node, i = stack[top]
        if node == source:
            yield [p for p, n in reversed(stack[:top + 1])]
        if len(pred[node]) > i:
            top += 1
            if top == len(stack):
                stack.append([pred[node][i], 0])
            else:
                stack[top] = [pred[node][i], 0]
        else:
            stack[top - 1][1] += 1
            top -= 1
def dep_pathtoroot(sent, child):
    #print(child, nx.predecessor(sent, child), nx.descendants(sent, child), sent[dep_head_of(sent, child)][child]["deprel"])
    return nx.predecessor(sent, child)
import networkx as nx

g = nx.read_edgelist('/home/dchen/git/vbi/sdal/mann2_simulations/src/simulations/lens_recurrent_attitudeDiffusion/edge_list_nx.gz')
nx.predecessor(g, )
def drawSubPedigree(self, db_vervet=None, DG=None, reverseDG=None, outputFnamePrefix=None,
                    monkeyPair=None, monkeyPairData=None, monkeyPair2data=None,
                    minEdgeColor=None, maxEdgeColor=None, sex2NodePropertyList=None,
                    defaultEdgeWidth=25, **keywords):
    """
    2012.9.4 copied from vervet/src/misc.py
    2012.2.10 add argument monkeyCoverageFname to override coverage data from db
    2011-5-6
    """
    sys.stderr.write("Drawing sub-pedigree ... ")
    # nx.draw_circular(DG, with_labels=False, alpha=0.5)
    pylab.clf()
    pylab.axis("off")
    axe_pvalue = pylab.axes([-0, -0, 1, 0.93], frameon=False)  # left gap, bottom gap, width, height.
    pylab.figure(axe_pvalue.figure.number)  # figsize was set at the beginning of the program.
    # fig = matplotlib.pyplot.gcf()
    # fig.set_size_inches(185, 60)
    # pylab.figure(figsize=(100, 60))

    monkey1ID = monkeyPair[0]
    monkey2ID = monkeyPair[1]
    dbID1 = db_vervet.getIndividualDBEntry(ucla_id=monkey1ID).id
    dbID2 = db_vervet.getIndividualDBEntry(ucla_id=monkey2ID).id
    nodeSet = set([dbID1, dbID2])
    # add all ancestors
    for dbID in [dbID1, dbID2]:
        # all ancestors of dbID: nx.predecessor gives the predecessor on the path from dbID
        # to every node in reverseDG; bridgeList only contains the immediate predecessor
        # of node, not the whole path from dbID to node.
        for node, bridgeList in nx.predecessor(reverseDG, source=dbID, target=None).iteritems():
            nodeSet.add(node)
        for node in DG.successors(dbID):  # children of dbID
            nodeSet.add(node)
            for parentNode in DG.predecessors(node):  # other parents of children of dbID
                nodeSet.add(parentNode)
    nodeList = list(nodeSet)
    subDG = DG.subgraph(nodeList)
    sys.stderr.write(" %s nodes %s edges ... " % (subDG.number_of_nodes(), subDG.number_of_edges()))

    title = "outlier: %s-%s, kinship=%.3f, IBD=%.3f, IBDVector=%s" % (
        monkey1ID, monkey2ID, monkeyPairData.kinship, monkeyPairData.IBD,
        monkeyPairData.IBDVectorStr)
    pylab.title(title, fontsize=80)

    layout = "dot"
    pos = nx.graphviz_layout(subDG, prog=layout)
    nx.draw_networkx_edges(subDG, pos, edgelist=subDG.edges(), alpha=0.2,
                           width=self.defaultEdgeWidth, style="dashed", arrows=False)
    self.drawGraphNodes(subDG, pos, sex2NodePropertyList)

    labels = {}  # dictionary to pass into draw_networkx_labels
    for n in subDG.nodes():
        individual = db_vervet.getIndividualDBEntryViaDBID(n)
        labels[n] = individual.code
    nx.draw_networkx_labels(subDG, pos, labels=labels, font_size=50, font_color="k",
                            font_family="sans-serif", font_weight="normal", alpha=0.5, ax=None)
    # nx.draw_graphviz(DG, prog=layout, with_labels=False, alpha=0.5)

    # draw outlier edges in the end because the new axes() would change the current figure in pylab
    if monkeyPair2data and minEdgeColor and maxEdgeColor:
        self.drawOutlierEdge(subDG, db_vervet=db_vervet, pos=pos, monkeyPair2data=monkeyPair2data,
                             minEdgeColor=minEdgeColor, maxEdgeColor=maxEdgeColor,
                             alpha=0.6, edgeWidth=4.0)
    if monkeyPair2data and minEdgeColor and maxEdgeColor:
        self.drawEdgeColorLegend(subDG, pos=pos, minEdgeColor=minEdgeColor, maxEdgeColor=maxEdgeColor)

    pylab.savefig("%s_graphviz_%s_graph.png" % (outputFnamePrefix, layout), dpi=30)
    sys.stderr.write(".\n")
def get_states_connected_to_accept(self):
    S = set([])
    for n in self.graph['accept']:
        pre = list(itertools.chain(*nx.predecessor(self, n).values()))
        S = S.union(set(pre))
    return S
def dsubgraph_nodes(inp, out, nbunch):
    pred = NX.predecessor(G, inp, out)
    nbunch += pred
    for node in pred:
        dsubgraph_nodes(inp, node, nbunch)
    return nbunch