def kosaraju_strongly_connected_components(G, source=None):
    """Return nodes in strongly connected components of a graph.

    Parameters
    ----------
    G : NetworkX Graph
        A directed graph.

    Returns
    -------
    comp : list of lists
        A list of nodes for each strongly connected component of G.
        The list is ordered from largest component to smallest.

    See Also
    --------
    connected_components

    Notes
    -----
    Uses Kosaraju's algorithm.
    """
    components = []
    # Finish order on the reversed graph determines the roots to sweep from.
    post = nx.dfs_postorder(G, source=source, reverse_graph=True)
    seen = {}
    while post:
        r = post.pop()
        if r in seen:
            continue
        # Each DFS from a fresh root collects exactly one strongly
        # connected component.
        c = nx.dfs_preorder(G, r)
        new = [v for v in c if v not in seen]
        seen.update([(u, True) for u in new])
        components.append(new)
    components.sort(key=len, reverse=True)
    return components
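# A minimal usage sketch, not from the original source. On modern networkx
# the helpers above were renamed: dfs_preorder/dfs_postorder became
# dfs_preorder_nodes/dfs_postorder_nodes, and a built-in Kosaraju
# implementation is available.
import networkx as nx

# Toy digraph with two strongly connected components: the cycle {0, 1, 2}
# and the sink {3}.
G = nx.DiGraph([(0, 1), (1, 2), (2, 0), (2, 3)])

print(list(nx.dfs_preorder_nodes(G, source=0)))   # [0, 1, 2, 3]
print(list(nx.dfs_postorder_nodes(G, source=0)))  # [3, 2, 1, 0]
print([sorted(c) for c in nx.kosaraju_strongly_connected_components(G)])
# e.g. [[0, 1, 2], [3]] (component order may vary)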
def buildCyclePathsForLineSegments(segments):
    g = networkx.Graph()

    # First, add the nodes and construct edges between them.
    for i in segments:
        start_node = point_round(i.sx, i.sy)
        end_node = point_round(i.ex, i.ey)
        g.add_node(start_node, startpoint=(i.sx, i.sy, i.width))
        g.add_node(end_node, startpoint=(i.ex, i.ey, i.width))
        g.add_edge(start_node, end_node)

    # Separate into disjoint subcomponents.
    component_nodes = networkx.connected_components(g)

    # Create graphs from node ids for all subcomponents.
    component_graphs = [g.subgraph(i).copy() for i in component_nodes]
    del g

    outline_paths = []
    for subgraph in component_graphs:
        # The component is connected by construction, so it is a pure
        # cycle exactly when every node has degree 2.
        if all([degree == 2 for degree in subgraph.degree().itervalues()]):
            # Traversal of the graph starting from an arbitrary node.
            ordered = networkx.dfs_preorder(subgraph)
            # Path = (x, y) coordinates for each node in the ordered traversal.
            path = [subgraph.node[j]['startpoint'] for j in ordered]
            outline_paths.append(path)
    return outline_paths
def buildCyclePathsForLineSegments(segments):
    g = networkx.Graph()

    # First, add the nodes and construct edges between them.
    for i in segments:
        start_node = point_round(i.sx, i.sy)
        end_node = point_round(i.ex, i.ey)
        g.add_node(start_node, startpoint=(i.sx, i.sy, i.width))
        g.add_node(end_node, startpoint=(i.ex, i.ey, i.width))
        g.add_edge(start_node, end_node)

    # Separate into disjoint subcomponents.
    component_nodes = networkx.connected_components(g)

    # Create graphs from node ids for all subcomponents.
    component_graphs = [g.subgraph(i).copy() for i in component_nodes]
    del g

    outline_paths = []
    for subgraph in component_graphs:
        # The component is connected by construction, so it is a pure
        # cycle exactly when every node has degree 2. degree() may return
        # a dict (networkx 1.x) or a (node, degree) view (2.x); normalise
        # with dict() so we test degree values, not nodes or tuples.
        if all(degree == 2 for degree in dict(subgraph.degree()).values()):
            # Traversal of the graph starting from an arbitrary node.
            ordered = networkx.dfs_preorder(subgraph)
            # Path = (x, y) coordinates for each node in the ordered traversal.
            path = [subgraph.node[j]['startpoint'] for j in ordered]
            outline_paths.append(path)
    return outline_paths
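# A standalone sketch of the same cycle test (assuming modern networkx
# names): a closed square of four segments gives a graph where every node
# has degree 2, and a DFS preorder walks the cycle in order.
import networkx as nx

square = nx.Graph([((0, 0), (1, 0)), ((1, 0), (1, 1)),
                   ((1, 1), (0, 1)), ((0, 1), (0, 0))])
assert all(degree == 2 for degree in dict(square.degree()).values())
print(list(nx.dfs_preorder_nodes(square, source=(0, 0))))
# e.g. [(0, 0), (1, 0), (1, 1), (0, 1)]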
def _execute_in_series(self, updatehash=False, force_execute=None):
    """Executes a pre-defined pipeline in a serial order.

    Parameters
    ----------
    updatehash : boolean
        Allows one to rerun a pipeline and update all the hashes without
        actually executing any of the underlying interfaces. This is
        useful when moving the working directory from one location to
        another. It is also useful when the hashing function itself
        changes (although we hope that this will not happen often).
        default [False]
    force_execute : list of strings
        This forces execution of a node even if updatehash is True
    """
    # In the absence of a dirty bit on the object, generate the
    # parameterization each time before running
    logger.info("Running serially.")
    old_wd = os.getcwd()
    notrun = []
    donotrun = []
    for node in nx.topological_sort(self._execgraph):
        # Assign outputs from dependent executed nodes to current node.
        # The dependencies are stored as data on edges connecting
        # nodes.
        try:
            if node in donotrun:
                continue
            for edge in self._execgraph.in_edges_iter(node):
                data = self._execgraph.get_edge_data(*edge)
                logger.debug('setting input: %s->%s %s',
                             edge[0], edge[1], str(data))
                for sourceinfo, destname in data['connect']:
                    self._set_node_input(node, destname,
                                         edge[0], sourceinfo)
            self._set_output_directory_base(node)
            redo = None
            if force_execute:
                if isinstance(force_execute, str):
                    force_execute = [force_execute]
                redo = any([node.name.lower() == l.lower()
                            for l in force_execute])
            if updatehash and not redo:
                node.run(updatehash=updatehash)
            else:
                node.run(force_execute=redo)
        except:
            os.chdir(old_wd)
            if config.getboolean('execution', 'stop_on_first_crash'):
                raise
            # bare except, but i really don't know where a node might fail
            crashfile = node._report_crash(execgraph=self._execgraph)
            # remove dependencies from queue
            subnodes = nx.dfs_preorder(self._execgraph, node)
            notrun.append(dict(node=node,
                               dependents=subnodes,
                               crashfile=crashfile))
            donotrun.extend(subnodes)
    _report_nodes_not_run(notrun)
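# A hypothetical sketch of the crash handling above: when a node fails,
# a DFS preorder over the execution DAG yields the node plus everything
# that depends on it, which is exactly the set to skip (modern networkx
# spells the call dfs_preorder_nodes).
import networkx as nx

dag = nx.DiGraph([("a", "b"), ("b", "c"), ("a", "d")])
crashed = "b"
skipped = list(nx.dfs_preorder_nodes(dag, source=crashed))
print(skipped)  # ['b', 'c'] -- 'd' does not depend on 'b' and still runs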
def _remove_node_deps(self, jobid, crashfile):
    # Everything reachable from the crashed node depends on it, so mark
    # the whole DFS-preorder subtree as done and no longer pending.
    subnodes = nx.dfs_preorder(self._execgraph, self.procs[jobid])
    for node in subnodes:
        idx = self.procs.index(node)
        self.proc_done[idx] = True
        self.proc_pending[idx] = False
    return dict(node=self.procs[jobid],
                dependents=subnodes,
                crashfile=crashfile)
def check_if_open(self):
    """Check if the partition is open according to the given poset."""
    for block in self.graph.nodes():
        # Build the union of the filters of every node in the current
        # block (the filter of a vertex is its DFS-reachable up-set).
        filtro = set()
        for vertex in self.partition_elements[block]:
            filtro = filtro.union(set(nx.dfs_preorder(self.poset, source=vertex)))

        # Build the union of the blocks containing at least one node
        # of the filter.
        blocks_union = set()
        for vertex in filtro:
            for candidate_block in self.partition_elements:
                if vertex in candidate_block:
                    blocks_union = blocks_union.union(candidate_block)
                    break

        # The partition is open iff the two sets coincide for every block.
        if filtro != blocks_union:
            return False
    return True
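# A small illustration (assuming modern networkx names): with the poset
# drawn as a DAG whose edges point upward, the DFS preorder from a vertex
# is its principal filter -- the vertex together with everything above it.
import networkx as nx

poset = nx.DiGraph([("a", "b"), ("a", "c"), ("b", "d")])
print(set(nx.dfs_preorder_nodes(poset, source="a")))  # {'a', 'b', 'c', 'd'}
print(set(nx.dfs_preorder_nodes(poset, source="b")))  # {'b', 'd'}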
def _generate_expanded_graph(graph_in):
    """Generates an expanded graph based on node parameterization.

    Parameterization is controlled using the `iterables` field of the
    pipeline elements. Thus if there are two nodes with iterables
    a=[1,2] and b=[3,4] this procedure will generate a graph with
    sub-graphs parameterized as (a=1,b=3), (a=1,b=4), (a=2,b=3) and
    (a=2,b=4).
    """
    logger.debug("PE: expanding iterables")
    moreiterables = True
    # Convert list of tuples to dict fields.
    for node in graph_in.nodes():
        if isinstance(node.iterables, tuple):
            node.iterables = [node.iterables]
    for node in graph_in.nodes():
        if isinstance(node.iterables, list):
            node.iterables = dict(map(lambda(x): (x[0], lambda: x[1]),
                                      node.iterables))
    while moreiterables:
        # Process iterable nodes from the bottom of the pipeline upwards.
        nodes = nx.topological_sort(graph_in)
        nodes.reverse()
        inodes = [node for node in nodes if len(node.iterables.keys()) > 0]
        if inodes:
            node = inodes[0]
            iterables = node.iterables.copy()
            node.iterables = {}
            node._id += 'I'
            # Only the subgraph reachable from the iterable node needs
            # to be copied per parameter value.
            subnodes = nx.dfs_preorder(graph_in, node)
            subgraph = graph_in.subgraph(subnodes)
            graph_in = _merge_graphs(graph_in, subnodes, subgraph,
                                     node._id, iterables)
        else:
            moreiterables = False
    logger.debug("PE: expanding iterables ... done")
    return graph_in
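# A sketch of why dfs_preorder is used here (hypothetical pipeline,
# modern networkx names): expanding an iterable node only needs the
# subgraph that lies downstream of it.
import networkx as nx

pipeline = nx.DiGraph([("preproc", "smooth"),
                       ("smooth", "model"),
                       ("preproc", "qc")])
subnodes = list(nx.dfs_preorder_nodes(pipeline, source="smooth"))
subgraph = pipeline.subgraph(subnodes)
print(subnodes)                # ['smooth', 'model']
print(list(subgraph.edges()))  # [('smooth', 'model')]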
            activeVars.add(e["from"])
        for zeroVar in set(influencing) - activeVars:
            validDomain[zeroVar] = range(1)  # should it be the base value?
        inputvals = [x for x in itertools.product(*validDomain.values())]
        for x in inputvals:
            inpVal = zip(tuple(validDomain.keys()), x)
            inpVal.sort()
            table[tuple(inpVal)] = param["val"]
    return table

focalParamsValues = {}
# get the genes with the smallest focal table
# genes = get_best_order(nxG)
genes = nx.dfs_preorder(nxG)
# for the SP6 model, immediate answer, optimal, obtained with node order heuristic applied on Fz1
# genes = ['Fz1', 'Dsh1', 'Nkd1', 'Slp1', 'En1', 'Ci1', 'Ciact1', 'Wg1', 'Cirep1', 'Hh1', 'Ptc1', 'Pka1', 'Fz2', 'Dsh2', 'Nkd2', 'Slp2', 'En2', 'Ci2', 'Ciact2', 'Wg2', 'Cirep2', 'Hh2', 'Ptc2', 'Pka2', 'Fz3', 'Dsh3', 'Nkd3', 'Slp3', 'En3', 'Ci3', 'Ciact3', 'Wg3', 'Cirep3', 'Hh3', 'Ptc3', 'Pka3', 'Fz4', 'Dsh4', 'Nkd4', 'Slp4', 'En4', 'Ci4', 'Ciact4', 'Wg4', 'Cirep4', 'Hh4', 'Ptc4', 'Pka4', 'Fz5', 'Dsh5', 'Nkd5', 'Slp5', 'En5', 'Ci5', 'Ciact5', 'Wg5', 'Cirep5', 'Hh5', 'Fz6', 'Dsh6', 'Nkd6', 'Ptc5', 'Pka5', 'Slp6', 'En6', 'Ci6', 'Ciact6', 'Ptc6', 'Pka6', 'Cirep6', 'Hh6', 'Wg6']
print ",".join(genes)

def ssAreCompatible(ss1, ss2):
    # assume the shortest is ss2
    d1 = dict(ss1)
    for k, v in ss2:
        if k in d1 and d1[k] != v:
            return False
    return True
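# A toy version of the ordering heuristic above (hypothetical gene graph,
# modern networkx name): a DFS preorder keeps interacting genes adjacent
# in the resulting order, which is why it serves as a cheap node-order
# heuristic.
import networkx as nx

nxG = nx.DiGraph([("Fz1", "Dsh1"), ("Dsh1", "En1"), ("Fz1", "Nkd1")])
genes = list(nx.dfs_preorder_nodes(nxG, source="Fz1"))
print(",".join(genes))  # Fz1,Dsh1,En1,Nkd1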
def test_preorder(self):
    assert_equal(nx.dfs_preorder(self.G, source=0), [0, 1, 2, 4, 3])
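# One fixture consistent with the asserted order -- an assumption, since
# the test's setUp is not shown: DFS from 0 dives 0-1-2-4 and then
# backtracks to pick up 3 (modern name shown).
import networkx as nx

G = nx.Graph([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4)])
print(list(nx.dfs_preorder_nodes(G, source=0)))  # [0, 1, 2, 4, 3]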
def send_map(self, stamp):
    if len(self.skel.nodes) < 4:
        return

    TG = nx.MultiDiGraph()
    for i in self.skel.nodes:
        TG.add_node(i)
    for (a, b, p, c) in self.skel.every_edge + list(self.wheel_odom_edges):
        TG.add_edge(a, b, (p, c))
    here = max(self.skel.nodes)

    # TG is the total graph, G is the local subgraph
    uTG = TG.to_undirected()
    print "uTG", uTG.nodes(), uTG.edges()
    close = set([here])
    for i in range(self.optimization_distance):
        close |= set(nx.node_boundary(uTG, close))
    print "close", close
    G = TG.subgraph(close)
    uG = uTG.subgraph(close)

    pg = TreeOptimizer3()

    def mk_covar(xyz, rp, yaw):
        return (1.0 / math.sqrt(xyz), 1.0 / math.sqrt(xyz), 1.0 / math.sqrt(xyz),
                1.0 / math.sqrt(rp), 1.0 / math.sqrt(rp), 1.0 / math.sqrt(yaw))
    weak = mk_covar(9e10, 3, 3)
    strong = mk_covar(0.0001, 0.000002, 0.00002)

    pg.initializeOnlineOptimization()

    def pgadd(p, n, data):
        relpose, strength = data
        if strength == 0.0:
            cov = weak
        else:
            cov = strong
        pg.addIncrementalEdge(p, n, relpose.xform(0, 0, 0), relpose.euler(), cov)

    Gedges = G.edges(data=True)
    revmap = {}
    map = {}
    # Walk the local subgraph in DFS preorder so every node after the
    # start is discovered through an already-mapped neighbour.
    for n in nx.dfs_preorder(uG, here):
        revmap[len(map)] = n
        map[n] = len(map)
        priors = [p for p in uG.neighbors(n) if p in map]
        if priors == []:
            print "START NODE", n, "as", map[n]
        else:
            print "NEXT NODE", n
            p = priors[0]
            if not G.has_edge(p, n):
                n, p = p, n
            data = G.get_edge_data(p, n)[0]
            Gedges.remove((p, n, data))
            pgadd(map[p], map[n], data)
    for (p, n, d) in Gedges:
        pgadd(map[p], map[n], d)

    pg.initializeOnlineIterations()
    start_error = pg.error()
    for i in range(10):
        pg.iterate()
    print "Error from", start_error, "to", pg.error()
    pg.recomputeAllTransformations()

    print self.tf.getFrameStrings()
    target_frame = "base_link"
    t = self.tf.getLatestCommonTime("wide_stereo_optical_frame", target_frame)
    trans, rot = self.tf.lookupTransform(target_frame, "wide_stereo_optical_frame", t)
    xp = Pose()
    xp.fromlist(self.tf.fromTranslationRotation(trans, rot))

    roadmap_nodes = dict([(n, (30, 0, 0)) for n in TG.nodes()])
    nl = sorted(roadmap_nodes.keys())
    print "nl", nl
    for i in sorted(map.values()):
        xyz, euler = pg.vertex(i)
        p = from_xyz_euler(xyz, euler)
        pose_in_target = xp * p
        x, y, z = pose_in_target.xform(0, 0, 0)
        euler = pose_in_target.euler()
        print x, y, z, euler
        roadmap_nodes[revmap[i]] = (x, y, euler[2])

    roadmap_edges = []
    for (p, n) in set(TG.edges()):
        print p, n
        best_length = max([(confidence, pose.distance())
                           for (pose, confidence) in TG.get_edge_data(p, n).values()])[1]
        roadmap_edges.append((p, n, best_length))

    msg = vslam.msg.Roadmap()
    msg.header.stamp = stamp
    msg.header.frame_id = "base_link"
    msg.nodes = [vslam.msg.Node(*roadmap_nodes[n]) for n in nl]
    print "(p,n)", [(p, n) for (p, n, l) in roadmap_edges]
    msg.edges = [vslam.msg.Edge(nl.index(p), nl.index(n), l)
                 for (p, n, l) in roadmap_edges]
    msg.localization = nl.index(here)
    self.pub.publish(msg)

    if 1:
        import matplotlib.pyplot as plt
        fig = plt.figure(figsize=(12, 6))
        plt.subplot(121)
        pos = nx.spring_layout(TG, iterations=1000)
        nx.draw(TG, pos, node_color='#A0CBE2')
        nx.draw_networkx_edges(G, pos, with_labels=False, edge_color='r',
                               alpha=0.5, width=25.0, arrows=False)
        plt.subplot(122)
        nx.draw(G, pos=dict([(n, roadmap_nodes[n][:2]) for n in G.nodes()]))
        # plt.show()
        plt.savefig("/tmp/map_%d.png" % here)
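# Why dfs_preorder suits incremental pose-graph construction (a sketch,
# modern networkx name): after the root, every node is discovered through
# an already-visited neighbour, so each new pose can be anchored to a
# previously added one.
import networkx as nx

uG = nx.Graph([(1, 2), (2, 3), (1, 4)])
seen = set()
for n in nx.dfs_preorder_nodes(uG, source=1):
    priors = [p for p in uG.neighbors(n) if p in seen]
    print(n, "anchored to", priors[0] if priors else "START")
    seen.add(n)
# 1 START / 2 anchored to 1 / 3 anchored to 2 / 4 anchored to 1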