def test_dfs_very_deep_graph(self):
    """DFS over a 20001-node path must not permanently alter the recursion limit."""
    gr = pygraph.classes.graph.graph()
    gr.add_nodes(range(0, 20001))
    for i in range(0, 20000):
        gr.add_edge((i, i + 1))
    saved_limit = getrecursionlimit()
    depth_first_search(gr, 0)
    # The implementation may raise the limit internally; it must restore it.
    assert getrecursionlimit() == saved_limit
def test_mutual_accessibility_in_digraph(self):
    """mutual_accessibility must agree with pairwise DFS reachability."""
    gr = testlib.new_digraph()
    ma = mutual_accessibility(gr)
    for u in gr:
        for v in gr:
            forward = v in depth_first_search(gr, u)[0]
            backward = u in depth_first_search(gr, v)[0]
            if v in ma[u]:
                assert forward
                assert backward
            else:
                assert not (forward and backward)
def testGraphDFS(self):
    """A find()-filtered DFS stops once the target node is discovered."""
    G = pygraph.graph()
    G.add_nodes([1, 2, 3, 4, 5])
    # Edge insertion order matters: DFS visits neighbours in this order.
    for u, v in ((1, 2), (2, 3), (2, 4), (4, 5), (1, 5), (3, 5)):
        G.add_edge(u, v)
    st, pre, post = depth_first_search(G, 1, filter=filters.find(5))
    assert st == {1: None, 2: 1, 3: 2, 5: 3}
    st, pre, post = depth_first_search(G, 1, filter=filters.find(2))
    assert st == {1: None, 2: 1}
def test_connected_components_in_graph(self):
    """Two nodes share a component exactly when DFS reaches one from the other."""
    gr = testlib.new_graph()
    gr.add_nodes(['a', 'b', 'c'])
    gr.add_edge(('a', 'b'))
    cc = connected_components(gr)
    for u in gr:
        # One DFS per source node, reused for every target.
        reachable = depth_first_search(gr, u)[0]
        for v in gr:
            if cc[u] == cc[v]:
                assert v in reachable
            else:
                assert v not in reachable
def test_connected_components_in_graph(self):
    """connected_components labels must match DFS reachability."""
    gr = testlib.new_graph()
    gr.add_nodes(['a', 'b', 'c'])
    gr.add_edge(('a', 'b'))
    labels = connected_components(gr)
    for source in gr:
        found = depth_first_search(gr, source)[0]
        for target in gr:
            same_component = labels[source] == labels[target]
            assert same_component == (target in found)
def test_mutual_accessibility_in_graph(self):
    """mutual_accessibility agrees with DFS reachability in both directions."""
    gr = testlib.new_graph()
    gr.add_nodes(['a', 'b', 'c'])
    gr.add_edge(('a', 'b'))
    gr.add_edge(('a', 'c'))
    ma = mutual_accessibility(gr)
    for n in gr:
        for m in gr:
            forward = m in depth_first_search(gr, n)[0]
            backward = n in depth_first_search(gr, m)[0]
            if m in ma[n]:
                assert forward
                assert backward
            else:
                assert not forward or not backward
def test_accessibility_in_digraph(self):
    """accessibility() must match DFS reachability from every node."""
    gr = testlib.new_digraph()
    gr.add_nodes(['a', 'b', 'c'])
    gr.add_edge(('a', 'b'))
    gr.add_edge(('a', 'c'))
    ac = accessibility(gr)
    for n in gr:
        reached = depth_first_search(gr, n)[0]
        for m in gr:
            assert (m in ac[n]) == (m in reached)
def test_accessibility_in_digraph(self):
    """Every node listed by accessibility() is DFS-reachable, and vice versa."""
    gr = testlib.new_digraph()
    gr.add_nodes(['a', 'b', 'c'])
    gr.add_edge(('a', 'b'))
    gr.add_edge(('a', 'c'))
    ac = accessibility(gr)
    for src in gr:
        tree = depth_first_search(gr, src)[0]
        for dst in gr:
            if dst in ac[src]:
                assert dst in tree
            else:
                assert dst not in tree
def test_minimal_spanning_tree_on_graph(self):
    """Verify minimality: any single parent-edge swap that lowers the tree
    weight must disconnect the tree (shrink the DFS cover from root 0)."""
    gr = new_graph(wt_range=(1, 10))
    mst = minimal_spanning_tree(gr, root=0)
    wt = tree_weight(gr, mst)
    len_dfs = len(depth_first_search(gr, root=0)[0])
    for each in mst:
        # Skip the root: it has no parent edge to swap.
        if mst[each] is not None:
            mst_copy = deepcopy(mst)
            del mst_copy[each]
            for other in gr[each]:
                mst_copy[each] = other
                if tree_weight(gr, mst_copy) < wt:
                    # A cheaper variant must no longer span the whole graph.
                    gr2 = graph()
                    add_spanning_tree(gr2, mst_copy)
                    assert len(depth_first_search(gr2, root=0)[0]) < len_dfs
def test_mutual_accessibility_in_graph(self):
    """Pairs are mutually accessible iff DFS reaches each from the other."""
    gr = testlib.new_graph()
    gr.add_nodes(['a', 'b', 'c'])
    gr.add_edge(('a', 'b'))
    gr.add_edge(('a', 'c'))
    ma = mutual_accessibility(gr)
    for n in gr:
        for m in gr:
            there = m in depth_first_search(gr, n)[0]
            back = n in depth_first_search(gr, m)[0]
            assert (m in ma[n]) == (there and back)
def test_minimal_spanning_tree_on_graph(self):
    """Verify minimality: any single parent-edge swap that lowers the tree
    weight must disconnect the tree (shrink the DFS cover from root 0)."""
    gr = testlib.new_graph(wt_range=(1, 10))
    mst = minimal_spanning_tree(gr, root=0)
    wt = tree_weight(gr, mst)
    len_dfs = len(depth_first_search(gr, root=0)[0])
    for each in mst:
        # Skip the root: it has no parent edge to swap.
        if mst[each] is not None:
            mst_copy = deepcopy(mst)
            del mst_copy[each]
            for other in gr[each]:
                mst_copy[each] = other
                if tree_weight(gr, mst_copy) < wt:
                    # A cheaper variant must no longer span the whole graph.
                    gr2 = graph()
                    add_spanning_tree(gr2, mst_copy)
                    assert len(depth_first_search(gr2, root=0)[0]) < len_dfs
def gen_gv(graph, word):
    """
    Given the source word for definition, build the gv graph based on
    depth first search result on the given graph.

    Writes the rendered output under 'output/<word>.gv'.
    (Python 2 code: uses print statements and str.decode.)
    """
    # DFS spanning tree rooted at the query word.
    st, pre, post = depth_first_search(graph, root=word)
    gst = digraph()
    gst.add_spanning_tree(st)
    # Mirror the pygraph spanning tree into a graphviz Digraph.
    dot = Digraph(comment=word)
    nodes = gst.nodes()
    edges = gst.edges()
    for node in nodes:
        dot.node(node)
    for edge in edges:
        dot.edge(edge[0], edge[1])
    print dot.source
    word = word.decode('utf-8')  # Python 2: bytes -> unicode for the path
    gv_path = 'output/' + word + '.gv'
    # dot.render(gv_path, view=True)
    outf = codecs.open(gv_path, 'w', 'utf-8')
    # NOTE(review): graphviz's Digraph.render() returns the *path* of the
    # rendered file, not the dot source; writing that return value into
    # gv_path likely overwrites the just-rendered source file with a
    # filename string. Confirm against the graphviz package docs.
    outf.write(dot.render(gv_path, view=True))
    outf.close()
def write_graphs_to_dots(self):
    """Dump the package digraph and its BFS/DFS spanning trees as dot files."""
    assert self.build_graph
    self._load_packages()
    from pygraph.readwrite import dot
    base = self.output_dir

    with open(join(base, 'digraph.dot'), 'w') as fh:
        fh.write(dot.write(self.digraph))

    with open(join(base, 'bfs.dot'), 'w') as fh:
        st, order = breadth_first_search(self.digraph)
        bfs_tree = digraph()
        bfs_tree.add_spanning_tree(st)
        fh.write(dot.write(bfs_tree))

    with open(join(base, 'dfs.dot'), 'w') as fh:
        st, pre, post = depth_first_search(self.digraph)
        dfs_tree = digraph()
        dfs_tree.add_spanning_tree(st)
        fh.write(dot.write(dfs_tree))
def get_connected(self, obj, aklass=None):
    """Return all objects reachable from obj, optionally restricted to
    exact instances of aklass."""
    st, pre, post = depth_first_search(self.gr, root=obj.id)
    found = [self.obj_by_id[k] for k in st.keys()]
    if aklass:
        return [o for o in found if o.__class__ == aklass]
    return found
def main(): sweep() print node_par print dg2 tranverse(dg2) print dg2 st, pre, post = depth_first_search(dg2, root=0) print st
def main(): sweep() print node_par print dg2 tranverse(dg2) print dg2 st,pre,post = depth_first_search(dg2,root=0) print st
def testDigraphDFS(self):
    """A radius(2)-filtered DFS only descends two levels from the root."""
    G = pygraph.digraph()
    G.add_nodes([1, 2, 3, 4, 5, 6, 7, 8, 9])
    # Edge insertion order matters for the asserted spanning tree.
    for u, v in ((1, 2), (1, 3), (2, 4), (3, 5), (4, 6), (5, 7)):
        G.add_edge(u, v)
    G.add_edge(1, 8, wt=3)
    G.add_edge(7, 8, wt=3)
    G.add_edge(8, 9)
    G.add_edge(3, 9)
    st, pre, post = depth_first_search(G, 1, filter=filters.radius(2))
    assert st == {1: None, 2: 1, 3: 1, 4: 2, 5: 3, 9: 3}
    st, pre, post = depth_first_search(G, 7, filter=filters.radius(2))
    assert st == {7: None}
def testSanityDigraph(self):
    """DFS pre/post orderings are consistent with the spanning-tree parents."""
    G = pygraph.digraph()
    G.generate(100, 500)
    st, pre, post = depth_first_search(G)
    for each in G:
        if st[each] is not None:  # roots have no parent
            # A node is discovered after and finished before its parent.
            assert pre.index(each) > pre.index(st[each])
            assert post.index(each) < post.index(st[each])
def test_dfs_in_digraph(self):
    """A find() filter halts the search once the target node is reached."""
    gr = testlib.new_digraph()
    gr.add_node('find-me')
    gr.add_node('dont-find-me')
    gr.add_edge((0, 'find-me'))
    gr.add_edge(('find-me', 'dont-find-me'))
    spanning = depth_first_search(gr, root=0, filter=find('find-me'))[0]
    assert spanning['find-me'] == 0
    assert 'dont-find-me' not in spanning
def test_dfs_in_digraph(self):
    """DFS orders and spanning tree are mutually consistent on a random digraph."""
    gr = testlib.new_digraph()
    st, pre, post = depth_first_search(gr)
    for each in gr:
        if st[each] is not None:
            # Children are discovered after and finished before their parent.
            assert pre.index(each) > pre.index(st[each])
            assert post.index(each) < post.index(st[each])
    for node in st:
        # Every tree edge must exist in the digraph; check the None (root)
        # case first so has_edge is never asked about a (None, node) pair.
        assert st[node] is None or gr.has_edge((st[node], node))
def get_connected(self, obj, aklass=None):
    """Objects reachable from obj via DFS; optionally filtered by exact class."""
    spanning_tree = depth_first_search(self.gr, root=obj.id)[0]
    if aklass:
        return [self.obj_by_id[k] for k in spanning_tree
                if self.obj_by_id[k].__class__ == aklass]
    return [self.obj_by_id[k] for k in spanning_tree]
def test_dfs_in_digraph(self):
    """DFS with a find filter stops expanding past the matched node."""
    gr = testlib.new_digraph()
    gr.add_node('find-me')
    gr.add_node('dont-find-me')
    gr.add_edge((0, 'find-me'))
    gr.add_edge(('find-me', 'dont-find-me'))
    tree = depth_first_search(gr, root=0, filter=find('find-me'))[0]
    assert tree['find-me'] == 0
    assert 'dont-find-me' not in tree
def output_graph(graph, root=None):
    """
    Returns a list containing:
    - the result of the depth_first_search() function starting at 'root'
      (a tuple of spanning tree, preorder and postorder)
    - a dot format output of the given graph (display it using the
      graphviz dotty command)
    """
    dfs = depth_first_search(graph, root)
    dot = write(graph)
    return [dfs, dot]
def discourse_units(self):
    """Build the digraph of discourse-structure edges and return its nodes
    in DFS preorder starting from root "k0"."""
    discourse_units = digraph()
    for tup in self.tuples:
        # Keep only discourse-structure edges.
        if (tup.structure == "discourse"
                or tup.edge_type == "dominates"
                or "subordinates" in tup.edge_type):
            if tup.from_node not in discourse_units.nodes():
                discourse_units.add_node(tup.from_node)
            if tup.to_node not in discourse_units.nodes():
                discourse_units.add_node(tup.to_node)
            discourse_units.add_edge((tup.from_node, tup.to_node))
    st, order_pre, order_post = depth_first_search(discourse_units, root="k0")
    return order_pre
def is_ordered(node, list):
    """True if `node` may legally follow the nodes already on `list`:
    either a direct parent of node is listed, or no listed node is
    DFS-reachable from node (i.e. no possible ancestor on the list).

    NOTE: parameter name `list` shadows the builtin; kept for interface
    compatibility. Relies on the module-level graph `gr`.
    """
    # Accept immediately when some listed node is node's parent.
    if any(gr.has_edge((each, node)) for each in list):
        return True
    # Reject when any listed node is reachable from node.
    st, pre, post = depth_first_search(gr, node)
    return not any(each in st for each in list)
def testDigraphDFS(self):
    """A find(5)-filtered DFS records the path discovered up to node 5."""
    G = pygraph.digraph()
    G.add_nodes([1, 2, 3, 4, 5, 6])
    # Edge insertion order matters for the asserted spanning tree.
    for src, dst in ((1, 2), (1, 3), (2, 4), (4, 3), (5, 1), (3, 5), (5, 6)):
        G.add_edge(src, dst)
    st, pre, post = depth_first_search(G, 1, filter=filters.find(5))
    assert st == {1: None, 2: 1, 3: 4, 4: 2, 5: 3}
def testDigraph(self):
    """Unfiltered DFS yields the expected spanning tree and orderings."""
    G = pygraph.digraph()
    G.add_nodes([1, 2, 3, 4, 5])
    # Edge insertion order matters for the asserted orders.
    for src, dst in ((1, 2), (2, 3), (2, 4), (4, 5), (1, 5), (3, 5)):
        G.add_edge(src, dst)
    st, pre, post = depth_first_search(G)
    assert st == {1: None, 2: 1, 3: 2, 4: 2, 5: 3}
    assert pre == [1, 2, 3, 5, 4]
    assert post == [5, 3, 4, 2, 1]
def topological_sorting(graph):
    """
    Topological sorting.

    @attention: Topological sorting is meaningful only for directed acyclic graphs.

    @type  graph: digraph
    @param graph: Graph.

    @rtype:  list
    @return: Topological sorting for the graph.
    """
    # The topological sorting of a DAG is equivalent to its reverse postordering.
    _, _, postorder = depth_first_search(graph)
    return postorder[::-1]
def test_topological_sorting_on_tree(self):
    """Topological order of a DFS tree puts each parent before its child."""
    gr = testlib.new_graph()
    st, pre, post = depth_first_search(gr)

    # Rebuild the spanning tree as an explicit digraph of parent->child edges.
    tree = pygraph.classes.digraph.digraph()
    for child, parent in st.items():
        if not parent:
            continue
        if child not in tree.nodes():
            tree.add_node(child)
        if parent not in tree.nodes():
            tree.add_node(parent)
        tree.add_edge((parent, child))

    ts = topological_sorting(tree)
    for child in ts:
        if st[child]:
            assert ts.index(child) > ts.index(st[child])
def _reorder_instructions(self):
    """Linearize instructions in reverse DFS postorder of the CFG and
    renumber their positions."""
    cfg = digraph()
    cfg.add_nodes(self.blocks)
    for block in self.blocks:
        for succ in block.successors:
            cfg.add_edge((block, succ))
    # DFS starts at the (single) entry block, if any.
    entry = next((b for b in self.blocks if b.type & bbl.BasicBlock.ENTRY), None)
    _, _, post = depth_first_search(cfg, entry)
    instrs = []
    for block in reversed(post):
        instrs += block.instrs
    for pos, ins in enumerate(instrs):
        ins.pos = pos
    return instrs
def test_dfs_in_digraph(self):
    """radius(3)-filtered DFS never reaches deeper than 3 levels from root 0.

    NOTE(review): despite the name, this test builds an undirected graph via
    testlib.new_graph(); confirm whether new_digraph() was intended.
    """
    gr = testlib.new_graph()
    st, pre, post = depth_first_search(gr, root=0, filter=radius(3))
    for each in st:
        # Walk at most three parent links; one must hit the root, or the
        # node is the root itself (no parent). Short-circuiting keeps the
        # deeper lookups safe.
        assert (st[each] is None
                or st[each] == 0
                or st[st[each]] == 0
                or st[st[st[each]]] == 0)
def test_dfs_in_empty_graph(self):
    """Filtered DFS over an empty graph yields empty results."""
    gr = graph()
    spanning, preorder, postorder = depth_first_search(gr, filter=radius(2))
    assert spanning == {}
    assert preorder == []
    assert postorder == []
def query2(k, d):
    """Return the names of the k tags whose interested-person graphs have the
    largest connected components, considering only persons born on/after d.

    k: number of tag names to return.
    d: cutoff birthday as a 'YYYY-MM-DD' string.
    Results are ordered by component size (descending) then name, and
    joined with single spaces. Reads CSVs from the module-level
    input_directory.
    """
    number_of_tags = k
    year, month, day = (int(part) for part in d.split('-'))
    cutoff = date(year, month, day)

    # Persons born on or after the cutoff date.
    person_set = set()
    file_obj_person = open(input_directory + '/person.csv')
    person_reader = csv.DictReader(file_obj_person, delimiter='|')
    for item in person_reader:
        y, m, dd = (int(part) for part in item['birthday'].split('-'))
        if not (date(y, m, dd) < cutoff):
            person_set.add(item['id'])
    file_obj_person.close()

    # Tag id -> interested persons / tag id -> display name.
    tag_dict = {}
    tag_dict_names = {}
    file_obj_tag = open(input_directory + '/tag.csv')
    for item in csv.DictReader(file_obj_tag, delimiter='|'):
        tag_dict[item['id']] = set()
        tag_dict_names[item['id']] = item['name']
    file_obj_tag.close()

    # Person interests, restricted to qualifying persons.
    file_obj = open(input_directory + '/person_hasInterest_tag.csv')
    for item in csv.DictReader(file_obj, delimiter='|'):
        if item['Person.id'] in person_set:
            tag_dict[item['Tag.id']].add(item['Person.id'])
    file_obj.close()

    # person-knows-person adjacency among qualifying persons.
    person_knows_dict = {}
    file_obj = open(input_directory + '/person_knows_person.csv')
    file_reader = csv.DictReader(file_obj, ('person_1', 'person_2'),
                                 delimiter='|')
    next(file_reader)  # skip the header row (explicit fieldnames given)
    for item in file_reader:
        # BUGFIX: the second membership test used person_1 twice.
        if item['person_1'] in person_set and item['person_2'] in person_set:
            # BUGFIX: the first neighbour was dropped when the key was new
            # (the original assigned [] without appending person_2).
            person_knows_dict.setdefault(item['person_1'],
                                         []).append(item['person_2'])
    file_obj.close()

    # For each tag, measure the largest DFS-reachable set (preorder length).
    tag_graph_list = []
    for tag_id in tag_dict:
        gr = graph()
        gr.add_nodes(tag_dict[tag_id])
        for person in tag_dict[tag_id]:
            for friend in person_knows_dict.get(person, ()):
                try:
                    gr.add_edge((person, friend))
                except Exception:  # duplicate edge or endpoint not in graph
                    pass
        graph_range = 0
        for person in tag_dict[tag_id]:
            component_size = len(depth_first_search(gr, person)[1])
            if graph_range < component_size:
                graph_range = component_size
        tag_graph_list.append((tag_dict_names[tag_id], graph_range))

    sorted_list = sorted(tag_graph_list, key=lambda x: (-x[1], x[0]))
    return ' '.join(name for name, _ in sorted_list[0:number_of_tags])
def _checknrun( self, node=None ):  # pylint:disable=too-many-branches,too-many-statements
    """Check and run each class.

    Assumes all storage and other initialization is complete already.

    Recursively processes every dependency (DFS postorder of ``self.graph``
    rooted at this node) before attempting this node, honouring the
    per-node retry bookkeeping in ``self._retry``. The sequence per node
    is check -> (on failure) run -> recheck.
    """
    # pylint:disable=protected-access
    if not node:
        node = self._root
    nodename = self._class_name(node)
    # Postorder puts dependencies before nodename itself.
    _, _, post = depth_first_search(self.graph, root=nodename)
    # Run dependent nodes and see if all were successful
    blocked = False
    for depnode in post:
        if depnode == nodename:
            continue
        # Run dependent nodes if not already successful
        if not self.nodestatus.get(depnode):
            self._checknrun(depnode)
        # Were all dependent nodes happy?
        if not self.nodestatus.get(depnode):
            blocked = True
    # Set as untested if not visited yet
    if nodename not in self.nodestatus:
        self._node_untested(nodename)
    if not blocked:
        if not self._run_phase > self._retry[nodename]['lastphase']:
            # Already tried - skip
            return
        if not self._retry[nodename]['tries']:
            # Too many tries - abort
            return
        # Wait until this node's next retry window opens.
        sleeptime = self._retry[nodename]['nexttry'] - time.time()
        if sleeptime > 0:
            time.sleep(sleeptime)
        self._retry[nodename]['nexttry'] = (
            time.time() + self._retry[nodename]['retry_delay']
        )
        self._retry[nodename]['lastphase'] = self._run_phase
        self._retry[nodename]['tries'] -= 1
        try:
            # Instantiate the Job class for this node.
            obj = self._classmap[nodename]()
        except Exception as err:
            self._log.error(
                'Could not create Job "%s" - check its __init__', nodename
            )
            if self._verbose:
                sys.stderr.write('%s.check: fail\n' % nodename)
            if self._debug:
                sys.stderr.write(
                    ' Could not create job, error was {}\n'.format(
                        traceback.format_exc().strip().replace('\n', '\n ')
                    )
                )
            self._node_failed(nodename, err)
            return
        self._checknrun_storage[nodename] = {}
        obj._set_storage(
            self._checknrun_storage[nodename],
            self._checknrun_storage['__global'],
        )
        self._objsrun.append(obj)
        # check / run / check
        try:
            os.chdir(self._checknrun_cwd)
            obj._check_phase = 'check'
            obj._check_results = obj.Check(*self._args, **self._kwargs)
            self._node_succeeded(nodename, obj._check_results)
            if self._verbose:
                sys.stderr.write('{}.check: pass\n'.format(nodename))
        except Exception as err:  # pylint:disable=broad-except
            obj._check_exception = err
            if self._verbose:
                sys.stderr.write('%s.check: fail\n' % nodename)
            # In no_act mode, we only run the first check
            # and get out of dodge.
            if self._no_act:
                self._node_failed(nodename, err)
                if self._debug:
                    sys.stderr.write(
                        ' Error was:\n '
                        '{}\n'.format(
                            traceback.format_exc()
                            .strip()
                            .replace('\n', '\n ')
                        )
                    )
                return
            # Run the Run method, which may fail with
            # wild abandon - we'll be doing a recheck anyway.
            try:
                os.chdir(self._checknrun_cwd)
                obj._run_results = obj.Run(*self._args, **self._kwargs)
                if self._verbose:
                    sys.stderr.write('%s.run: pass\n' % nodename)
            except Exception as err:  # pylint:disable=broad-except
                obj._run_exception = err
                if self._verbose:
                    sys.stderr.write('%s.run: fail\n' % nodename)
                if self._debug:
                    sys.stderr.write(
                        ' Error was:\n '
                        '{}\n'.format(
                            traceback.format_exc()
                            .strip()
                            .replace('\n', '\n ')
                        )
                    )
            # Do a recheck
            try:
                os.chdir(self._checknrun_cwd)
                obj._check_phase = 'recheck'
                obj._recheck_results = obj.Check(
                    *self._args, **self._kwargs
                )
                self._node_eventually_succeeded(
                    nodename, obj._recheck_results
                )
                if self._verbose:
                    sys.stderr.write('%s.recheck: pass\n' % nodename)
            except Exception as err:  # pylint:disable=broad-except
                obj._recheck_exception = err
                if self._verbose:
                    sys.stderr.write(
                        '%s.recheck: fail "%s"\n' % (nodename, err)
                    )
                if self._debug:
                    sys.stderr.write(
                        ' Error was:\n {}\n'.format(
                            traceback.format_exc()
                            .strip()
                            .replace('\n', '\n ')
                        )
                    )
                self._node_failed(nodename, err)
for neighbor in gr.neighbors(n): if neighbor == node2: return True if not(neighbor in doneSet): toDoSet[neighbor] = True return False if __name__ == "__main__": gr = graph() # Add nodes gr.add_nodes(['X','Y','Z']) gr.add_nodes(['A','B','C', 'D']) # Add edges gr.add_edge(('A','B')) gr.add_edge(('B','C')) gr.add_edge(('C','D')) gr.add_edge(('A','Y')) gr.add_edge(('B','Z')) # Depth first search rooted on node X st, pre, post = depth_first_search(gr, root='X') # Print the spanning tree print st print connected('A', 'D') print connected('A', 'X')
gr.add_edge(("d", "b")) gr.add_edge(("e", "d")) gr.add_edge(("e", "f")) gr.add_edge(("f", "d")) print "\nEl grafo es: \n" print gr # recorrido breadth first print "\nBreath first:\n" st, order = breadth_first_search(gr, root="a") print order # recorrido depth first print "\nDepth first: \n" st, order, order2 = depth_first_search(gr, root="a") print order def pagerank(graph, damping_factor=0.85, max_iterations=100, min_delta=0.00001): nodes = graph.nodes() graph_size = len(nodes) if graph_size == 0: return {} min_value = (1.0 - damping_factor ) / graph_size #value for nodes without inbound links # itialize the page rank dict with 1/N for all nodes
def test_find_cycle_on_digraph_without_cycles(self):
    """A DFS spanning tree is acyclic, so find_cycle must return []."""
    source = testlib.new_digraph()
    st, pre, post = depth_first_search(source)
    tree = digraph()
    tree.add_spanning_tree(st)
    assert find_cycle(tree) == []
# ориентация безразлична gr.add_edge(( nodes[0],'_TASK_FullOnMip' )) gr.add_edge(( '_TASK_FullOnMip', '_TASK_NextUmOn')) gr.add_edge(( nodes[0], '_TASK_OTKAZ_CurUm' )) # Распределение узлов посел всех итераций listFindRoots = ['_TASK_NextUmOn', '_TASK_OTKAZ_CurUm'] ''' Тут проблема решена, может резве, что офформление сделать покрасивее 1. русские комментарии к функциям ''' # Поиск путей root = nodes[0] # ищем с того который задан в поиске вызовов st = depth_first_search(gr, root=root) # Вывод на экран for root_pr in listFindRoots : print '# call Top <-' step = '' root_pr_inner = root_pr while (1): # рисуем if root_pr_inner != root : print step+root_pr_inner else : print step+root_pr_inner + ' <- call Bottom' # действия по выходу step += ' '
if col == 0: key = val if col == 1: value = val if not gr.has_node(key): gr.add_node(key) if not gr.has_node(value): gr.add_node(value) gr.add_edge((key, value), randrange(10)) return gr # main execution if check_args(): gr = read_file() #print graph st, pre, post = depth_first_search(gr, root='9') print st # Draw as PNG dot = write(gr) gvv = gv.readstring(dot) gv.layout(gvv, 'dot') gv.render(gvv, 'png', 'dfs.png') st, pre = breadth_first_search(gr, root='9') print st # Draw as PNG dot = write(gr) gvv = gv.readstring(dot) gv.layout(gvv, 'dot') gv.render(gvv, 'png', 'bfs.png')
def test_dfs_in_empty_graph(self):
    """DFS over an empty graph returns an empty tree and empty orders."""
    empty = graph()
    result = depth_first_search(empty)
    assert result[0] == {}
    assert result[1] == []
    assert result[2] == []
def test_dfs_in_empty_digraph(self):
    """DFS over an empty digraph yields an empty spanning tree and orders."""
    gr = pygraph.classes.digraph.digraph()
    spanning, preorder, postorder = depth_first_search(gr)
    assert spanning == {}
    assert preorder == []
    assert postorder == []
def test_dfs_in_empty_graph(self):
    """A radius-filtered DFS on an empty graph returns empty structures."""
    empty_graph = graph()
    tree, pre_order, post_order = depth_first_search(
        empty_graph, filter=filters.radius(2))
    assert tree == {}
    assert pre_order == []
    assert post_order == []
def _checknrun(self, node=None):  # pylint:disable=too-many-branches
    """Check and run each class.

    Assumes all storage and other initialization is complete already.

    Recursively processes every dependency (DFS postorder of ``self.graph``
    rooted at this node) before attempting this node. The per-node
    sequence is check -> (on check failure) run -> recheck.
    """
    # pylint:disable=protected-access
    if not node:
        node = self._root
    nodename = self._class_name(node)
    # Postorder puts dependencies before nodename itself.
    _, _, post = depth_first_search(self.graph, root=nodename)
    # Run dependent nodes and see if all were successful
    blocked = False
    for depnode in post:
        if depnode == nodename:
            continue
        # Run them
        if self.nodestatus.get(depnode, None) is None:
            self._checknrun(depnode)
        # Were they happy?
        if not self.nodestatus.get(depnode, None):
            blocked = True
    self._node_untested(nodename)
    if not blocked:
        obj = self._classmap[nodename]()
        self._checknrun_storage[nodename] = {}
        obj._set_storage(self._checknrun_storage[nodename],
                         self._checknrun_storage['__global'])
        self._objsrun.append(obj)
        # check / run / check
        try:
            os.chdir(self._checknrun_cwd)
            obj._check_phase = 'check'
            obj._check_results = obj.Check(*self._args, **self._kwargs)
            self._node_succeeded(nodename, obj._check_results)
            if self._verbose:
                sys.stderr.write('{}.check: pass\n'.format(nodename))
        except Exception as err:  # pylint:disable=broad-except
            obj._check_exception = err
            if self._verbose:
                sys.stderr.write('%s.check: fail\n' % nodename)
            # In no_act mode, we only run the first check
            # and get out of dodge.
            if self._no_act:
                self._node_failed(nodename, err)
                if self._debug:
                    sys.stderr.write(
                        ' Error was:\n '
                        '{}\n'.format(
                            traceback.format_exc().strip().replace(
                                '\n', '\n ')))
                return
            # Run the Run method, which may fail with
            # wild abandon - we'll be doing a recheck anyway.
            try:
                os.chdir(self._checknrun_cwd)
                obj._run_results = obj.Run(*self._args, **self._kwargs)
                if self._verbose:
                    sys.stderr.write('%s.run: pass\n' % nodename)
            except Exception as err:  # pylint:disable=broad-except
                obj._run_exception = err
                if self._verbose:
                    sys.stderr.write('%s.run: fail\n' % nodename)
                if self._debug:
                    sys.stderr.write(
                        ' Error was:\n '
                        '{}\n'.format(
                            traceback.format_exc().strip().replace(
                                '\n', '\n ')))
            # Do a recheck
            try:
                os.chdir(self._checknrun_cwd)
                obj._check_phase = 'recheck'
                obj._recheck_results = obj.Check(*self._args, **self._kwargs)
                self._node_eventually_succeeded(nodename, obj._recheck_results)
                if self._verbose:
                    sys.stderr.write('%s.recheck: pass\n' % nodename)
            except Exception as err:  # pylint:disable=broad-except
                obj._recheck_exception = err
                if self._verbose:
                    sys.stderr.write('%s.recheck: fail "%s"\n' % (nodename, err))
                if self._debug:
                    sys.stderr.write(' Error was:\n {}\n'.format(
                        traceback.format_exc().strip().replace(
                            '\n', '\n ')))
                self._node_failed(nodename, err)