def get_reg_live_subsets(instrs, code, igraph): """Computes the subsets of the instructions where each register is live. Retunrs a dictionary keyed with the register name.""" # compute the live subsets live_regs = dict() for reg in ('eax', 'ebx', 'ecx', 'edx', 'edi', 'esi', 'ebp', 'esp'): # TODO: insn.REGS[:8] live_regs[reg] = Lifetime(reg, len(instrs)) class live_filter(null_filter): def __call__(self, other, node): # always check 'other' which is the candidate. (includes root) # but also check if node is set which takes care of root if node and self.cur_reg not in other.IN: # print "forward filtered %s from %s (reg=%s)" % ( # str(other), str(node), self.cur_reg) return False return True # compute live regions for register values "born" withing this function live_f = live_filter() for ins in instrs: diff = ins.OUT - ins.IN if len(diff) > 1 and ins.mnem not in ("call", "cpuid", "rdtsc"): print "WARNING: more than one regs defined at", ins, ins.OUT, ins.IN for reg in diff: live_f.cur_reg = reg st, order = breadth_first_search(igraph, ins, live_f) live_regs[live_f.cur_reg].add_subset(order) # if a DEFed register is not in OUT it's an one-instr live region. # if that register is also in the implicit set of the instruction, # it will be marked as unswappable for ins in instrs: for reg in ins.DEF: if reg not in ins.OUT: # print "one-instr live region, register", reg, " in ", ins live_regs[reg].add_subset([ins]) # add live regions for registers that where alive before this function # was called. if not instrs[0].f_entry: # debug! print "BUG: compute_live: instrs[0] is not f_entry!!!" 
# print "compute_live_regions: checking", instrs[0] for reg in instrs[0].IN: # print "compute_live_regions: checking", reg, "in", instrs[0] live_f.cur_reg = reg st, order = breadth_first_search(igraph, instrs[0], live_f) # print reg, "live region", [i.pos for i in order] # let's handle the special case of region splitting for indirect calls live_regs[live_f.cur_reg].add_subset(order) return live_regs
def get_reg_live_subsets(instrs, code, igraph): """Computes the subsets of the instructions where each register is live. Retunrs a dictionary keyed with the register name.""" # compute the live subsets live_regs = dict() for reg in ('eax', 'ebx', 'ecx', 'edx', 'edi', 'esi', 'ebp', 'esp'): # TODO: insn.REGS[:8] live_regs[reg] = Lifetime(reg, len(instrs)) class live_filter(null_filter): def __call__(self, other, node): # always check 'other' which is the candidate. (includes root) # but also check if node is set which takes care of root if node and self.cur_reg not in other.IN: # print "forward filtered %s from %s (reg=%s)" % ( # str(other), str(node), self.cur_reg) return False return True # compute live regions for register values "born" withing this function live_f = live_filter() for ins in instrs: diff = ins.OUT - ins.IN if len(diff) > 1 and ins.mnem not in ("call", "cpuid", "rdtsc"): print "WARNING: more than one regs defined at", ins, ins.OUT, ins.IN for reg in diff: live_f.cur_reg = reg st, order = breadth_first_search(igraph, ins, live_f) live_regs[live_f.cur_reg].add_subset(order) # if a DEFed register is not in OUT it's an one-instr live region. # if that register is also in the implicit set of the instruction, # it will be marked as unswappable for ins in instrs: for reg in ins.DEF: if reg not in ins.OUT: # print "one-instr live region, register", reg, " in ", ins live_regs[reg].add_subset([ins]) # add live regions for registers that where alive before this function # was called. if not instrs[0].f_entry: # debug! print "BUG: compute_live: instrs[0] is not f_entry!!!" 
# print "compute_live_regions: checking", instrs[0] for reg in instrs[0].IN: # print "compute_live_regions: checking", reg, "in", instrs[0] live_f.cur_reg = reg st, order = breadth_first_search(igraph, instrs[0], live_f) # print reg, "live region", [i.pos for i in order] # let's handle the special case of region splitting for indirect calls live_regs[live_f.cur_reg].add_subset(order) return live_regs
def handle(self, **options):
    """Render the Category tree as PDFs (one per root: Abstract, Concrete).

    Aborts without drawing if any duplicate Category node is detected.
    """
    gr = graph()
    cats_by_id = dict((c.id, c) for c in Category.objects.all())
    # Add nodes; `dups` counts duplicates — each next() call both reads and
    # advances the counter, so the final next() below returns the number of
    # duplicates seen so far.
    dups = count()
    for c in cats_by_id.itervalues():
        try:
            gr.add_node(c)
        except AdditionError:
            dups.next()
            parent = cats_by_id.get(c.parent_id)
            print 'WARNING: duplicate node :: <Category %i | %s>' % (c.id, c)
            # NOTE(review): the conditional expression binds looser than `+`
            # and `%`, so when parent is None this prints just 'None' — the
            # precedence is correct but fragile; confirm intent.
            print '\twith parent ' + '<Category %i | %s>' % (
                parent.id, parent) if parent else 'None'
    if dups.next() > 0:
        return
    # Add edges (child -> parent)
    # gr.add_edge((CONCRETE_NODE, ROOT_NODE))
    for c in cats_by_id.itervalues():
        parent = cats_by_id.get(c.parent_id)
        if parent:
            gr.add_edge((c, parent))
    # import ipdb; ipdb.set_trace()
    # The whole tree from the root
    st, order = breadth_first_search(
        gr, root=Category.objects.get(title="Abstract"))
    gst = digraph()
    gst.add_spanning_tree(st)
    dot = write(gst)
    gvv = gv.readstring(dot)
    gv.layout(gvv, 'dot')
    gv.render(gvv, 'pdf', os.path.join(output_dir, 'abstract.pdf'))
    st, order = breadth_first_search(
        gr, root=Category.objects.get(title="Concrete"))
    gst = digraph()
    gst.add_spanning_tree(st)
    dot = write(gst)
    gvv = gv.readstring(dot)
    gv.layout(gvv, 'dot')
    gv.render(gvv, 'pdf', os.path.join(output_dir, 'concrete.pdf'))
def bfs_set_label(g, root, data, value, radius):
    """Assign `value` to every entry of `data` indexed by the nodes reached
    by a BFS from `root` limited to `radius` hops.

    `data` is indexed with the whole list of reached nodes at once
    (presumably a numpy array supporting fancy indexing — confirm).
    """
    from pygraph.algorithms.filters.radius import radius as Radius
    from pygraph.algorithms.searching import breadth_first_search
    wrapped = graph_pygraph(g)
    _, reached = breadth_first_search(wrapped, root, Radius(radius))
    data[reached] = value
def bfs_set_label(g, root, data, value, radius):
    """Label the BFS neighbourhood of `root` (up to `radius` hops) in `data`
    with `value`, using the pygraph radius filter to bound the search."""
    from pygraph.algorithms.filters.radius import radius as Radius
    from pygraph.algorithms.searching import breadth_first_search
    pg = graph_pygraph(g)
    hop_filter = Radius(radius)
    _, visited = breadth_first_search(pg, root, hop_filter)
    data[visited] = value
def getSSAroundSS(self, solarSystemID, jumps):
    """Return the solar systems reachable within `jumps` jumps of
    `solarSystemID`, via a radius-limited BFS over the system graph.

    ss = (region, name, security) — security > 0.5 is shown green, else red.
    """
    ss = self.getSSInfo(solarSystemID)
    color = 0
    if ss[2] > 0.5:
        color = "green"
    else:
        color = "red"
    ssRegion = colored(ss[0], color)
    ssName = colored(ss[1], color)
    ssSecruity = colored("%.1f" % ss[2], color)
    # reuse the cached graph if present, otherwise build it from scratch
    if self.ssgraph:
        gr = self.ssgraph
    else:
        gr = graph()
        nodes = self.getAllSS()
        gr.add_nodes(nodes)
        for edge in self.getAllSSEdges():
            gr.add_edge(edge)
    print "Searching for Solar Systems around %s: %s(%s) in %d jumps." % (ssRegion, ssName, ssSecruity, jumps)
    # breadth_first_search returns (spanning_tree, visit_order); we only
    # need the visit order, which includes the root system itself
    ssinrad = breadth_first_search(gr, solarSystemID, radius(jumps))
    ssinrad = ssinrad[1]
    text = "Found %d systems" % len(ssinrad)
    text = colored(text, "cyan")
    print "Done. %s, including current one." % text
    return ssinrad
def write_graphs_to_dots(self):
    """Dump the package digraph, plus its BFS and DFS spanning trees, as
    DOT files (digraph.dot, bfs.dot, dfs.dot) under the output directory."""
    assert self.build_graph
    self._load_packages()
    from pygraph.readwrite import dot
    base = self.output_dir
    with open(join(base, 'digraph.dot'), 'w') as fh:
        fh.write(dot.write(self.digraph))
    with open(join(base, 'bfs.dot'), 'w') as fh:
        spanning, _ = breadth_first_search(self.digraph)
        tree = digraph()
        tree.add_spanning_tree(spanning)
        fh.write(dot.write(tree))
    with open(join(base, 'dfs.dot'), 'w') as fh:
        spanning, _, _ = depth_first_search(self.digraph)
        tree = digraph()
        tree.add_spanning_tree(spanning)
        fh.write(dot.write(tree))
def _invalidate_caches(self):
    'invalidate the downstream caches of updated nodes'
    # nothing to do if no node was updated
    if len(self.updated) == 0:
        return
    # Sort the nodes in worklist and remove duplicates
    sg = topological_sorting(self.digraph)  # sorted graph
    worklist = []
    # insert nodes into worklist in sorted order
    for node in sg:
        if node in self.updated:
            worklist.append(node)
    self.updated.clear()
    # iterate through worklist
    while worklist:
        # pop() takes from the END, i.e. the topologically-latest updated
        # node first
        node = worklist.pop()
        downstream = breadth_first_search(self.digraph, root=node)[1]  # get all downstream labels
        for n in downstream:
            if n in worklist:
                # remove labels that will already be done
                worklist.remove(n)
            # remove cache entries
            self.cache[n] = None
def testSanityDigraph(self):
    """On a random digraph, every node must appear in the BFS visit order
    after its spanning-tree parent."""
    g = pygraph.digraph()
    g.generate(100, 500)
    spanning, ordering = breadth_first_search(g)
    for node in g:
        parent = spanning[node]
        if parent is not None:
            assert ordering.index(node) > ordering.index(parent)
def write_graphs_to_dots(self):
    """Write three DOT renderings of the package graph: the raw digraph and
    the spanning trees produced by BFS and DFS."""
    assert self.build_graph
    self._load_packages()
    from pygraph.readwrite import dot
    out_dir = self.output_dir

    with open(join(out_dir, 'digraph.dot'), 'w') as out:
        out.write(dot.write(self.digraph))

    with open(join(out_dir, 'bfs.dot'), 'w') as out:
        bfs_tree, _ = breadth_first_search(self.digraph)
        rendered = digraph()
        rendered.add_spanning_tree(bfs_tree)
        out.write(dot.write(rendered))

    with open(join(out_dir, 'dfs.dot'), 'w') as out:
        dfs_tree, _, _ = depth_first_search(self.digraph)
        rendered = digraph()
        rendered.add_spanning_tree(dfs_tree)
        out.write(dot.write(rendered))
def testDigraphBFS(self):
    """A radius(2) filter must cut the BFS off two hops from the root."""
    gr = pygraph.digraph()
    gr.add_nodes([1, 2, 3, 4, 5, 6, 7, 8, 9])
    for u, v in ((1, 2), (1, 3), (2, 4), (3, 5), (4, 6), (5, 7)):
        gr.add_edge(u, v)
    gr.add_edge(1, 8, wt=3)
    gr.add_edge(7, 8, wt=3)
    gr.add_edge(8, 9)
    gr.add_edge(3, 9)
    st, lo = breadth_first_search(gr, 1, filter=filters.radius(2))
    assert st == {1: None, 2: 1, 3: 1, 4: 2, 5: 3, 9: 3}
    st, lo = breadth_first_search(gr, 7, filter=filters.radius(2))
    assert st == {7: None}
def search(self):
    """BFS over self.net from the "book" node and dump the spanning tree
    as Graphviz source to file.gv."""
    spanning, _ = breadth_first_search(self.net, "book")
    tree = digraph()
    tree.add_spanning_tree(spanning)
    rendered = write(tree, True)
    with open("file.gv", "w") as out_file:
        out_file.write(rendered)
def test_bfs_in_digraph(self):
    """With a find('find-me') filter the BFS must stop once the target is
    reached, so every visited node is within two tree levels of the root."""
    gr = testlib.new_digraph()
    gr.add_node('find-me')
    gr.add_edge((0, 'find-me'))
    spanning, _ = breadth_first_search(gr, root=0, filter=find('find-me'))
    assert spanning['find-me'] == 0
    for node in spanning:
        assert (spanning[node] == None
                or spanning[node] == 0
                or spanning[spanning[node]] == 0)
def handle(self, **options):
    """Render the Category tree as PDFs (one per root: Abstract, Concrete).

    Aborts without drawing if any duplicate Category node is detected.
    """
    gr = graph()
    cats_by_id = dict((c.id, c) for c in Category.objects.all())
    # Add nodes; `dups` counts duplicates — each next() both reads and
    # advances the counter, so the next() below yields the duplicate count.
    dups = count()
    for c in cats_by_id.itervalues():
        try:
            gr.add_node(c)
        except AdditionError:
            dups.next()
            parent = cats_by_id.get(c.parent_id)
            print 'WARNING: duplicate node :: <Category %i | %s>' % (c.id, c)
            # NOTE(review): conditional expression binds looser than `+`/`%`,
            # so this prints just 'None' when parent is missing — confirm.
            print '\twith parent ' + '<Category %i | %s>' % (parent.id, parent) if parent else 'None'
    if dups.next() > 0:
        return
    # Add edges (child -> parent)
    # gr.add_edge((CONCRETE_NODE, ROOT_NODE))
    for c in cats_by_id.itervalues():
        parent = cats_by_id.get(c.parent_id)
        if parent:
            gr.add_edge((c, parent))
    # import ipdb; ipdb.set_trace()
    # The whole tree from the root
    st, order = breadth_first_search(gr, root=Category.objects.get(title="Abstract"))
    gst = digraph()
    gst.add_spanning_tree(st)
    dot = write(gst)
    gvv = gv.readstring(dot)
    gv.layout(gvv, 'dot')
    gv.render(gvv, 'pdf', os.path.join(output_dir, 'abstract.pdf'))
    st, order = breadth_first_search(gr, root=Category.objects.get(title="Concrete"))
    gst = digraph()
    gst.add_spanning_tree(st)
    dot = write(gst)
    gvv = gv.readstring(dot)
    gv.layout(gvv, 'dot')
    gv.render(gvv, 'pdf', os.path.join(output_dir, 'concrete.pdf'))
def search(self):
    """Compute the BFS spanning tree of self.net rooted at "book" and write
    it to file.gv in Graphviz format."""
    st, _ = breadth_first_search(self.net, "book")
    gst = digraph()
    gst.add_spanning_tree(st)
    dot_source = write(gst, True)
    out_file = open("file.gv", "w")
    try:
        out_file.write(dot_source)
    finally:
        out_file.close()
def test_bfs_in_digraph(self):
    """Unfiltered BFS sanity: visit order respects parent-before-child, and
    every spanning-tree link is a real edge (or the root's None parent)."""
    gr = testlib.new_digraph()
    spanning, ordering = breadth_first_search(gr)
    for node in gr:
        parent = spanning[node]
        if parent is not None:
            assert ordering.index(node) > ordering.index(parent)
    for node in spanning:
        assert gr.has_edge((spanning[node], node)) or spanning[node] == None
def test_bfs_in_digraph(self):
    """The find('find-me') filter stops the search at the target node; all
    reached nodes must therefore sit at most two levels below the root."""
    gr = testlib.new_digraph()
    gr.add_node('find-me')
    gr.add_edge((0, 'find-me'))
    tree, _ = breadth_first_search(gr, root=0, filter=find('find-me'))
    assert tree['find-me'] == 0
    for n in tree:
        assert tree[n] == None or tree[n] == 0 or tree[tree[n]] == 0
def sample_gene_interactions(c, args, idx_to_sample):
    """For each sample, report genes interacting with args.gene (within
    args.radius hops of the HPRD protein-protein interaction graph) that are
    variants in that sample.  Writes a DOT rendering to `out` as a side
    effect; results go to stdout.
    """
    #fetch variant gene dict for all samples
    get_variant_genes(c, args, idx_to_sample)
    #file handle for fetching the hprd graph
    file_graph = os.path.join(path_dirname, 'hprd_interaction_graph')
    #load the graph using cPickle and close file handle
    gr = graph()
    f = open(file_graph, 'rb')
    gr = cPickle.load(f)
    f.close()
    k = []
    variants = []
    #calculate nodes from the graph
    hprd_genes = gr.nodes()
    if args.gene == None or args.gene not in hprd_genes:
        sys.stderr.write("gene name not given else not represented in the p-p interaction file\n")
    elif args.gene in hprd_genes:
        # radius-limited BFS around the chosen gene; x = spanning tree
        x, y = \
            breadth_first_search(gr, root=args.gene, filter=radius(args.radius))
        gst = digraph()
        gst.add_spanning_tree(x)
        dot = write(gst)
        out.write(dot)
        # sd maps gene -> hop distance from args.gene in the spanning tree
        st, sd = shortest_path(gst, args.gene)
        if args.var_mode:
            for sample in sam.iterkeys():
                var = sam[str(sample)]
                # for each level return interacting genes if they are
                # variants in the sample.
                # 0th order would be returned if the user chosen
                # gene is a variant in the sample
                for x in range(0, (args.radius + 1)):
                    for each in var:
                        for key, value in sd.iteritems():
                            if value == x and key == each[0]:
                                print "\t".join([str(sample), str(args.gene), \
                                                 str(x), \
                                                 str(key), \
                                                 str(each[1]), \
                                                 str(each[2]), \
                                                 str(each[3])])
        elif (not args.var_mode):
            for sample in sam.iterkeys():
                for each in sam[str(sample)]:
                    variants.append(each[0])
                for x in range(0, (args.radius + 1)):
                    for key, value in sd.iteritems():
                        if value == x and key in set(variants):
                            k.append(key)
                    if k:
                        print "\t".join([str(sample), str(args.gene), \
                                         str(x) + "_order:", ",".join(k)])
                    else:
                        print "\t".join([str(sample), str(args.gene),
                                         str(x) + "_order:", "none"])
                    #initialize keys for next iteration
                    k = []
def searchNodes(self, root):
    """Return the BFS visit order of self._gr starting from `root`."""
    spanning, visit_order = breadth_first_search(self._gr, root=root)
    gst = digraph()
    # DOT rendering kept disabled, as in the original workflow:
    #gst.add_spanning_tree(spanning)
    #dot = write(gst)
    #with open("odata/grf.dot", 'w') as f:
    #    f.write(dot)
    #call(["dot", "odata/grf.dot", "-Tjpg", "-o", "odata/grf.jpg"])
    return visit_order
def testGraphBFS(self):
    """A find(5) filter halts the undirected BFS as soon as node 5 appears."""
    gr = pygraph.graph()
    gr.add_nodes([1, 2, 3, 4, 5])
    for u, v in ((1, 2), (2, 3), (2, 4), (4, 5), (1, 5), (3, 5)):
        gr.add_edge(u, v)
    spanning, _ = breadth_first_search(gr, 1, filter=filters.find(5))
    assert spanning == {1: None, 2: 1, 5: 1}
def testDigraphBFS(self):
    """A find(5) filter halts the directed BFS once node 5 is reached."""
    gr = pygraph.digraph()
    gr.add_nodes([1, 2, 3, 4, 5, 6])
    for u, v in ((1, 2), (1, 3), (2, 4), (4, 3), (5, 1), (3, 5), (5, 6)):
        gr.add_edge(u, v)
    spanning, _ = breadth_first_search(gr, 1, filter=filters.find(5))
    assert spanning == {1: None, 2: 1, 3: 1, 4: 2, 5: 3}
def testDigraph(self):
    """Unfiltered BFS on a small digraph: pin both spanning tree and order."""
    gr = pygraph.digraph()
    gr.add_nodes([1, 2, 3, 4, 5])
    for u, v in ((1, 2), (2, 3), (2, 4), (4, 5), (1, 5), (3, 5)):
        gr.add_edge(u, v)
    spanning, ordering = breadth_first_search(gr)
    assert spanning == {1: None, 2: 1, 3: 2, 4: 2, 5: 1}
    assert ordering == [1, 2, 5, 3, 4]
def bfs_sub_graph(g, root, radius):
    """Extract the sub-graph of `g` reachable within `radius` BFS hops of
    `root`, renumbering node ids to a compact 0..k-1 range.

    Returns (subg, to_keep) where to_keep are the original (sorted) node ids.
    """
    from pygraph.algorithms.filters.radius import radius as Radius
    from pygraph.algorithms.searching import breadth_first_search
    wrapped = graph_pygraph(g)
    _, reached = breadth_first_search(wrapped, root, Radius(radius))
    to_keep = sorted(reached)
    # map each kept original index to its new compact index
    new_indexes = np.zeros(len(g))
    new_indexes[to_keep] = range(len(to_keep))
    subg = g[to_keep]
    # rewrite every adjacency list in terms of the new indices
    for neighbors in subg:
        for pos, old_id in enumerate(neighbors):
            neighbors[pos] = new_indexes[old_id]
    return subg, to_keep
def bfs_sub_graph(g, root, radius):
    """Cut out the radius-limited BFS neighbourhood of `root` from `g` and
    relabel its nodes consecutively; returns (sub_graph, kept_node_ids)."""
    from pygraph.algorithms.filters.radius import radius as Radius
    from pygraph.algorithms.searching import breadth_first_search
    pg = graph_pygraph(g)
    _, visited = breadth_first_search(pg, root, Radius(radius))
    to_keep = sorted(visited)
    remap = np.zeros(len(g))
    remap[to_keep] = range(len(to_keep))
    subg = g[to_keep]
    for adj in subg:
        for j, node_id in enumerate(adj):
            adj[j] = remap[node_id]
    return subg, to_keep
def level_up_down(graph):
    """
    Yield the nodes of the graph, for an arbitrary root, starting with the
    leaves, running up to the root node, and pushing back down toward the
    leaves, excepting the leaves themselves.  Undefined behavior if the
    graph is not a tree.
    """
    root = next(NodeScheduler.uniform(graph))
    _, ordering = breadth_first_search(graph, root=root)
    # upward sweep: leaves first, root last
    for node in reversed(ordering):
        yield node
    # downward sweep: skip the root (already yielded) and skip leaves
    for node in ordering[1:]:
        if graph.node_order(node) > 1:
            yield node
def get_word_st(self, word):
    """Create a Spanning Tree for a word passed as an arg.

    :Parameters:
        - `word`: (str) A word to build a spanning tree for.
    :Returns:
        - A directed graph holding the BFS spanning tree rooted at `word`.
    :Raises:
        - None.
    """
    spanning, _ = breadth_first_search(self._graph,
                                       root=word.encode('utf-8'))
    tree = digraph()
    tree.add_spanning_tree(spanning)
    return tree
def GetPaths(gr, start, ends):
    """Produce shortest paths from 'start' to each node in 'ends' on gr.

    An empty list entry means there is no path from `start` to that end.

    :param gr: undirected graph (pygraph.classes.graph)
    :param start: node identifier of the 'start' node
    :param ends: list of node identifiers
    :returns: a list of paths (one per end)
    """
    spanning, _ = breadth_first_search(gr, root=start)
    return [TracePath(spanning, end) for end in ends]
def test_bfs_in_graph(self):
    """radius(3) keeps every reached node within three tree levels of root."""
    gr = testlib.new_graph()
    spanning, _ = breadth_first_search(gr, root=0, filter=radius(3))
    for node in spanning:
        assert (spanning[node] == None
                or spanning[node] == 0
                or spanning[spanning[node]] == 0
                or spanning[spanning[spanning[node]]] == 0)
def sampleLink(self, index, list_index, ll):
    """Gibbs-sample a new customer link for `index` in list `list_index`
    (distance-dependent CRP).  Removing the current link may split the
    table into two; the new link may merge tables back together.
    `ll` supplies the likelihood model and hyperparameters.
    """
    s = SamplerStateTracker.samplerStates[SamplerStateTracker.current_iter]
    table_id = s.get_t(index, list_index)
    customersAtTable = s.getCustomersAtTable(table_id, list_index)
    orig_table_members = []
    # build directed (g) and undirected (ug) link graphs over the table
    g = digraph()
    ug = graph()
    for i in customersAtTable:
        orig_table_members.append(i)
        if not g.has_node(i):
            ug.add_node(i)
            g.add_node(i)
        j = s.getC(i, list_index)
        if not g.has_node(j):
            ug.add_node(j)
            g.add_node(j)
        g.add_edge((i, j))
        ug.add_edge((i, j))
    # if `index` is part of a link cycle, cutting its link cannot split
    # the table; otherwise the removal splits it into two components
    cycles = find_cycle(g)
    isCyclePresent = False
    if index in cycles:
        isCyclePresent = True
    if not isCyclePresent:
        # obs to sample moval will split the table into 2
        ug.del_edge((index, s.getC(index, list_index)))
        temp, new_table_members = breadth_first_search(ug, index)
        orig_table_members = new_table_members
        temp, old_table_members = breadth_first_search(ug, s.getC(index, list_index))
        s.setT(s.getT() + 1)
        s.setC(None, index, list_index)
        # NOTE(review): list.remove(0) removes the VALUE 0, not index 0,
        # and returns None — looks like pop(0) was intended; confirm.
        new_table_id = self.emptyTables[list_index].remove(0)
        for l in new_table_members:
            s.set_t(new_table_id, l, list_index)
        old_table_id = table_id
        s.setCustomersAtTable(set(old_table_members), old_table_id, list_index)
        table_id = new_table_id
        s.setCustomersAtTable(set(new_table_members), new_table_id, list_index)
    # prior over link targets from the distance matrix row of `index`
    distanceMatrices = Data.getDistanceMatrices()
    distance_matrix = distanceMatrices[list_index]
    priors = distance_matrix[index]
    priors[index] = ll.getHyperParameters().getSelfLinkProb()
    sum_p = sum(priors)
    priors = priors / sum_p
    posterior = []
    indexes = []
    for i in range(len(priors)):
        if priors[i] != 0:
            indexes.append(i)
            table_proposed = s.get_t(i, list_index)
            if table_proposed == table_id:
                posterior.append(priors[i])
            else:
                proposedTableMembersSet = s.getCustomersAtTable(table_proposed, list_index)
                proposed_table_members = list(proposedTableMembersSet)
                change_in_log_likelihood = self.compute_change_in_likelihood(ll, orig_table_members, proposed_table_members, list_index)
                # NOTE(review): exp(log(p + delta)) == p + delta; probably
                # exp(log(p) + delta) was intended — confirm against paper.
                posterior.append(exp(log(priors[i] + change_in_log_likelihood)))
    # draw the new link from the unnormalised posterior
    sample = Util.sample(posterior)
    customer_assignment_index = indexes[sample]
    assigned_table = s.get_t(customer_assignment_index, list_index)
    s.setC(customer_assignment_index, index, list_index)
    if assigned_table != table_id:
        # the link merges our table into `assigned_table`
        s.setT(s.getT() - 1)
        for members in orig_table_members:
            s.set_t(assigned_table, members, list_index)
        hs_orig_members_in_new_table = set(s.getCustomersAtTable(assigned_table, list_index))
        for i in range(len(orig_table_members)):
            hs_orig_members_in_new_table.add(orig_table_members[i])
        s.setCustomersAtTable(hs_orig_members_in_new_table, assigned_table, list_index)
        s.setCustomersAtTable(None, table_id, list_index)
        # NOTE(review): indexed by `index` here but by `list_index` above
        # when removing — likely should be self.emptyTables[list_index].
        self.emptyTables[index].append(table_id)
def testbfs_in_empty_graph(self):
    """BFS of an empty graph yields an empty tree and empty visit order."""
    empty = graph()
    spanning, ordering = breadth_first_search(empty, filter=radius(2))
    assert spanning == {}
    assert ordering == []
def find_affected_nodes(self, ci_id):
    # BFS from ci_id over the CI dependency graph; returns the
    # (spanning_tree, visit_order) pair.  An unknown ci_id raises KeyError
    # inside pygraph and yields an empty result instead.
    try:
        search_tree, pre = breadth_first_search(self.graph, ci_id)
    except KeyError:
        # NOTE(review): returns a list here but a tuple on success —
        # callers must handle both shapes; consider ({}, []) for consistency.
        return []
    return (search_tree, pre)
def test_bfs_in_digraph(self):
    """radius(3) bounds the digraph BFS to three tree levels from root 0."""
    gr = testlib.new_digraph()
    tree, _ = breadth_first_search(gr, root=0, filter=radius(3))
    for n in tree:
        assert (tree[n] == None or tree[n] == 0
                or tree[tree[n]] == 0 or tree[tree[tree[n]]] == 0)
def sample_lof_interactions(c, args, idx_to_sample, samples): lof = get_lof_genes(c, args, idx_to_sample) #file handle for fetching the hprd graph file_graph = os.path.join(path_dirname, 'hprd_interaction_graph') #load the graph using cPickle and close file handle gr = graph() f = open(file_graph, 'rb') gr = cPickle.load(f) f.close() #calculate nodes from the graph hprd_genes = gr.nodes() #initialize keys k = [] variants = [] if (not args.var_mode): for sample in lof.iterkeys(): lofvariants = list(set(lof[str(sample)])) for each in samples[str(sample)]: variants.append(each[0]) for gene in lofvariants: if gene in hprd_genes: x, y = \ breadth_first_search(gr,root=gene,\ filter=radius(args.radius)) gst = digraph() gst.add_spanning_tree(x) st, sd = shortest_path(gst, gene) # for each level return interacting genes # if they are variants in the sample. for rad in range(1, (args.radius+1)): for key, value in sd.iteritems(): if (value == rad) and key in set(variants): k.append(key) if k: print "\t".join([str(sample), \ str(gene), \ str(rad)+"_order:", ",".join(k)]) else: print "\t".join([str(sample), \ str(gene), \ str(rad)+"_order:", \ "none"]) #initialize k k = [] elif args.var_mode: for sample in lof.iterkeys(): lofvariants = list(set(lof[str(sample)])) var = samples[str(sample)] for gene in lofvariants: if gene in hprd_genes: x, y = \ breadth_first_search(gr,root=gene, \ filter=radius(args.radius)) gst = digraph() gst.add_spanning_tree(x) st, sd = shortest_path(gst, gene) for rad in range(1, (args.radius+1)): for each in var: for key, value in sd.iteritems(): if value == rad and key == each[0]: print "\t".join([str(sample), \ str(gene), \ str(rad), \ str(key), \ str(each[1]), \ str(each[2]), \ str(each[3]), \ str(each[4]), \ str(each[5]), \ str(each[6]), \ str(each[7]), \ str(each[8]), \ str(each[9]), \ str(each[10])])
def bfs_pygraph(top_node, visit, graph):
    # Run a pygraph BFS from top_node.
    # NOTE(review): the `visit` callback is unused and the search result is
    # discarded — this only exercises the traversal (e.g. for benchmarking
    # against other BFS implementations); confirm intent.
    breadth_first_search(graph=graph, root=top_node)
# (fragment: tail of a read_file()-style parser — the enclosing def and the
# loop over lines are outside this chunk)
# each line is "key value": column 0 is the edge source, column 1 the target
    for col, val in enumerate(line.split(' ')):
        if col == 0:
            key = val
        if col == 1:
            value = val
    if not gr.has_node(key):
        gr.add_node(key)
    if not gr.has_node(value):
        gr.add_node(value)
    # edge gets a random weight in [0, 10)
    gr.add_edge((key, value), randrange(10))
    return gr

# main execution
if check_args():
    gr = read_file()
    #print graph
    # DFS spanning tree from node '9', rendered to dfs.png
    st, pre, post = depth_first_search(gr, root='9')
    print st
    # Draw as PNG
    dot = write(gr)
    gvv = gv.readstring(dot)
    gv.layout(gvv, 'dot')
    gv.render(gvv, 'png', 'dfs.png')
    # BFS spanning tree from node '9', rendered to bfs.png
    st, pre = breadth_first_search(gr, root='9')
    print st
    # Draw as PNG
    dot = write(gr)
    gvv = gv.readstring(dot)
    gv.layout(gvv, 'dot')
    gv.render(gvv, 'png', 'bfs.png')
# (fragment: tail of a read_file()-style parser — the enclosing def, line
# loop, and the `for col, val in enumerate(...)` header are outside this chunk)
        if col == 0:
            key = val
        if col == 1:
            value = val
    if not gr.has_node(key):
        gr.add_node(key)
    if not gr.has_node(value):
        gr.add_node(value)
    # edge weight is random in [0, 10)
    gr.add_edge((key, value), randrange(10))
    return gr

# main execution
if check_args():
    gr = read_file()
    #print graph
    # DFS spanning tree from node '9', rendered to dfs.png
    st, pre, post = depth_first_search(gr, root='9')
    print st
    # Draw as PNG
    dot = write(gr)
    gvv = gv.readstring(dot)
    gv.layout(gvv, 'dot')
    gv.render(gvv, 'png', 'dfs.png')
    # BFS spanning tree from node '9', rendered to bfs.png
    st, pre = breadth_first_search(gr, root='9')
    print st
    # Draw as PNG
    dot = write(gr)
    gvv = gv.readstring(dot)
    gv.layout(gvv, 'dot')
    gv.render(gvv, 'png', 'bfs.png')
# (script fragment: `gr` is created before this chunk)
# Undirected border relations between central-European countries.
gr.add_edge("Austria", "Germany")
gr.add_edge("Austria", "Italy")
gr.add_edge("Austria", "Czech Republic")
gr.add_edge("Austria", "Slovakia")
gr.add_edge("Austria", "Hungary")
gr.add_edge("Denmark", "Germany")
gr.add_edge("Poland", "Czech Republic")
gr.add_edge("Poland", "Slovakia")
gr.add_edge("Poland", "Germany")
gr.add_edge("Czech Republic", "Slovakia")
gr.add_edge("Czech Republic", "Germany")
gr.add_edge("Slovakia", "Hungary")

# Draw as PNG
dot = gr.write(fmt='dot')
gvv = gv.readstring(dot)
gv.layout(gvv, 'dot')
gv.render(gvv, 'png', 'europe.png')

# Then, draw the breadth first search spanning tree rooted in Switzerland
st, order = breadth_first_search(gr, root="Switzerland")
gst = pygraph.digraph()
gst.add_spanning_tree(st)
dot = gst.write(fmt='dot')
gvv = gv.readstring(dot)
gv.layout(gvv, 'dot')
gv.render(gvv, 'png', 'europe-st.png')
gr.add_node(method) # Find Entry Points Into Program if method.is_main(): main_methods.append(method) # Find all methods that create Intents if method.creates_intent(): methods_create_intents.append(method) # Find all methods that take Intents as parameters if method.has_intent_param(): methods_param_intents.append(method) # Create caller/callees relationships as directed edges for method in Methods.values(): for callee in method.callees(): if not gr.has_edge((method, callee)): gr.add_edge((method, callee)) for main in main_methods: st, order = breadth_first_search(gr, root=main) gst = digraph() gst.add_spanning_tree(st) nodes = gst.nodes() for method in nodes: if method.creates_intent(): world.send_entity_up(method.signature(), "square") else: world.send_entity_up(method.signature(), "circle") raw_input("Press Enter to Continue") world.clear() # world.send_link_up(method.signature(), 0, callee.signature(), 0)
# (script fragment: `gr` is created before this chunk)
gr.add_edge(("a", "d"))
gr.add_edge(("a", "f"))
gr.add_edge(("b", "c"))
gr.add_edge(("b", "f"))
gr.add_edge(("c", "d"))
gr.add_edge(("d", "b"))
gr.add_edge(("e", "d"))
gr.add_edge(("e", "f"))
gr.add_edge(("f", "d"))

print "\nEl grafo es: \n"
print gr

# breadth-first traversal
print "\nBreath first:\n"
st, order = breadth_first_search(gr, root="a")
print order

# depth-first traversal
print "\nDepth first: \n"
st, order, order2 = depth_first_search(gr, root="a")
print order

def pagerank(graph, damping_factor=0.85, max_iterations=100, min_delta=0.00001):
    # (truncated here: the body continues past the end of this chunk)
    nodes = graph.nodes()
    graph_size = len(nodes)
    if graph_size == 0:
# Dormindo um pouco para evitar bloqueios time.sleep(5) # Criando grafo gr = graph() gr.add_node(RANGE) # Pegando ips ips = IPSet([RANGE]) todos = [] for ip in ips: todos.append(ip) # Fazendo shuffle random.shuffle(todos) for ip in todos: try: search(str(ip), gr) except httplib.IncompleteRead: print "[!] Erro httplib.IncompleteRead: IP (%s)" % ip st, order = breadth_first_search(gr, root=RANGE) gst = digraph() gst.add_spanning_tree(st) dot = write(gst) gvv = gv.readstring(dot) gv.layout(gvv, 'dot') gv.render(gvv, 'png', 'bing.png')
def sample_gene_interactions(c, args, idx_to_sample): out = open("file.dot", 'w') #fetch variant gene dict for all samples samples = get_variant_genes(c, args, idx_to_sample) #file handle for fetching the hprd graph config = read_gemini_config(args=args) path_dirname = config["annotation_dir"] file_graph = os.path.join(path_dirname, 'hprd_interaction_graph') #load the graph using cPickle and close file handle gr = graph() f = open(file_graph, 'rb') gr = cPickle.load(f) f.close() k = [] variants = [] #calculate nodes from the graph hprd_genes = gr.nodes() if args.gene == None or args.gene not in hprd_genes: sys.stderr.write("Gene name not found or") sys.stderr.write(" gene not in p-p interaction file\n") elif args.gene in hprd_genes: x, y = \ breadth_first_search(gr,root=args.gene,filter=radius(args.radius)) gst = digraph() gst.add_spanning_tree(x) dot = write(gst) out.write(dot) st, sd = shortest_path(gst, args.gene) if args.var_mode: for sample in samples.iterkeys(): var = samples[str(sample)] #for each level return interacting genes if they are # variants in the sample. 
# 0th order would be returned if the user chosen # gene is a variant in the sample for x in range(0, (args.radius + 1)): for each in var: for key, value in sd.iteritems(): if value == x and key == each[0]: print "\t".join([str(sample),str(args.gene), \ str(x), \ str(key), \ str(each[1]), \ str(each[2]), \ str(each[3]), \ str(each[4]), \ str(each[5]), \ str(each[6]), \ str(each[7]), \ str(each[8]), \ str(each[9]), \ str(each[10]), \ str(each[11])]) elif (not args.var_mode): for sample in samples.iterkeys(): for each in samples[str(sample)]: variants.append(each[0]) for x in range(0, (args.radius + 1)): for key, value in sd.iteritems(): if value == x and key in set(variants): k.append(key) if k: print "\t".join([str(sample), str(args.gene), \ str(x)+"_order:", ",".join(k)]) else: print "\t".join([str(sample), str(args.gene), \ str(x)+"_order:", "none"]) #initialize keys for next iteration k = [] #initialize variants list for next iteration variants = []
def testbfs_in_empty_graph(self):
    """An empty graph produces an empty spanning tree and visit order,
    even with a radius filter attached."""
    gr = graph()
    tree, visit_order = breadth_first_search(gr, filter=radius(2))
    assert tree == {}
    assert visit_order == []
def test_bfs_in_empty_graph(self):
    """Unfiltered BFS of a freshly-created empty graph returns nothing."""
    empty = pygraph.classes.graph.graph()
    tree, visit_order = breadth_first_search(empty)
    assert tree == {}
    assert visit_order == []
def analyze_registers(self, functions):
    """Classify this function's registers into preserved (push/pop pairs),
    argument, touched, and return-value sets, using BFS over the
    instruction graph with use/def filters.
    """
    # quickly check if this func is SEH prolog or epilog and skip analysis
    if self.name and ("SEH_prolog" in self.name or "SEH_epilog" in self.name):
        return
    # normal analyze
    # find all the arguments and pushes
    use_f = _use_filter()
    if not self.instrs[0].f_entry:  # debug!
        print "BUG: analyze_registers: instrs[0] is not f_entry!!!"
    st, order = breadth_first_search(self.igraph, self.instrs[0], use_f)
    # let's sort out the preserved registers first
    # XXX: check for the special case of SEH and skip the usual check!
    if not self.check_SEH_preservs(functions):
        #TODO: handle enter as push ebp!! (there is no enter in reader dlls..)
        pushes, pops = [], []
        for ins in self.instrs:
            if ins.mnem == "leave" or (ins.mnem == "pop" and ins.op1.type == insn.Operand.REGISTER):
                pops.append(ins)
            elif (ins.mnem == "push" and ins.op1.type == insn.Operand.REGISTER
                  and len(ins.USE & use_f.use_regs) > 1):
                #esp is always in there..
                pushes.append(ins)
        for push in pushes:
            # is there any case to have 'push esp' !?
            if len(push.USE) != 2 or "esp" not in push.USE:
                print "WEIRD push instruction !?:", push
                continue
            # the pushed register is the non-esp one in USE
            reg = (push.USE - set(("esp", ))).pop()
            # no need to put any extra check to the filter bellow for the leave case:
            # leave DEFs ebp and esp, and there is no way for reg=esp!
            reg_pops = filter(lambda x: reg in x.DEF, pops)
            if not reg_pops:
                # no pops, it's propably a push arg for a call
                continue
            # keep only the pops after which the register is dead until a
            # ret/retn/jmp — those form a true save/restore pair
            true_reg_pops = []
            for pop in reg_pops:
                def_f = _def_filter(reg)
                st, order = breadth_first_search(self.igraph, pop, def_f)
                if def_f.last_ins.mnem not in ("ret", "retn", "jmp"):
                    # nooo
                    #print "break for", pop, "at", def_f.last_ins
                    continue
                true_reg_pops.append(pop)
            if true_reg_pops:
                self.reg_pairs.append((reg, push, true_reg_pops))
                self.pre_regs.add(reg)
    # now we can safely tell which are the register-arguments
    self.arg_regs = use_f.use_regs - self.pre_regs
    # next, let's define the touched set
    #XXX not sure if we need to split this set to USE and DEF ones ..
    # should something that was only read in a function be considered as
    # a return value? .. maybe yes..
    for ins in self.instrs:
        self.touches |= (ins.DEF | ins.USE)
    self.touches -= self.pre_regs
    if not self.arg_regs <= self.touches:
        print "BUG: how can arg_regs not be subset of touched?", self
    # final set (and most difficult) the return-value registers:
    # inspect every caller and see which of our touched registers it uses
    # right after the call site
    for ref, func_ea in self.code_refs_to:
        try:
            func = functions[func_ea]
            use_f = _use_filter()
            #XXX: be careful!, a reg might be used after a call instrs .. we have
            # to check in that case if this is a return value of this function or
            # of the other (a third one)
            #TODO: we can add this as a generic test in the filters to check for
            # CodeXfers to unanalyzed function
            st, order = breadth_first_search(func.igraph, func.code[ref], use_f)
            #self.ret_regs.append(use_f.use_regs & self.touches)
            self.ret_regs |= (use_f.use_regs & self.touches)
        except KeyError, e:
            pass
def test_breadth_first_search(self):
    """Smoke-test BFS over the schema graph parsed from the sakila fixture."""
    schema_graph = self.rel.parse_json_file('sakila.Tables.json.txt')
    spanning, visit_order = breadth_first_search(schema_graph, root='store')
    print (spanning)
    print (visit_order)
def sample_lof_interactions(c, args, idx_to_sample, samples):
    """For each sample's loss-of-function genes, report interacting genes
    (within args.radius hops in the HPRD protein-protein graph) that are
    variants in the sample.  Results are printed to stdout.
    """
    lof = get_lof_genes(c, args, idx_to_sample)
    #file handle for fetching the hprd graph
    config = read_gemini_config(args=args)
    path_dirname = config["annotation_dir"]
    file_graph = os.path.join(path_dirname, 'hprd_interaction_graph')
    #load the graph using cPickle and close file handle
    gr = graph()
    f = open(file_graph, 'rb')
    gr = cPickle.load(f)
    f.close()
    #calculate nodes from the graph
    hprd_genes = gr.nodes()
    #initialize keys
    k = []
    variants = []
    if (not args.var_mode):
        for sample in lof.iterkeys():
            lofvariants = list(set(lof[str(sample)]))
            for each in samples[str(sample)]:
                variants.append(each[0])
            for gene in lofvariants:
                if gene in hprd_genes:
                    # radius-limited BFS around the LoF gene
                    x, y = \
                        breadth_first_search(gr, root=gene, \
                                             filter=radius(args.radius))
                    gst = digraph()
                    gst.add_spanning_tree(x)
                    # sd: gene -> hop distance from `gene`
                    st, sd = shortest_path(gst, gene)
                    # for each level return interacting genes
                    # if they are variants in the sample.
                    for rad in range(1, (args.radius + 1)):
                        for key, value in sd.iteritems():
                            if (value == rad) and key in set(variants):
                                k.append(key)
                        if k:
                            print "\t".join([str(sample), \
                                             str(gene), \
                                             str(rad) + "_order:", ",".join(k)])
                        else:
                            print "\t".join([str(sample), \
                                             str(gene), \
                                             str(rad) + "_order:", \
                                             "none"])
                        #initialize k
                        k = []
            #initialize variants list for next iteration
            variants = []
    elif args.var_mode:
        for sample in lof.iterkeys():
            lofvariants = list(set(lof[str(sample)]))
            var = samples[str(sample)]
            for gene in lofvariants:
                if gene in hprd_genes:
                    x, y = \
                        breadth_first_search(gr, root=gene, \
                                             filter=radius(args.radius))
                    gst = digraph()
                    gst.add_spanning_tree(x)
                    st, sd = shortest_path(gst, gene)
                    for rad in range(1, (args.radius + 1)):
                        for each in var:
                            for key, value in sd.iteritems():
                                if value == rad and key == each[0]:
                                    print "\t".join([str(sample), \
                                                     str(gene), \
                                                     str(rad), \
                                                     str(key), \
                                                     str(each[1]), \
                                                     str(each[2]), \
                                                     str(each[3]), \
                                                     str(each[4]), \
                                                     str(each[5]), \
                                                     str(each[6]), \
                                                     str(each[7]), \
                                                     str(each[8]), \
                                                     str(each[9]), \
                                                     str(each[10]), \
                                                     str(each[11])])
def find_affected_nodes(self, ci_id):
    # BFS from ci_id over the CI dependency graph; returns the
    # (spanning_tree, visit_order) pair.  An unknown ci_id raises KeyError
    # inside pygraph and yields an empty result instead.
    try:
        search_tree, pre = breadth_first_search(self.graph, ci_id)
    except KeyError:
        # NOTE(review): returns a list here but a tuple on success —
        # callers must handle both shapes; consider ({}, []) for consistency.
        return []
    return (search_tree, pre)