def test_grid_graph(self):
    """Tests random community generation with a graph of 100 precincts in a grid.

    Builds a 10x10 lattice of Precinct nodes, relabels nodes by degree
    rank, partitions them into two communities, and (optionally)
    visualizes the result.
    """
    G1 = graph()  # A graph whose nodes are in the order of the grid.
    # Add nodes with Precinct object attributes.
    for x in range(10):
        for y in range(10):
            coords = Point(x * 10, y * 10).buffer(2)
            precinct = Precinct(0, coords, "North Montana", str(10 * x + y), 1, 2)
            G1.add_node(int(precinct.id), attrs=[precinct])
    # Add edges so that degree can be calculated.
    for node in G1.nodes():
        if node % 10 != 9:  # not on the right edge: connect rightward
            G1.add_edge((node, node + 1))
        if node // 10 != 9:  # not on the bottom row: connect downward
            G1.add_edge((node, node + 10))
    ordered_nodes = sorted(G1.nodes(), key=lambda n: len(G1.neighbors(n)))
    G2 = graph()  # Graph whose node numbers correspond to their rank by degree.
    for node in G1.nodes():
        G2.add_node(ordered_nodes.index(node), attrs=G1.node_attributes(node))
    for node in G1.nodes():
        G2_node = ordered_nodes.index(node)
        for neighbor in G1.neighbors(node):
            try:
                G2.add_edge((G2_node, ordered_nodes.index(neighbor)))
            except AdditionError:
                # Undirected edge already added from the other endpoint.
                pass
    communities = create_initial_configuration(G2, 2)

    def get_color(node):
        # Color each node by the community containing its precinct.
        precinct = G2.node_attributes(node)[0]
        for c, community in enumerate(communities):
            if precinct in community.precincts.values():
                return COLORS[c]

    if SHOW:
        visualize_graph(G2, None, lambda v: G2.node_attributes(v)[0].centroid,
                        colors=get_color, sizes=lambda x: 20, show=True)
def __init__(self):
    """Set up an empty dungeon state with fresh graphs and default positions."""
    # Dungeon layout storage.
    self.original_dungeon = []
    self.dungeon = []
    self.size = 0
    # Graphs: loops, the full map, and two distance graphs.
    self.LoopGraph = graph()
    self.FullGraph = graph()
    self.DistanceGraphOriginal = graph()
    self.DistanceGraph = graph()
    # Actors: monster glyph and starting cells for monster and rogue.
    self.MonsterChar = "A"
    self.monster_location = '12'
    self.rogue_location = '12'
    # Game-mode flag and move history.
    self.is_infinitive = False
    self.all_the_moves = []
def test_graph_equality_labels(self):
    """
    Graph equality test. This one checks edge-label equality.
    """
    original = graph()
    original.add_nodes([0, 1, 2])
    original.add_edge((0, 1), label="l1")
    original.add_edge((1, 2), label="l2")
    identical = deepcopy(original)
    # Re-adding the edge without a label drops "l1".
    unlabeled = deepcopy(original)
    unlabeled.del_edge((0, 1))
    unlabeled.add_edge((0, 1))
    # Re-adding the edge with a different label.
    relabeled = deepcopy(original)
    relabeled.del_edge((0, 1))
    relabeled.add_edge((0, 1), label="l3")
    assert original == identical
    assert identical == original
    assert original != unlabeled
    assert unlabeled != original
    assert original != relabeled
    assert relabeled != original
def test_complete_graph(self):
    """After complete(), every distinct pair of nodes must be connected."""
    gr = graph()
    gr.add_nodes(range(10))
    gr.complete()
    all_edges = gr.edges()
    for a in range(10):
        for b in range(10):
            self.assertTrue(a == b or (a, b) in all_edges)
def test_graph_equality_attributes(self):
    """
    Graph equality test. This one checks node and edge attributes.
    """
    original = graph()
    original.add_nodes([0, 1, 2])
    original.add_edge((0, 1))
    original.add_node_attribute(1, ('a', 'x'))
    original.add_node_attribute(2, ('b', 'y'))
    original.add_edge_attribute((0, 1), ('c', 'z'))
    same = deepcopy(original)
    # Rebuilding the edge wipes its attribute.
    no_edge_attr = deepcopy(original)
    no_edge_attr.del_edge((0, 1))
    no_edge_attr.add_edge((0, 1))
    # Rebuilding the edge, then attaching a different attribute.
    other_edge_attr = deepcopy(original)
    other_edge_attr.del_edge((0, 1))
    other_edge_attr.add_edge((0, 1))
    other_edge_attr.add_edge_attribute((0, 1), ('d', 'k'))
    # Rebuilding node 2 and tweaking node 0's attributes.
    other_node_attr = deepcopy(original)
    other_node_attr.del_node(2)
    other_node_attr.add_node(2)
    other_node_attr.add_node_attribute(0, ('d', 'k'))
    assert original == same
    assert same == original
    assert original != no_edge_attr
    assert no_edge_attr != original
    assert original != other_edge_attr
    assert other_edge_attr != original
    assert original != other_node_attr
    assert other_node_attr != original
def test_graph_equality_labels(self):
    """
    Graph equality test. This one checks that edge labels participate
    in equality.
    """
    base = graph()
    base.add_nodes([0, 1, 2])
    base.add_edge((0, 1), label="l1")
    base.add_edge((1, 2), label="l2")
    clone = deepcopy(base)
    # Same topology, label "l1" lost by re-adding without a label.
    stripped = deepcopy(base)
    stripped.del_edge((0, 1))
    stripped.add_edge((0, 1))
    # Same topology, label replaced with "l3".
    swapped = deepcopy(base)
    swapped.del_edge((0, 1))
    swapped.add_edge((0, 1), label="l3")
    assert base == clone
    assert clone == base
    assert base != stripped
    assert stripped != base
    assert base != swapped
    assert swapped != base
def import_network_erl(data_file, eps): gr = graph() with open(data_file) as f: line = f.readline() while line != '': if not coin(eps): line = f.readline() continue link = line.split('"') #print link #_ = raw_input() n1 = link[1] n2 = link[3] #print n1,n2 if not gr.has_node(n1): gr.add_node(n1) gr.add_node_attribute(n1, ('col', n1)) if not gr.has_node(n2): gr.add_node(n2) gr.add_node_attribute(n2, ('col', n2)) if not gr.has_edge((n1, n2)): gr.add_edge((n1, n2)) #s = raw_input() line = f.readline() f.close() print "Graph has %i nodes and %i edges\n" % (len( gr.nodes()), len(gr.edges())) return gr
def calGraph(self):
    """Build a pygraph graph: one node per item, one edge per stream."""
    network = graph()
    for item in self.items:
        network.add_node(item, attrs=[("splines", "")])
    for key, stream in self.streams.iteritems():
        source, target = stream[0], stream[1]
        network.add_edge((source, target), attrs=[("splines", "")])
    return network
def genGraph(self):
    """Build a graph from 'arcs.csv'; each row contributes one vertex pair
    and one edge.

    NOTE(review): inside a class body, `algoritmos.__addVertex` is
    name-mangled to `algoritmos._<ClassName>__addVertex` -- confirm the
    `algoritmos` module/object actually exposes those mangled names.
    """
    g = graph()
    # 'rb' mode for csv.reader is the Python 2 convention.
    csvReader = csv.reader(open('arcs.csv', 'rb'))
    for row in csvReader:  # each row is one edge
        algoritmos.__addVertex(self, g, row)  # add vertex
        algoritmos.__addEdge(self, g, row)  # add edge
    return g
def test_complete_graph(self):
    """complete() must connect every distinct pair of nodes."""
    gr = graph()
    gr.add_nodes(xrange(10))
    gr.complete()
    for i in xrange(10):
        # Was `range(10)` here while every other loop used xrange;
        # use xrange consistently (Python 2 idiom, same iteration).
        for j in xrange(10):
            self.assertTrue((i, j) in gr.edges() or i == j)
def __init__(self):
    """
    Initialize a hypergraph.
    """
    # Pairing: node -> hyperedges containing it.
    self.node_links = {}
    # Pairing: hyperedge -> nodes it contains.
    self.edge_links = {}
    # Backing ordinary graph.
    self.graph = graph()
def set_parameter(age, lambda_set, alpha_set, min_cluster, if_delete):
    """Initialization of SOINN.

    Calling this function after training resets the neural network for
    further training.

    age         -- maximum edge age
    lambda_set  -- learning step
    alpha_set   -- neuron clustering coefficient
    min_cluster -- minimum desired number of clusters
    if_delete   -- whether to delete neurons without neighbors in the
                   final round of training
    """
    global age_max
    global nn_lambda
    global alpha
    global setN
    global accumulated
    global numbers
    global density
    global big_clusters
    global t
    global minimum_cluster
    global gr
    global delete_noise
    delete_noise = if_delete
    t = 1
    setN = []
    accumulated = [1, 1]
    numbers = [1, 1]
    # NOTE(review): setLabel is assigned locally but is not declared
    # global and is never read below -- possibly a missing
    # `global setLabel`; confirm against the rest of the module.
    setLabel = [0, 0]
    big_clusters = []
    density = []
    nn_lambda = lambda_set
    age_max = age
    alpha = alpha_set
    minimum_cluster = min_cluster
    gr = graph()  # fresh pygraph graph holding the neurons
    return
def drawGraph(self, inputs):
    """Render the planet model as a PNG via graphviz; return the file name.

    NOTE(review): the undirected graph `gr` is built but never rendered
    (only the digraph `gst` is written out) -- confirm whether
    addGraphNode() on `gr` has side effects before removing it.
    """
    fileName = 'planetModel.png'
    gr = graph()
    self.addGraphNode(1, gr, inputs)
    # Draw as PNG
    # (earlier undirected-graph rendering, kept commented out)
    #with open("./planetModel.viz", 'wb') as f:
    #dot = write(gr,f)
    #f.write(dot)
    #gvv = gv.readstring(dot)
    #gv.layout(gvv,'dot')
    #gv.render(gvv,'png', fileName)
    #f.close()
    gst = digraph()
    self.addGraphNode(1, gst, inputs)
    with open("./planetModel.viz", 'wb') as f:
        #st, order = breadth_first_search(gst, root=1)
        #gst2 = digraph()
        #gst2.add_spanning_tree(gst.nodes())
        #gst2.(1, 'post')
        dot = write(gst, f)  # serialize the digraph to Dot format
        f.write(dot)
        gvv = gv.readstring(dot)
        gv.layout(gvv, 'dot')
        gv.render(gvv, 'png', fileName)
        f.close()  # redundant inside `with`, but harmless
    return fileName
def __init__(self, point_free_cb, line_free_cb, dimensions):
    """
    Construct a randomized roadmap planner.

    'point_free_cb' -- function(point) -> bool; True when the two-tuple
    point lies in free space.
    'line_free_cb' -- function(start, end) -> bool; True when the line
    segment between the two points is free from obstacles.
    'dimensions' -- ((xmin, xmax), (ymin, ymax)) world bounds, used when
    generating random points.
    """
    # Collision-checking callbacks and world bounds.
    self.point_free_cb = point_free_cb
    self.line_free_cb = line_free_cb
    self.dimensions = dimensions
    # Heuristic search over the roadmap.
    self.search_algorithm = euclidean()
    self.number_of_trials = 500
    # State of the most recent query.
    self.search_result = False
    self.output_path = None
    # Node ids start high -- presumably to avoid clashing with
    # caller-assigned ids (TODO confirm).
    self.id_generator = 10000
    self.graph = graph()
def import_network_erl(data_file,eps): gr = graph() with open(data_file) as f: line = f.readline() while line != '': if not coin(eps): line = f.readline() continue link = line.split('"') #print link #_ = raw_input() n1 = link[1] n2 = link[3] #print n1,n2 if not gr.has_node(n1): gr.add_node(n1) gr.add_node_attribute(n1, ('col', n1)) if not gr.has_node(n2): gr.add_node(n2) gr.add_node_attribute(n2, ('col', n2)) if not gr.has_edge((n1,n2)): gr.add_edge((n1,n2)) #s = raw_input() line = f.readline() f.close() print "Graph has %i nodes and %i edges\n" % (len(gr.nodes()), len(gr.edges()) ) return gr
def _update_hyki_graph(self):
    """Rebuild the article link graph over all HykiArticle objects.

    Scans each article's revision text for links matching LINK_RE and
    adds an edge between linking articles.  Stores the serialized graph
    in self.graph_info, or False when no articles exist.
    """
    import re
    from pygraph.classes.graph import graph
    pattern = re.compile(LINK_RE)
    articles = HykiArticle.objects.all()
    gr = graph()
    if len(articles) > 0:
        for article in articles:
            # Ensure the source article has a node.
            if article.id not in gr.nodes():
                self.create_node(gr, article)
            article_text = self._get_revision_text(article)
            results = pattern.findall(article_text)
            for link in results:
                # NOTE(review): assumes match group 7 holds a path of
                # the form /<source>/<slug>/... -- confirm vs LINK_RE.
                _source = link[7].split('/')[1]
                _link = link[7].split('/')[2]
                if _source == 'hyki':
                    # adjacency article links
                    for _article in articles:
                        # match link with article
                        if _article.slug == _link:
                            # doesn't point to itself
                            if _article != article:
                                # target node doesn't exist yet
                                if _article.id not in gr.nodes():
                                    self.create_node(gr, _article)
                                    gr.add_edge((article.id, _article.id))
                                else:
                                    if not gr.has_edge((_article.id, article.id)):
                                        gr.add_edge((article.id, _article.id))
        self.graph_info = self.format_graph_to_thejit_tree(gr)
    else:
        self.graph_info = False
def build_graph():
    """Build the fixed 10-node test graph, all nodes at ('level', 0),
    with an extra ('in', 10) attribute on node 3."""
    g = graph()
    for node in range(1, 11):
        g.add_node(node)
        g.add_node_attribute(node, ('level', 0))
    edge_list = [(1, 3), (1, 4), (2, 3), (2, 5), (5, 3), (5, 6), (6, 3),
                 (4, 6), (6, 7), (7, 8), (7, 10), (8, 9), (9, 10)]
    for edge in edge_list:
        g.add_edge(edge)
    g.add_node_attribute(3, ('in', 10))
    return g
def make_graph(data): gr = graph() with open(data) as f: line = f.readline() while line != '': link = line.split() n1 = int(link[0]) n2 = int(link[1]) #print n1,n2 if not gr.has_node(n1): gr.add_node(n1) gr.add_node_attribute(n1, ('col', n1)) if not gr.has_node(n2): gr.add_node(n2) gr.add_node_attribute(n2, ('col', n2)) if not gr.has_edge((n1,n2)): gr.add_edge((n1,n2)) #s = raw_input() line = f.readline() f.close() print "Graph has %i nodes and %i edges\n" % (len(gr.nodes()), len(gr.edges()) ) return gr
def __init__(self, point_free_cb, line_free_cb, dimensions):
    """
    Construct a randomized roadmap planner.

    'point_free_cb' -- function(point) -> bool; True when the two-tuple
    point lies in free space.
    'line_free_cb' -- function(start, end) -> bool; True when the line
    segment between the two points is free from obstacles.
    'dimensions' -- ((xmin, xmax), (ymin, ymax)) world bounds, used when
    generating random points.
    """
    self.point_free_cb = point_free_cb
    self.line_free_cb = line_free_cb
    self.dimensions = dimensions
    # Cache per-axis bounds for sampling.
    self.dimensions_x = self.dimensions[0]
    self.dimensions_y = self.dimensions[1]
    self.graph = graph()
    # Sampling parameters: batch size, overall cap, and running count.
    self.number_nodes = 5
    self.node_limit = 50
    self.node_counter = 0
def build_graph():
    """Construct the fixed 10-node sample graph used by the tests.

    Every node starts at ('level', 0); node 3 also carries ('in', 10).
    """
    result = graph()
    for n in range(1, 11):
        result.add_node(n)
        result.add_node_attribute(n, ('level', 0))
    for a, b in ((1, 3), (1, 4), (2, 3), (2, 5), (5, 3), (5, 6), (6, 3),
                 (4, 6), (6, 7), (7, 8), (7, 10), (8, 9), (9, 10)):
        result.add_edge((a, b))
    result.add_node_attribute(3, ('in', 10))
    return result
def drawGraph(self, inputs):
    """Render the planet model as a PNG via graphviz; return the file name.

    NOTE(review): the undirected graph `gr` is constructed but never
    rendered (only the digraph `gst` is) -- confirm whether
    addGraphNode() on `gr` has side effects before removing it.
    """
    fileName = 'planetModel.png'
    gr = graph()
    self.addGraphNode(1, gr, inputs)
    # Draw as PNG
    # (earlier undirected-graph rendering, kept commented out)
    #with open("./planetModel.viz", 'wb') as f:
    #dot = write(gr,f)
    #f.write(dot)
    #gvv = gv.readstring(dot)
    #gv.layout(gvv,'dot')
    #gv.render(gvv,'png', fileName)
    #f.close()
    gst = digraph()
    self.addGraphNode(1, gst, inputs)
    with open("./planetModel.viz", 'wb') as f:
        #st, order = breadth_first_search(gst, root=1)
        #gst2 = digraph()
        #gst2.add_spanning_tree(gst.nodes())
        #gst2.(1, 'post')
        dot = write(gst, f)  # serialize the digraph to Dot format
        f.write(dot)
        gvv = gv.readstring(dot)
        gv.layout(gvv, 'dot')
        gv.render(gvv, 'png', fileName)
        f.close()  # redundant inside `with`, but harmless
    return fileName
def merge_graphs(g1, g2):
    """
    Merge two graphs to a new graph (V, E) with V = g1.nodes | g2.nodes
    and e in E iff e in g1 or e in g2.

    The result is a digraph when either input is directed, otherwise an
    undirected graph.  Edge weights are carried over; edges that cannot
    be added (e.g. already present) are logged and skipped.
    """
    if g1.DIRECTED or g2.DIRECTED:
        g = digraph()
    else:
        g = graph()
    for n in g1.nodes():
        g.add_node(n)
    for n in g2.nodes():
        if n not in g.nodes():
            g.add_node(n)
    for e in g1.edges():
        try:
            g.add_edge(e, g1.edge_weight(e))
        # Was a bare `except:` -- narrowed so KeyboardInterrupt /
        # SystemExit are no longer swallowed; the best-effort
        # log-and-continue behavior is preserved.
        except Exception:
            logging.info("merge_graphs: adding edge %d %d failed" % (e[0], e[1]))
    for e in g2.edges():
        try:
            g.add_edge(e, g2.edge_weight(e))
        except Exception:
            logging.info("merge_graphs: adding edge %d %d failed" % (e[0], e[1]))
    return g
def __init__(self):
    """Create an empty street network."""
    # Graph that holds the street network.
    self._graph = graph()
    self.bounds = None
    # Every street gets a sequential index (used for performance
    # optimization); streets are looked up by that index.
    self.street_index = 0
    self.streets_by_index = {}
def test_add_spanning_tree(self):
    """add_spanning_tree() must add each parent/child link in both directions."""
    gr = graph()
    spanning_tree = {0: None, 1: 0, 2: 0, 3: 1, 4: 2, 5: 3}
    gr.add_spanning_tree(spanning_tree)
    for child, parent in spanning_tree.items():
        # The root (parent None) has no edge to check.
        is_root = (child, parent) == (0, None)
        self.assertTrue((child, parent) in gr.edges() or is_root)
        self.assertTrue((parent, child) in gr.edges() or is_root)
def calGraph(self):
    """Assemble a pygraph graph: items become nodes, streams become edges."""
    result = graph()
    for item in self.items:
        result.add_node(item, attrs=[("splines", "")])
    for key, stream in self.streams.iteritems():
        result.add_edge((stream[0], stream[1]), attrs=[("splines", "")])
    return result
def test_graph_equality_attributes(self):
    """
    Graph equality test. This one checks that node and edge attributes
    participate in equality.
    """
    base = graph()
    base.add_nodes([0, 1, 2])
    base.add_edge((0, 1))
    base.add_node_attribute(1, ('a', 'x'))
    base.add_node_attribute(2, ('b', 'y'))
    base.add_edge_attribute((0, 1), ('c', 'z'))
    clone = deepcopy(base)
    # Rebuilding the edge drops its ('c', 'z') attribute.
    wiped = deepcopy(base)
    wiped.del_edge((0, 1))
    wiped.add_edge((0, 1))
    # Rebuilding the edge, then attaching a different attribute.
    retagged = deepcopy(base)
    retagged.del_edge((0, 1))
    retagged.add_edge((0, 1))
    retagged.add_edge_attribute((0, 1), ('d', 'k'))
    # Rebuilding node 2 and tweaking node 0's attributes.
    reshaped = deepcopy(base)
    reshaped.del_node(2)
    reshaped.add_node(2)
    reshaped.add_node_attribute(0, ('d', 'k'))
    assert base == clone
    assert clone == base
    assert base != wiped
    assert wiped != base
    assert base != retagged
    assert retagged != base
    assert base != reshaped
    assert reshaped != base
def teo_1(g, k):
    """Greedy (Prim-style) construction of a k-node tree of g, always
    attaching the heaviest edge crossing the (tree, outside) cut.

    Raises ValueError when k > |V| or k <= 0.

    NOTE(review): the base case hard-codes node 1 as the seed -- this
    assumes 1 is always a node of g; confirm with callers.
    NOTE(review): max() raises ValueError when no edge crosses the cut
    (disconnected graph).
    """
    if k > len(g.nodes()):
        raise ValueError('FORBIDDEN: K > |V|')
    if k <= 0:
        raise ValueError('FORBIDDEN: K <= 0')
    # Base case: a single-node tree.
    if k == 1:
        tree = graph()
        tree.add_node(1)
        return tree
    # Inductive hypothesis: a (k-1)-node tree.
    tree = teo_1(g, k - 1)
    all_nodes = g.nodes()
    used_nodes = tree.nodes()
    external_nodes = [node for node in all_nodes if node not in used_nodes]
    # Collect every edge crossing the cut between tree and non-tree nodes.
    r = []
    for used_node in used_nodes:
        for external_node in external_nodes:
            if g.has_edge((used_node, external_node)):
                r.append((used_node, external_node))
    # Attach the heaviest crossing edge and its external endpoint.
    new_edge = max(r, key=lambda e: g.edge_weight(e))
    a, b = new_edge
    tree.add_node(b)
    tree.add_edge(new_edge)
    return tree
def test_add_empty_graph(self):
    """Merging an empty graph must leave nodes and edges unchanged."""
    target = testlib.new_graph()
    snapshot = copy(target)
    empty = graph()
    target.add_graph(empty)
    self.assertTrue(target.nodes() == snapshot.nodes())
    self.assertTrue(target.edges() == snapshot.edges())
def create_graph():
    """Build the fixed 9-node sample graph with string node labels."""
    g = graph()
    for name in "123456789":
        g.add_node(name)
    for a, b in [("1", "2"), ("1", "4"), ("1", "5"), ("2", "3"), ("2", "4"),
                 ("2", "5"), ("2", "6"), ("3", "5"), ("3", "6"), ("4", "5"),
                 ("4", "7"), ("4", "8"), ("5", "6"), ("5", "7"), ("5", "8"),
                 ("5", "9"), ("6", "8"), ("6", "9"), ("7", "8"), ("8", "9")]:
        g.add_edge((a, b))
    return g
def __init__(self, data, no_label=True, age_max=200, nn_lambda=70, alpha=2.0,
             del_noise=True, un_label=0):
    """Train an iSOINN network on `data`.

    When no_label is True every sample is fed with the label `un_label`;
    otherwise the last element of each sample is its label and the rest
    are the features.  The resulting neurons/graph are taken from the
    isoinn2 module after training.
    """
    isoinn2.set_parameter(age_max, nn_lambda, alpha, 0, del_noise)
    timecost = time.time()
    t = 0
    # (removed: an unused local `gr = graph()`; the trained graph is
    # read from isoinn2.gr below)
    if no_label:
        for n_point in data:
            t += 1
            isoinn2.step(n_point, un_label, t)
    else:
        for n_point in data:
            t += 1
            n_data = list(n_point)
            n_X = array(n_data[0:-1])
            n_Y = n_data[-1]
            isoinn2.step(n_X, n_Y, t)
    # Final step with an empty input and t == -1 -- presumably signals
    # isoinn2 to finalize training (see isoinn2.step).
    isoinn2.step(array([]), 0, -1)
    print('time cost', time.time() - timecost)
    self.nodes = isoinn2.setN
    self.gr = isoinn2.gr
    print(len(self.nodes))
def plot_graph(gr):
    """ draws the graph to file

    Samples roughly 100 nodes from gr (p = 100/(|V|+1) per node), plus
    ~30% of each sampled node's incoming neighbours, then renders the
    sampled subgraph with graphviz.  The output PNG name is keyed to the
    global `args[1]` input file.
    """
    # Per-node keep probability: expect about 100 sampled nodes.
    p = 100.0 / (len(gr.nodes()) + 1)
    gr_ext = graph()
    for node in gr.nodes():
        if coin(p):
            if not gr_ext.has_node(node):
                gr_ext.add_node(node, attrs=gr.node_attributes(node))
            # Neighbours with an edge into this node; keep ~30% of them.
            for n in [ed[0] for ed in gr.edges() if ed[1] == node]:
                if coin(0.3):
                    if not gr_ext.has_node(n):
                        gr_ext.add_node(n, attrs=gr.node_attributes(n))
                    if not gr_ext.has_edge((node, n)):
                        gr_ext.add_edge((node, n))
    dot = write(gr_ext)
    gvv = gv.readstring(dot)
    gv.layout(gvv, 'dot')
    # Output file keyed to the known input data sets.
    if args[1] == 'karate.txt':
        gv.render(gvv, 'png', 'community1.png')
    elif args[1] == 'email.txt':
        gv.render(gvv, 'png', 'community2.png')
    elif args[1] == 'hep-th-citations.txt':
        gv.render(gvv, 'png', 'community3.png')
    elif args[1] == 'amazon1.txt':
        gv.render(gvv, 'png', 'community4.png')
    elif args[1] == 'p2p-Gnutella30.txt':
        gv.render(gvv, 'png', 'community5.png')
def getSSAroundSS(self, solarSystemID, jumps):
    """Print and return the solar systems within `jumps` of the given one.

    Runs a radius-bounded breadth-first search over the (possibly
    cached) solar-system graph.  The returned list includes the starting
    system itself.
    """
    ss = self.getSSInfo(solarSystemID)
    # Colour by ss[2] (security value): green above 0.5, red otherwise.
    color = 0
    if ss[2] > 0.5:
        color = "green"
    else:
        color = "red"
    ssRegion = colored(ss[0], color)
    ssName = colored(ss[1], color)
    ssSecruity = colored("%.1f" % ss[2], color)
    # Reuse the cached graph when available; otherwise build it once.
    if self.ssgraph:
        gr = self.ssgraph
    else:
        gr = graph()
        nodes = self.getAllSS()
        gr.add_nodes(nodes)
        for edge in self.getAllSSEdges():
            gr.add_edge(edge)
    print "Searching for Solar Systems around %s: %s(%s) in %d jumps." % (ssRegion, ssName, ssSecruity, jumps)
    ssinrad = breadth_first_search(gr, solarSystemID, radius(jumps))
    # Keep the second element -- the BFS visit ordering (the first is
    # the spanning tree, per pygraph's breadth_first_search).
    ssinrad = ssinrad[1]
    text = "Found %d systems" % len(ssinrad)
    text = colored(text, "cyan")
    print "Done. %s, including current one." % text
    return ssinrad
def drawGraphFromSM(SM, names, outFile):
    """Show a similarity matrix and render its graph to a PNG.

    SM      -- square similarity matrix, indexed like `names`
    names   -- node labels, one per row/column of SM
    outFile -- output PNG path

    NOTE(review): nodes are added only for rows whose row-sum is
    positive, yet edges are added for any SM[i][j] > 0 -- an endpoint
    with a zero row-sum would never have been added as a node; confirm
    the inputs rule this out.
    """
    fig = plt.figure(1)
    plot1 = plt.imshow(SM, origin='upper', cmap=cm.gray, interpolation='nearest')
    plt.show()
    gr = graph()
    namesNew = []
    for i, f in enumerate(names):
        # Skip rows with no similarity mass at all.
        if sum(SM[i, :]) > 0:
            gr.add_nodes([f])
            namesNew.append(f)
    Max = SM.max()
    Mean = mean(SM)
    Threshold = Mean * 1.5  # NOTE(review): computed but never used below
    # Add an edge for every positive entry in the upper triangle.
    for i in range(len(names)):
        for j in range(len(names)):
            if i < j:
                if SM[i][j] > 0:
                    gr.add_edge((names[i], names[j]))
    # Draw as PNG
    dot = write(gr)
    gvv = gv.readstring(dot)
    gv.layout(gvv, 'dot')
    gv.render(gvv, 'png', outFile)
def read(string):
    """
    Read a graph from a string in Dot language and return it. Nodes and
    edges specified in the input will be added to the current graph.

    @type  string: string
    @param string: Input string in Dot format specifying a graph.

    @rtype:  graph
    @return: Graph
    """
    dotG = pydot.graph_from_dot_data(string)

    if dotG.get_type() == "graph":
        G = graph()
    elif dotG.get_type() == "digraph":
        G = digraph()
    elif dotG.get_type() == "hypergraph":
        return read_hypergraph(string)
    else:
        raise InvalidGraphType

    # Read nodes...
    # Note: If the nodes aren't explicitly listed, they need to be
    for each_node in dotG.get_nodes():
        G.add_node(each_node.get_name())
        for each_attr_key, each_attr_val in each_node.get_attributes().items():
            G.add_node_attribute(each_node.get_name(),
                                 (each_attr_key, each_attr_val))

    # Read edges...
    for each_edge in dotG.get_edges():
        # Check whether the *target* graph already has the endpoints.
        # (BUGFIX: the original queried dotG.get_node(...), i.e. the
        # source dot graph, which says nothing about G -- a node that
        # appears only in edge statements would be added repeatedly and
        # raise AdditionError on the second edge mentioning it.)
        if not G.has_node(each_edge.get_source()):
            G.add_node(each_edge.get_source())
        if not G.has_node(each_edge.get_destination()):
            G.add_node(each_edge.get_destination())
        # See if there's a weight
        if "weight" in each_edge.get_attributes().keys():
            _wt = each_edge.get_attributes()["weight"]
        else:
            _wt = 1
        # See if there is a label
        if "label" in each_edge.get_attributes().keys():
            _label = each_edge.get_attributes()["label"]
        else:
            _label = ""
        G.add_edge((each_edge.get_source(), each_edge.get_destination()),
                   wt=_wt, label=_label)
        for each_attr_key, each_attr_val in each_edge.get_attributes().items():
            if not each_attr_key in ["weight", "label"]:
                G.add_edge_attribute(
                    (each_edge.get_source(), each_edge.get_destination()),
                    (each_attr_key, each_attr_val)
                )
    return G
def _trim_digraph(original, head, tail):
    """Copy `original` into an undirected graph, then delete head and tail.

    Returns the trimmed copy; `original` is left untouched.
    """
    result = graph()
    result.add_nodes(original.nodes())
    # A plain loop, not a throwaway list comprehension, for side effects.
    for edge in original.edges():
        result.add_edge(edge)
    result.del_node(head)
    result.del_node(tail)
    return result
def test_accessibility_on_very_deep_graph():
    """accessibility() on a 2001-node chain must restore the recursion limit."""
    gr = graph()
    gr.add_nodes(range(0, 2001))
    for i in range(0, 2000):
        gr.add_edge((i, i + 1))
    limit_before = getrecursionlimit()
    accessibility(gr)
    assert getrecursionlimit() == limit_before
def test_accessibility_on_very_deep_graph():
    """accessibility() on a deep chain must restore the recursion limit."""
    # A 311-node chain (scaled down from the original 2001/2000).
    gr = graph()
    gr.add_nodes(range(0, 311))
    for node in range(0, 310):
        gr.add_edge((node, node + 1))
    saved_limit = getrecursionlimit()
    accessibility(gr)
    assert getrecursionlimit() == saved_limit
def __init__(self, pairs_rates):
    """Build the trading graph and pre-compute nvc -> trc paths.

    pairs_rates -- exchange-rate data used by init_graph() to populate
    self.graph (which init_graph reads via self).
    """
    # Fixed trade fee and maximum path length considered.
    self.trade_fee = 0.02
    self.graph_depth = 5
    self.pairs_rates = pairs_rates
    self.graph = graph()
    # init_graph() fills self.graph from self.pairs_rates.
    self.trade_graph = self.init_graph()
    # Pre-compute the paths between the two currencies of interest.
    self.path = self.paths_from_to(self.graph, "nvc", "trc")
    print "Paths: %s" % str(self.path)
def __init__(self, graph_container, threshold = 0):
    """Build a pygraph graph from `graph_container`.

    threshold -- minimum edge weight retained by _buildGraph (stored as
    a float in self.weight_thresh).
    """
    self.graph_container = graph_container
    self.gr = graph()
    self.weight_thresh = float(threshold)
    print "weight threshold set:: " + str(self.weight_thresh)
    # Populate self.gr from the container.
    self._buildGraph(self.graph_container)
def __init__(self, graph_attrs=None):
    """
    graph_attrs -- (list of 2-tuples of string literals) default graph
    attributes; defaults to an empty list.
    """
    # BUGFIX: the original used a mutable default argument ([]), which
    # is shared across all instances constructed without the argument;
    # use None and create a fresh list per instance instead.
    self.graph_attrs = graph_attrs if graph_attrs is not None else []
    self.gr = graph()
def test_cuttree0(self):
    """cut_tree() on the nations-of-the-world graph must complete."""
    G = graph()
    nations_of_the_world(G)
    ct = cut_tree(G)
    # BUGFIX: removed a leftover `import pdb; pdb.set_trace()` debugger
    # breakpoint, which would hang any automated test run; replaced with
    # a minimal sanity assertion on the result.
    self.assertTrue(ct is not None)
def read(string):
    """
    Read a graph from a string in Dot language and return it. Nodes and
    edges specified in the input will be added to the current graph.

    @type  string: string
    @param string: Input string in Dot format specifying a graph.

    @rtype:  graph
    @return: Graph
    """
    # graph_from_dot_data returns a list in this pydot version.
    dotG = pydot.graph_from_dot_data(string)[0]

    if (dotG.get_type() == "graph"):
        G = graph()
    elif (dotG.get_type() == "digraph"):
        G = digraph()
    elif (dotG.get_type() == "hypergraph"):
        return read_hypergraph(string)
    else:
        raise InvalidGraphType

    # Read nodes...
    # Note: If the nodes aren't explicitly listed, they need to be
    for each_node in dotG.get_nodes():
        G.add_node(each_node.get_name())
        for each_attr_key, each_attr_val in each_node.get_attributes().items():
            G.add_node_attribute(each_node.get_name(),
                                 (each_attr_key, each_attr_val))

    # Read edges...
    for each_edge in dotG.get_edges():
        # Check if the nodes have been added to the target graph
        # (handles nodes that only appear inside edge statements).
        if not G.has_node(each_edge.get_source()):
            G.add_node(each_edge.get_source())
        if not G.has_node(each_edge.get_destination()):
            G.add_node(each_edge.get_destination())
        # See if there's a weight
        if 'weight' in each_edge.get_attributes().keys():
            _wt = each_edge.get_attributes()['weight']
        else:
            _wt = 1
        # See if there is a label
        if 'label' in each_edge.get_attributes().keys():
            _label = each_edge.get_attributes()['label']
        else:
            _label = ''
        G.add_edge((each_edge.get_source(), each_edge.get_destination()),
                   wt = _wt, label = _label)
        # Copy any remaining edge attributes verbatim.
        for each_attr_key, each_attr_val in each_edge.get_attributes().items():
            if not each_attr_key in ['weight', 'label']:
                G.add_edge_attribute((each_edge.get_source(), each_edge.get_destination()), \
                                     (each_attr_key, each_attr_val))
    return G
def parsimonous_protein_identification(peptides):
    """
    parsimonous_protein_identification - takes a dict of the form
    {<peptide_seq>: [<protein_name>, ...]} and returns the proteins
    identified using parsimony.

    Uniquely-mapped peptides fix their protein immediately.  The rest
    are clustered into connected components of co-occurring proteins,
    and a minimal covering set of proteins is chosen per cluster by
    brute force over combinations of increasing size.

    NOTE(review): mutates `peptides` (pops uniquely-mapped entries)
    while iterating items() -- fine on Python 2 where items() returns a
    list, a RuntimeError on Python 3.
    """
    detected_proteins = {}
    protein2peptides = {}
    # start with the uniquely determined proteins
    for peptide, proteins in peptides.items():
        if len(proteins) == 1:
            detected_proteins[proteins[0]] = [peptide]
            peptides.pop(peptide)
        else:
            for p in proteins:
                if not p in protein2peptides:
                    protein2peptides[p] = [peptide]
                else:
                    protein2peptides[p].append(peptide)
    # remaining peptides have multiple potential proteins, use parsimony
    g = graph()
    # identify protein clusters: proteins sharing a peptide are connected
    for peptide, proteins in peptides.items():
        for protein in proteins:
            if not g.has_node(protein):
                g.add_node(protein)
        for p1, p2 in combinations(proteins, 2):
            if not g.has_edge((p1, p2)):
                g.add_edge((p1, p2))
    connected = connected_components(g).items()
    # Group proteins by their component id.
    clusters = {subgraph: set() for protein, subgraph in connected}
    for protein, subgraph in connected:
        clusters[subgraph] = clusters[subgraph].union(set((protein,)))

    def find_covering(proteins):
        # Smallest subset of `proteins` whose peptides cover the union
        # of all their peptides; None if no subset covers (unreachable,
        # since the full set covers itself).
        peptides = set(chain(*(tuple(protein2peptides[p]) for p in proteins)))
        for k in range(1, len(proteins) + 1):
            for covering in combinations(proteins, k):
                covered = set(chain(*(tuple(protein2peptides[p]) for p in covering)))
                if len(covered) == len(peptides):
                    return covering
        return None

    # find the minimal protein covering of each cluster
    for cluster in clusters.values():
        covering = find_covering(cluster)
        if covering is None:
            print "Error, failed to cover " + str(subgraph)
            sys.exit(1)
        else:
            for protein in covering:
                detected_proteins[protein] = protein2peptides[protein]
    return detected_proteins
def sample_gene_interactions(c, args, idx_to_sample):
    """Report genes interacting with args.gene within args.radius hops.

    Loads the pickled HPRD protein-protein interaction graph, runs a
    radius-bounded BFS from args.gene, writes the spanning tree (Dot
    format) to `out`, and prints per-sample interacting genes that are
    variants: one line per matching gene in --var-mode, otherwise one
    line per hop-order with the matching genes joined by commas.
    """
    # fetch variant gene dict for all samples
    get_variant_genes(c, args, idx_to_sample)
    # file handle for fetching the hprd graph
    file_graph = os.path.join(path_dirname, 'hprd_interaction_graph')
    # load the graph using cPickle and close file handle
    gr = graph()
    f = open(file_graph, 'rb')
    gr = cPickle.load(f)
    f.close()
    k = []
    variants = []
    # calculate nodes from the graph
    hprd_genes = gr.nodes()
    if args.gene == None or args.gene not in hprd_genes:
        sys.stderr.write("gene name not given else not represented in the p-p interaction file\n")
    elif args.gene in hprd_genes:
        # Radius-bounded BFS: x is the spanning tree, y the visit order.
        x, y = \
            breadth_first_search(gr, root=args.gene, filter=radius(args.radius))
        gst = digraph()
        gst.add_spanning_tree(x)
        dot = write(gst)
        out.write(dot)
        # Shortest paths from the root: sd maps gene -> hop count ("order").
        st, sd = shortest_path(gst, args.gene)
        if args.var_mode:
            for sample in sam.iterkeys():
                var = sam[str(sample)]
                # for each level return interacting genes if they are
                # variants in the sample.
                # 0th order would be returned if the user chosen
                # gene is a variant in the sample
                for x in range(0, (args.radius + 1)):
                    for each in var:
                        for key, value in sd.iteritems():
                            if value == x and key == each[0]:
                                print "\t".join([str(sample), str(args.gene), \
                                                 str(x), \
                                                 str(key), \
                                                 str(each[1]), \
                                                 str(each[2]), \
                                                 str(each[3])])
        elif (not args.var_mode):
            for sample in sam.iterkeys():
                for each in sam[str(sample)]:
                    variants.append(each[0])
                for x in range(0, (args.radius + 1)):
                    for key, value in sd.iteritems():
                        if value == x and key in set(variants):
                            k.append(key)
                    if k:
                        print "\t".join([str(sample), str(args.gene), \
                                         str(x) + "_order:", ",".join(k)])
                    else:
                        print "\t".join([str(sample), str(args.gene), str(x) + "_order:", "none"])
                    # initialize keys for next iteration
                    k = []
def get_strings():
    """Cluster near-duplicate fingerprints and map each to a representative.

    Reads 'fingerprints.uniq', connects fingerprints at Hamming distance
    < 2 (candidate pairs bucketed by shared half-strings), chooses a
    consensus (or center) string per connected component, writes the
    clusters to 'clusters', and returns {fingerprint: representative}.
    """
    with open('fingerprints.uniq', 'r') as f:
        fingerprints = [x.rstrip() for x in f.readlines()]
    gr = graph()
    gr.add_nodes(fingerprints)
    # Bucket fingerprints by each half: two strings at distance < 2
    # must agree exactly on at least one half.
    fs = dict()
    for f in fingerprints:
        for h in (f[:15], f[16:]):
            if h not in fs:
                fs[h] = []
            fs[h].append(f)
    edges = set()
    for shared in fs.values():
        for f1 in shared:
            for f2 in shared:
                if f1 < f2 and hamming_distance(f1, f2) < 2:
                    edges.add((f1, f2))
    for e in edges:
        gr.add_edge(e)
    # Group fingerprints by connected-component id (computed once; the
    # original called connected_components() twice).
    real_components = dict()
    for (k, v) in connected_components(gr).items():
        if v in real_components:
            real_components[v].append(k)
        else:
            real_components[v] = [k]
    all_consensus = {k: consensus_string(v) for (k, v) in real_components.items()}
    all_consensus_distances = {k: max_hamming_distance(v, real_components[k])
                               for (k, v) in all_consensus.items()}
    for k in real_components.keys():
        cost = all_consensus_distances[k]
        if cost > 1:
            # Consensus is poor; try the center string instead.
            c = center_string(real_components[k])
            c_cost = max_hamming_distance(c, real_components[k])
            if c_cost < cost:
                # BUGFIX: the original assigned both the string and its
                # distance into all_consensus_distances[k] (the string
                # was immediately clobbered by the distance, and the
                # representative in all_consensus was never updated).
                all_consensus[k] = c
                all_consensus_distances[k] = c_cost
    # Map every fingerprint to its component's representative
    # (local renamed from `map`, which shadowed the builtin).
    fp_map = {}
    for (k, v) in real_components.items():
        fp_map.update({(w, all_consensus[k]) for w in v})
    with open('clusters', 'w') as f:
        pp = pprint.PrettyPrinter(indent=4, stream=f)
        pp.pprint(real_components)
    return fp_map
def __init__(self):
    """
    Initialize a hypergraph.
    """
    # Initialize the mixin base classes explicitly.
    common.__init__(self)
    labeling.__init__(self)
    self.node_links = {}  # node -> hyperedges containing it
    self.edge_links = {}  # hyperedge -> nodes it contains
    self.graph = graph()  # backing ordinary graph
def graph_pygraph(g):
    """Convert an adjacency-list graph to a pygraph graph.

    `g[i]` lists the neighbours of node i; each undirected edge is added
    once by only taking pairs with the neighbour index above the node's.
    """
    from pygraph.classes.graph import graph
    pg = graph()
    pg.add_nodes(range(len(g)))
    for node, neighbours in enumerate(g):
        for other in neighbours:
            if other > node:
                pg.add_edge((node, other))
    return pg
def test_add_spanning_tree(self):
    """Spanning-tree insertion must create both edge directions per link."""
    gr = graph()
    tree = {0: None, 1: 0, 2: 0, 3: 1, 4: 2, 5: 3}
    gr.add_spanning_tree(tree)
    edge_set = gr.edges()
    for node in tree:
        parent = tree[node]
        root_link = (node, parent) == (0, None)
        self.assertTrue((node, parent) in edge_set or root_link)
        self.assertTrue((parent, node) in edge_set or root_link)
def test_raise_exception_on_duplicate_node_addition(self):
    """Adding an already-present node must raise AdditionError."""
    gr = graph()
    gr.add_node('a_node')
    try:
        gr.add_node('a_node')
    except AdditionError:
        pass
    else:
        # BUGFIX: was a bare `fail()`, which is not defined at module
        # level; use the TestCase method so the no-exception path
        # actually fails the test instead of raising NameError.
        self.fail()
def test_raise_exception_when_edge_added_from_non_existing_node(self):
    """Adding an edge from an unknown node must raise KeyError."""
    gr = graph()
    gr.add_nodes([0, 1])
    try:
        gr.add_edge((3, 0))
    except KeyError:
        pass
    else:
        # BUGFIX: was a bare `fail()` (undefined at module level);
        # self.fail() is the TestCase method and always available.
        self.fail()
    assert gr.node_neighbors == {0: [], 1: []}
def test_edges_between_different_nodes_should_be_a_single_arrow(self):
    # NOTE(review): despite the name, this exercises a self-loop (0, 0),
    # checking it appears exactly once with its properties/attributes.
    gr = graph()
    gr.add_node(0)
    gr.add_edge((0, 0), label="label", attrs=[('key', 'value')])
    edge_list = gr.edges()
    assert (0, 0) in edge_list
    assert len(edge_list) == 1
    assert gr.neighbors(0) == [0]
    assert (0, 0) in gr.edge_properties.keys()
    assert (0, 0) in gr.edge_attr.keys()
    assert len(gr.edge_attr[(0, 0)]) == 1
def graph(self):
    """Build a pygraph graph from self.paths.

    Each path point becomes a node carrying a ('position', node)
    attribute; consecutive points along a path become edges.
    """
    g = graph()
    for path in self.paths:
        for index, node in enumerate(path):
            if node not in g.nodes():
                g.add_node(node)
                g.add_node_attribute(node, ('position', node))
            if index > 0:
                g.add_edge((path[index - 1], node))
    return g
def test_raise_exception_when_edge_added_to_non_existing_node(self):
    """Adding an edge to an unknown node must raise KeyError."""
    gr = graph()
    gr.add_nodes([0, 1])
    try:
        gr.add_edge((0, 3))
    except KeyError:
        pass
    else:
        # BUGFIX: was a bare `fail()` (undefined at module level);
        # self.fail() is the TestCase method and always available.
        self.fail()
    assert gr.node_neighbors == {0: set(), 1: set()}
def doTextRank(self, error=0.001):
    """Run TextRank over the document's sentences.

    Builds a complete sentence-similarity graph, removes sentences with
    near-zero total similarity (their denominator in the update would be
    ~0), then iterates the PageRank-style update until the total score
    change per sweep drops below `error`.  Returns immediately if
    already computed.
    """
    if (self.textRank):
        return
    if (__DEBUG__):
        print 'Starting to Create Graph for', self.fn
    self.textRank = True
    self.graph = graph()
    self.graph.add_nodes(self.sentences())
    # Complete graph weighted by pairwise sentence similarity.
    for pair in itertools.combinations(self.sentences(), 2):
        self.graph.add_edge(pair, wt=pair[0].similarity(pair[1]))
    # Remove sentences that are too dissimilar to every other sentence;
    # their total edge weight would make the PR denominator 0.
    # NOTE(review): this also removes from self.sentences_ while
    # iterating self.sentences() -- confirm sentences() does not return
    # self.sentences_ itself, or the iteration may skip elements.
    for s in self.sentences():
        total = sum([
            self.graph.edge_weight((s, n))
            for n in self.graph.node_neighbors[s]
        ])
        if total < 0.001:
            self.graph.del_node(s)
            self.sentences_.remove(s)
    totalUpdate = 100.0  # sentinel: force at least one sweep
    while totalUpdate > error:
        totalUpdate = 0
        i = 0
        for s in self.sentences():
            oldValue = s.getScore()
            total = 0
            for n in self.graph.node_neighbors[s]:
                wt = self.graph.edge_weight((s, n))
                score = n.getScore()
                num = score * wt
                # normalize by the sum of edge weights of all
                # neighbours of n
                den = sum([
                    self.graph.edge_weight((n, m))
                    for m in self.graph.node_neighbors[n]
                ])
                total += num / den
            # TextRank update with damping factor __DFACTOR__.
            s.setScore((1 - __DFACTOR__) + __DFACTOR__ * total)
            update = abs(s.getScore() - oldValue)
            totalUpdate += update
        logging.info('Text Rank Iteration, Error = %f' % totalUpdate)