def export(self):
    """Prompt the user for a filename and export the changed graph.

    Filenames ending in 'pajek'/'condor' go through the custom writers;
    anything else is delegated to igraph.save, which infers the format
    from the file extension. I/O failures are reported in an error dialog.
    """
    dialog = gtk.FileChooserDialog(
        title='Export graph as..',
        action=gtk.FILE_CHOOSER_ACTION_SAVE,
        buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                 gtk.STOCK_SAVE, gtk.RESPONSE_OK))
    dialog.set_default_response(gtk.RESPONSE_OK)
    response = dialog.run()
    if response == gtk.RESPONSE_OK:
        f = dialog.get_filename()
        try:
            if f.endswith('pajek'):
                MMWritePajek(self.changed_graph, f)
            elif f.endswith('condor'):
                MMWriteCondor(self.changed_graph, f)
            else:
                igraph.save(self.changed_graph, f)
            logging.info('%s saved' % f)
        except IOError as e:  # 'as' form is valid on Python 2.6+ and 3
            logging.error(e)
            # Fixed user-facing typo: 'and invalid' -> 'an invalid'.
            message = ('Error: %s\n\nIf you specified an invalid format, '
                       'use one of the following extensions:\n\n%s'
                       % (e, '\n'.join(igraph.Graph._format_mapping.keys())))
            errordialog = gtk.MessageDialog(dialog,
                                            type=gtk.MESSAGE_ERROR,
                                            buttons=gtk.BUTTONS_CLOSE,
                                            message_format=message)
            errordialog.run()
            errordialog.destroy()
    # Fix: the file-chooser dialog was never destroyed, leaking the window.
    dialog.destroy()
def LoadNetworkPrePPI(
        Uniprot2ENSG,
        preppi_fil="/Users/jiayao/Work/NB_proposal/dat/network/preppi_final600.txt",
        save_fil="../dat/network/saved/PrePPI.gml"):
    """Build an igraph network from the PrePPI protein-interaction table.

    Parameters
    ----------
    Uniprot2ENSG : dict
        Maps UniProt accessions to ENSG gene IDs.
    preppi_fil : str
        Tab-separated PrePPI edge list with columns
        ``prot1``, ``prot2``, ``final_score``. (Generalized from a
        hard-coded path; default preserves the original behavior.)
    save_fil : str
        Where the constructed graph is written in GML format.

    Returns
    -------
    (PPI, UnmappedUniprot)
        The igraph.Graph with a per-vertex ``ENSGID`` attribute and a
        per-edge ``weight`` attribute, and the set of UniProt IDs that
        had no ENSG mapping.
    """
    PrePPI = pd.read_csv(preppi_fil, delimiter="\t")
    edges, weights = [], []
    VerticeK = 0           # next vertex index to assign
    Vertices = {}          # ENSG ID -> vertex index
    UnmappedUniprot = set()
    ENSGIDs = []           # vertex index -> ENSG ID, in insertion order
    for i, row in PrePPI.iterrows():
        prot1, prot2, weight = row["prot1"], row["prot2"], row["final_score"]
        try:
            g1, g2 = Uniprot2ENSG[prot1], Uniprot2ENSG[prot2]
        except KeyError:  # was a bare 'except:'; only a missing mapping is expected
            if prot1 not in Uniprot2ENSG:
                UnmappedUniprot.add(prot1)
            if prot2 not in Uniprot2ENSG:
                UnmappedUniprot.add(prot2)
            continue
        if g1 not in Vertices:
            Vertices[g1] = VerticeK
            VerticeK += 1
            ENSGIDs.append(g1)
        if g2 not in Vertices:
            Vertices[g2] = VerticeK
            VerticeK += 1
            ENSGIDs.append(g2)
        edges.append((Vertices[g1], Vertices[g2]))
        weights.append(float(weight))
    PPI = ig.Graph(edges, edge_attrs={"weight": weights})
    PPI.vs["ENSGID"] = ENSGIDs
    ig.save(PPI, save_fil)
    return PPI, UnmappedUniprot
def giancomponenet(G):
    '''Reduce *G* to its giant (largest weakly connected) component.

    The component is written to "giantcomponent.txt" in NCOL format and
    re-read as a directed graph with names and optional weights.
    NOTE(review): the misspelled name is kept for caller compatibility.
    '''
    print("Making giant componenet")  # parenthesized: valid on Py2 and Py3
    components = G.clusters(igraph.WEAK)
    gr2 = "giantcomponent.txt"
    igraph.save(components.giant(), gr2, format="ncol")
    # Fix: the file handle was opened and never closed; use a context
    # manager so it is released even if Read_Ncol raises.
    with open(gr2, "rb") as fh:
        G = igraph.Graph.Read_Ncol(fh, names=True, weights="if_present",
                                   directed=True)
    return G
def LoadPsychencodeRGN(Symbol2ENSG):
    """Build an igraph regulatory network from the PsychENCODE GRN table.

    Parameters
    ----------
    Symbol2ENSG : dict
        Maps gene symbols to ENSG gene IDs.

    Returns
    -------
    RGN : igraph.Graph
        TF -> target edges with ``weight``, ``region``, ``TF`` and
        ``Taget`` edge attributes and a per-vertex ``ENSGID`` attribute.
        The graph is also saved to ../dat/network/saved/psychencode.rgn2.gml.
    """
    Brain_Reg_Net2_csv = pd.read_csv(
        "../dat/network/psychencode/INT-14_ElasticNet_Filtered_Cutoff_0.1_GRN_2.csv"
    )
    # Keep only the first occurrence of each (TF, target) pair.
    Brain_Reg_Net2_csv_trim = Brain_Reg_Net2_csv.drop_duplicates(
        subset=["Transcription_Factor", "Target_Gene"], keep="first")
    edges, regions, weights = [], [], []
    TFs = []
    Targets = []
    VerticeK = 0           # next vertex index to assign
    Vertices = {}          # ENSG ID -> vertex index
    UnmappedSymbols = set()
    ENSGIDs = []           # vertex index -> ENSG ID, in insertion order
    for i, row in Brain_Reg_Net2_csv_trim.iterrows():
        symbol1, symbol2, region, weight = row["Transcription_Factor"], row[
            "Target_Gene"], row["Enhancer_Region"], row["Edge_Weight"]
        try:
            # Convert symbols to ENSG IDs, which we always do.
            g1, g2 = Symbol2ENSG[symbol1], Symbol2ENSG[symbol2]
        except KeyError:
            # Fix: the original handler tested/added g1 and g2, which are
            # unbound when the lookup raises (NameError); the *symbols*
            # are what failed to map.
            if symbol1 not in Symbol2ENSG:
                UnmappedSymbols.add(symbol1)
            if symbol2 not in Symbol2ENSG:
                UnmappedSymbols.add(symbol2)
            continue
        if g1 not in Vertices:
            Vertices[g1] = VerticeK
            VerticeK += 1
            ENSGIDs.append(g1)
        if g2 not in Vertices:
            Vertices[g2] = VerticeK
            VerticeK += 1
            ENSGIDs.append(g2)
        u, v = Vertices[g1], Vertices[g2]
        edges.append((u, v))
        regions.append(region)
        weights.append(float(weight))
        TFs.append(row["Transcription_Factor"])
        Targets.append(row["Target_Gene"])
    # NOTE(review): "Taget" is a typo for "Target", but the key is kept
    # because downstream consumers may already read this attribute name.
    RGN = ig.Graph(edges,
                   edge_attrs={
                       "weight": weights,
                       "region": regions,
                       "TF": TFs,
                       "Taget": Targets
                   })
    RGN.vs["ENSGID"] = ENSGIDs
    ig.save(RGN, "../dat/network/saved/psychencode.rgn2.gml")
    return RGN
def export(self):
    """Prompt the user for a filename and export the changed graph.

    Filenames ending in 'pajek'/'condor' use the custom writers; all
    other names are passed to igraph.save, which picks the format from
    the extension. I/O failures are shown in an error dialog.
    """
    dialog = gtk.FileChooserDialog(
        title='Export graph as..',
        action=gtk.FILE_CHOOSER_ACTION_SAVE,
        buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                 gtk.STOCK_SAVE, gtk.RESPONSE_OK))
    dialog.set_default_response(gtk.RESPONSE_OK)
    response = dialog.run()
    if response == gtk.RESPONSE_OK:
        f = dialog.get_filename()
        try:
            if f.endswith('pajek'):
                MMWritePajek(self.changed_graph, f)
            elif f.endswith('condor'):
                MMWriteCondor(self.changed_graph, f)
            else:
                igraph.save(self.changed_graph, f)
            logging.info('%s saved' % f)
        except IOError as e:  # 'as' form is valid on Python 2.6+ and 3
            logging.error(e)
            # Fixed user-facing typo: 'and invalid' -> 'an invalid'.
            message = ('Error: %s\n\nIf you specified an invalid format, '
                       'use one of the following extensions:\n\n%s'
                       % (e, '\n'.join(igraph.Graph._format_mapping.keys())))
            errordialog = gtk.MessageDialog(dialog,
                                            type=gtk.MESSAGE_ERROR,
                                            buttons=gtk.BUTTONS_CLOSE,
                                            message_format=message)
            errordialog.run()
            errordialog.destroy()
    # Fix: the file-chooser dialog was never destroyed, leaking the window.
    dialog.destroy()
def save_graph(graph):
    """Saves scaffold graph in GML format"""
    # Destination is derived from the module-level SAVE_PATH and USER.
    target = SAVE_PATH + f'mysql_{USER}.gml'
    igraph.save(graph, filename=target)
def save_graph(graph):
    """Saves shortest paths graph in GML format."""
    # Destination is derived from the module-level SAVE_PATH and USER.
    target = SAVE_PATH + f'mysql_{USER}_shortest.gml'
    igraph.save(graph, filename=target)
# --- script fragment: reads like the tail of a per-feature loop; the
# binding of `f` (a GeoJSON-like feature), `t`, `G` and `g` is outside
# this view — TODO confirm against the full file.
print(time() - t)  # elapsed time since the previous timestamp
t = time()
# String keys for the first and last coordinate of the feature's line.
fpoint = coord_to_str(f['geometry']['coordinates'][0][0])
lpoint = coord_to_str(f['geometry']['coordinates'][0][-1])
# Add each endpoint as a vertex unless a vertex with that name exists.
if fpoint not in G.vs['name']:
    G.add_vertex(fpoint)
if lpoint not in G.vs['name']:
    G.add_vertex(lpoint)
# Edge carries the segment length plus optional lake annotations
# (.get returns None when the property is absent).
G.add_edge(fpoint, lpoint,
           length=f['properties']['Shape_Leng'],
           deposit_lake=f['properties'].get('deposit lake'),
           source_lake=f['properties'].get('source lake'))
# Persist the graph and immediately reload it from the pickle.
g.save(G, 'D/DNR HYDRO/corrected streams igraph.pickle')
G = g.read('D/DNR HYDRO/corrected streams igraph.pickle')


def upstream_lakes(G, dowlknum):
    # Shortest 'length'-weighted distances from every edge depositing
    # into lake `dowlknum` to all vertices of G.
    # NOTE(review): the function body appears to continue beyond this
    # chunk; only the visible portion is documented here.
    df = pd.DataFrame(index=range(len(G.vs)))
    # Edges whose 'deposit_lake' attribute equals the target lake.
    dep_es = G.es.select(deposit_lake_eq=dowlknum)
    if len(dep_es) == 0:
        # No inflows: nothing is upstream of this lake.
        return pd.DataFrame(columns=['lake', 'distance'])
    for i in range(len(dep_es)):
        # One distance column per depositing edge, measured from that
        # edge's source vertex.
        df[str(i)] = G.shortest_paths_dijkstra(source=dep_es[i].source,
                                               weights='length')[0]
    # Per-vertex minimum over all depositing-edge sources.
    df['short dist'] = df.apply(min, axis=1)
    df = df[df['short dist'] < np.inf]  # drop unreachable vertices
    df = df[df['short dist'] >= 0]
    #now we have all attached vertices and the shortest difference to them
## Creating Big Graph in Igraph
# Build a directed graph from ListNodes / DictEdges populated earlier
# in the script, and remember which node each vertex index maps to.
g = igraph.Graph(directed="yes")  # non-empty string is truthy -> directed
number = 0
for i in ListNodes:
    g.add_vertex(name=i)
    MapVertexToNode[number] = i
    number += 1
for key in DictEdges:
    # key is an (origin, destination) pair; the value stores
    # (times, distances) for that edge.
    g.add_edge(source=key[0], target=key[1],
               times=DictEdges[key][0],
               distances=DictEdges[key][1])
# Fix: the pickle file handle was opened and never closed; the context
# manager guarantees the buffer is flushed and the descriptor released.
with open("FullGraph.pickle", "wb") as pickle_graph:
    igraph.save(g, pickle_graph)
print(g.es)
### LOADING GRAPH
#pickle_in = open("FullGraph.pickle","rb")
#g = pickle.load(pickle_in)
#print(g)
#pickle_in.close()
"""The following procedures must be used to compute the graphs and save them in a dictionary, but codes must be adjusted properly. Two dictionaries were needed, as one had more information for the usual origin-destination pairs while the other just saved distances and times to speed up calculations. If the graph is small enough, the same dictionary could be used as both inputs (because they would be not memory issues)"""
# Computing the 10000*969 dict