def toplevel_to_networkx():
    """lkbutils.rdflib_to_networkx is available."""
    from lkbutils import rdflib_to_networkx
    rdflib_graph = Fixture.graph_sample.rdflib_graph
    converted_on_toplevel = rdflib_to_networkx(rdflib_graph)
    converted_on_instance = rdflib_model.to_networkx(rdflib_graph)
    # equality of nodes, edges + edge attributes
    assert converted_on_toplevel.nodes() == converted_on_instance.nodes()
    assert (list(networkx.generate_edgelist(converted_on_toplevel))
            == list(networkx.generate_edgelist(converted_on_instance)))
def encode_gps_full(gps, base=10):
    """Converts a global parameter set object into a string representation."""
    gps_encoding = ""
    totallength = 0
    nodes = gps.keys()
    edges = []
    for node in sorted(nodes):
        # first reverse engineer contexts, predecessors, lps
        contexts = gps[node].keys()
        n = len(contexts)
        preds = sorted(list(set().union(*contexts)))
        edges += [(pred, node) for pred in preds]
        lps = gps[node]
        # then encode current lps
        code = encode_lps(preds, lps, base)
        # finally shift digits and add n-k zeros
        k = len(str(code))
        codestring = (n - k) * '0' + str(code)
        gps_encoding = codestring + gps_encoding
        totallength += n
    IG = nx.DiGraph()
    IG.add_edges_from(edges)
    IGstring = "_".join(nx.generate_edgelist(IG, data=False))
    finalencoding = gps_encoding.zfill(totallength) + "." + IGstring
    return finalencoding
def updateGraph(nGraph, net):
    n2 = nx.convert_node_labels_to_integers(nGraph)
    edgelist = nx.generate_edgelist(n2, data=False)
    edge_export = []
    for pair in edgelist:
        nodes = pair.split(' ')
        edge_export.append(int(nodes[0]))
        edge_export.append(int(nodes[1]))
    net.controlTask.UpdateGraph(edge_export)
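# A hedged alternative sketch (not from the original source): the same flat export can be
# built straight from edges() without the generate_edgelist string round-trip.
# net.controlTask.UpdateGraph is assumed to be the same external API used above.
import networkx as nx

def updateGraph_direct(nGraph, net):
    n2 = nx.convert_node_labels_to_integers(nGraph)
    # flatten the (u, v) pairs into [u0, v0, u1, v1, ...]
    edge_export = [node for edge in n2.edges() for node in edge]
    net.controlTask.UpdateGraph(edge_export)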
def test_networkx_module(self):
    my_graph = ExtendedGraph()
    my_graph.add_edge(1, 2, weight=4.7)
    my_graph.add_node(42)
    self.assertEqual(sorted(my_graph.nodes()), [1, 2, 42])
    self.assertEqual(sorted(my_graph.edges()), [(1, 2)])
    self.assertEqual(
        list(nx.generate_edgelist(my_graph)),
        ["1 2 {'weight': 4.7}"])
def write_input_file(G, stress_budget, path):
    with open(path, "w") as fo:
        n = len(G)
        s_total = stress_budget
        lines = nx.generate_edgelist(G, data=["happiness", "stress"])
        fo.write(str(n) + "\n")
        fo.write(str(s_total) + "\n")
        fo.writelines("\n".join(lines))
def netx_to_csv(func_graph):
    """ Convert a networkx graph to csv """
    out_string = '<p>'
    for line in nx.generate_edgelist(func_graph, delimiter=','):
        # source, target, weight
        edge = line.replace('\'', '').replace('{weight:', '').replace('}', '').replace(' ', '')
        out_string += edge + '<br>'
    out_string += '</p>'
    return out_string
def graph2walks(self, method="", params={}):
    self.params = params

    if method == "deepwalk":
        number_of_walks = self.params['number_of_walks']
        walk_length = self.params['walk_length']
        alpha = self.params['alpha']

        # Temporarily generate the edge list
        with open("./temp/graph.edgelist", 'w') as f:
            for line in nx.generate_edgelist(self.graph, data=False):
                f.write("{}\n".format(line))

        dwg = deepwalk.load_edgelist("./temp/graph.edgelist", undirected=True)
        corpus = deepwalk.build_deepwalk_corpus(G=dwg, num_paths=number_of_walks,
                                                path_length=walk_length, alpha=alpha,
                                                rand=random.Random(0))

    elif method == "node2vec":
        number_of_walks = self.params['number_of_walks']
        walk_length = self.params['walk_length']
        p = self.params['p']
        q = self.params['q']

        for edge in self.graph.edges():
            self.graph[edge[0]][edge[1]]['weight'] = 1

        G = node2vec.Graph(nx_G=self.graph, p=p, q=q, is_directed=False)
        G.preprocess_transition_probs()
        corpus = G.simulate_walks(num_walks=number_of_walks, walk_length=walk_length)

    else:
        raise ValueError("Invalid method name!")

    """
    new_corpus = []
    line_counter = 0
    line = []
    for walk in corpus:
        if line_counter < self.params['number_of_walks']:
            line.extend(walk)
            line_counter += 1
        else:
            line_counter = 0
            new_corpus.append(line)
            line = []
    corpus = new_corpus
    """

    self.corpus = corpus
    return self.corpus
def graphStats(G, detailed=False):
    # Can probably do more https://networkx.github.io/documentation/latest/reference/functions.html
    print "-------------------------------------"
    print "Number of edges in the graph: ", G.number_of_edges()
    print "Number of nodes in the graph: ", G.number_of_nodes()
    print "-------------------------------------"
    if detailed:
        print "Detailed graph statistics:"
        for line in nx.generate_edgelist(G):
            print(line)
def load_edge_index(file='cora.edges', path='../dataset/cora/'):
    G = nx.read_edgelist(path + file, nodetype=int, delimiter=',',
                         data=(('weight', float),), create_using=nx.DiGraph())
    edge_index = []
    for line in nx.generate_edgelist(G, data=False):
        line = line.split(' ')
        _from_, _to_ = int(line[0]), int(line[1])
        edge_index.append([_from_, _to_])
    edge_index = np.array(edge_index, dtype=np.int64).T
    edge_index = torch.from_numpy(edge_index)
    return edge_index
def get_edgelist(self):
    edgelist = []
    for edge in nx.generate_edgelist(self.g_nx):
        # strip the trailing attribute-dict braces; this assumes the edges carry
        # no attribute data, so each line looks like "u v {}"
        edgelist.append(str(edge).strip('{}'))
    el = []
    for edge in edgelist:
        splitted = edge.split()
        numeric = map(float, splitted)
        el.append(list(numeric))
    return el
def write_dot_helper(G, path, encoding='utf-8'):
    # a simplified implementation of a dot writer
    # needed on Windows platforms where pygraphviz is not available
    # loses label information
    with open(path, mode='wb') as f:
        header = ('strict graph ' + getattr(G, 'name', 'replica') + ' {\n').encode(encoding)
        f.write(header)
        for line in nx.generate_edgelist(G, ' -- ', False):
            line = ' %s;\n' % line
            f.write(line.encode(encoding))
        f.write('}\n'.encode(encoding))
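# A minimal usage sketch for the helper above; the graph and the output path are
# illustrative assumptions, not part of the original code.
import networkx as nx

G = nx.Graph(name='demo')
G.add_edges_from([(1, 2), (2, 3), (3, 1)])
write_dot_helper(G, 'demo.dot')  # writes lines like " 1 -- 2;" inside "strict graph demo { ... }"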
def _get_basepair_list(self, graph):
    """
    Accepts single graph as input.
    Returns a list of all base pairs in the folding.
    """
    list_bpairs = []
    for line in nx.generate_edgelist(graph):
        if line.find('basepair') > 0:
            list_bpairs.append(
                (int(line.split(' ')[0]), int(line.split(' ')[1])))
    return list_bpairs
def depgraph(request):
    """ A view to generate and render the puzzle dependency graph visualization """
    hunt = Hunt.objects.get(is_current_hunt=True)
    G = nx.DiGraph()
    for puzzle in hunt.puzzle_set.all():
        for unlock in puzzle.unlocks.all():
            G.add_edge(unlock.puzzle_number, puzzle.puzzle_number)
    edges = [line.split(' ') for line in nx.generate_edgelist(G, data=False)]
    context = {'puzzles': hunt.puzzle_set.all(), 'edges': edges}
    return render(request, 'depgraph.html', context)
def save_edge_list(self, save_as='edges.csv', data=True):
    """Export the edge list so it can be used by other network-analysis programs."""
    g = self.g
    output = "Source,Target,Weight\n"
    edge_list = [i.split(',') for i in nx.generate_edgelist(g, delimiter=',', data=['weight'])]
    for i in edge_list:
        output += "%s,%s,%s\n" % (self.vertex_labels[int(i[0])],
                                  self.vertex_labels[int(i[1])],
                                  i[2])
    output_name = save_as
    with open(output_name, 'w') as file:
        file.write(output)
def main():
    opts = get_options()
    random.seed(opts.seed)
    G = nx.read_edgelist(opts.input, nodetype=int)
    G = optimize(G, GraphConfig(opts.size, opts.r), opts.step, opts.log)
    os = open(opts.output, 'w') if opts.output else sys.stdout
    for e in nx.generate_edgelist(G, data=False):
        print(e, file=os)
    if opts.output:
        os.close()
def get_list_of_edges(self):
    edge_list_tuples = []
    for edge in nx.generate_edgelist(self.G, data=False):
        if (int(edge.split(" ")[0]) < int(edge.split(" ")[1])):
            edge_list_tuples.append(
                (int(edge.split(" ")[0]), int(edge.split(" ")[1])))
        else:
            edge_list_tuples.append(
                (int(edge.split(" ")[1]), int(edge.split(" ")[0])))
    return (edge_list_tuples)
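# For comparison, a hedged sketch (assumed equivalent) that normalizes the edge
# tuples directly instead of re-parsing the generate_edgelist strings; it assumes
# integer node labels, as the method above does.
def get_list_of_edges_direct(G):
    return [tuple(sorted(map(int, edge))) for edge in G.edges()]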
def __init__(self, n=0, d=4, c=1.5, lam=1, beta=1, falling_probability=0,
             model=None, starting_edge_list=[], edge_birth_rate=None, edge_death_rate=None):
    self.G = nx.Graph()
    self.entering_nodes = []
    self.G.add_nodes_from([i for i in range(0, n)])
    self.n = n
    self.d = d
    self.tolerance = int(c * d)
    self.c = c
    self.inrate = lam
    self.outrate = beta
    self.target_n = self.set_target_size(lam, beta)
    self.p = falling_probability
    self.flooding = Flooding()
    self.consensus = Consensus()
    self.initial_colors = 2
    self.consensus_bias = [0.5, 0.5]
    self.number_of_exiting_nodes_at_each_round = []
    self.number_of_entering_nodes_at_each_round = []
    self.type_of_dynamic_graph = model
    self.converged = False
    self.target_density = self.target_size_achieved()
    self.max_label = n
    self.birth_rate = edge_birth_rate
    self.death_rate = edge_death_rate
    self.semiregular_percentage = 100
    self.time_conv = 0
    self.reset_number = 0
    self.t = 0
    self.max_label = -1

    if (starting_edge_list):
        self.G.add_edges_from(starting_edge_list)

    if (model == "EdgeMarkovian"):
        kn = nx.complete_graph(n)
        self.kn_edges = []
        for e in nx.generate_edgelist(kn, data=False):
            if (int(e.split(" ")[0]) < int(e.split(" ")[1])):
                self.kn_edges.append(
                    (int(e.split(" ")[0]), int(e.split(" ")[1])))
            else:
                self.kn_edges.append(
                    (int(e.split(" ")[1]), int(e.split(" ")[0])))
    else:
        self.kn_edges = []
def _find_paired_nodes(self, graph):
    """ Returns a list containing all paired nodes in a graph. """
    paired_list = []
    for line in nx.generate_edgelist(graph):
        if ('basepair' in line):
            if not (int(line.split(' ')[0]) in paired_list):
                paired_list.append(int(line.split(' ')[0]))
            if not (int(line.split(' ')[1]) in paired_list):
                paired_list.append(int(line.split(' ')[1]))
    return paired_list
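# A hedged alternative sketch: read the pairing information from the edge
# attributes instead of searching the serialized line for the word 'basepair'.
# The attribute layout (some edge-attribute value equal to 'basepair') is an
# assumption about this graph's schema, mirroring what the string test above implies.
def _find_paired_nodes_from_attrs(graph):
    paired = []
    for u, v, attrs in graph.edges(data=True):
        if 'basepair' in attrs.values():
            for node in (u, v):
                if node not in paired:
                    paired.append(node)
    return paired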
def read_graph_list(dataset_folder):
    dataset_list = []
    with open(root_path + '/data/' + dataset_folder + '.pkl', 'rb') as tf:
        graph_set = pickle.load(tf)
    for graph in graph_set:
        edge_list = []
        for line in nx.generate_edgelist(graph, data=False):
            edge_list.append(
                [int(line.split(' ')[0]), int(line.split(' ')[1])])
        dataset_list.append(edge_list)
    return dataset_list, graph_set
def create_graph():
    G = nx.read_adjlist("edge_list.adjlist", create_using=nx.DiGraph())
    with open('edge_list.csv', 'w') as f:
        f.write("Source,Target\n")
        for line in nx.generate_edgelist(G, data=False):
            # each line is "source target"; split on the space instead of indexing
            # single characters so multi-character node labels are preserved
            source, target = line.split(' ', 1)
            f.write(source)
            f.write(',')
            f.write(target)
            f.write('\n')
def generateTRAGraph(patient):
    '''
    This function generates a graph per patient representing the translocations of this patient.
        vertex: Chromosomes
        edge: the number of translocations between each chromosome
    Input:
        patient(string): The patient id.
    Output:
        graph: networkx format
        edge_list: List with the format: node1 node2 weight
                   (edge between node1 and node2 with weight weight)
    '''
    patient_path = PATIENTS_PATH + '/' + patient + '.vcf.tsv'
    # Load the patient breaks, and select only the translocations
    patient_breaks = pd.read_csv(patient_path, sep='\t', index_col=None)
    # patient_breaks['chrom2'] = patient_breaks['chrom2'].map(str)
    only_TRA = patient_breaks.loc[patient_breaks['svclass'] == 'TRA']
    # The crosstab is equivalent to the adjacency matrix, so we use this to calculate it
    ct_tra = pd.crosstab(only_TRA['#chrom1'], only_TRA['chrom2'])
    ct_tra.index = ct_tra.index.map(str)
    adjacency_matrix_connected_only = ct_tra
    aux = pd.DataFrame(0, columns=chromosomes, index=chromosomes)
    aux.index = aux.index.map(str)
    ct_tra = aux.add(ct_tra, fill_value=0)
    aux = None
    # Reorder
    ct_tra = ct_tra.reindex(index=natsorted(ct_tra.index))
    ct_tra = ct_tra[chromosomes]
    # Change the values to int
    ct_tra = ct_tra.astype(int)
    # Generate the adjacency matrix
    adjacency_matrix = pd.DataFrame(data=ct_tra.values, columns=chromosomes, index=chromosomes)
    # print(adjacency_matrix)
    graph = nx.from_pandas_adjacency(adjacency_matrix)
    graph.to_undirected()
    # Remove isolated vertices
    graph.remove_nodes_from(list(nx.isolates(graph)))
    edge_list = nx.generate_edgelist(graph, data=['weight'])
    return graph, edge_list
def netx_to_json(func_graph):
    """ Convert a networkx graph to json """
    func_edge_list = []
    for line in nx.generate_edgelist(func_graph, delimiter=','):
        # {'source': string, 'target': string, 'weight': integer}
        edge_entry = {}
        edge = line.replace('\'', '').replace('{weight:', '').replace('}', '').replace(' ', '').split(',')
        edge_entry['source'] = edge[0]
        edge_entry['target'] = edge[1]
        edge_entry['weight'] = edge[2]
        func_edge_list.append(edge_entry)
    return func_edge_list
def generate_graphs(nodes, edges, graphType, outputFile):
    if graphType:
        graph = nx.dense_gnm_random_graph(nodes, edges)
    else:
        graph = nx.gnm_random_graph(nodes, edges, directed=False)
    for u, v in graph.edges():
        graph.add_weighted_edges_from([(u, v, random.randint(0, 100))])
    with open(outputFile, "w") as file:
        file.write(str(nodes) + " " + str(edges) + "\n")
        for line in nx.generate_edgelist(graph, data=['weight']):
            file.write(line + "\n")
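# A short usage sketch for the generator above; the sizes, the graph-type flag and
# the output filename are illustrative assumptions.
import random
import networkx as nx

random.seed(0)
generate_graphs(20, 40, graphType=False, outputFile="random_graph.txt")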
def test_init_weightedgraph_wstrings(self):
    """also tests translate_data"""
    adjacency_data = [
        "1 4,10 3,20",
        "2 3,50",
        "3 1,20 2,50",
        "4 1,10"
    ]
    my_graph = ExtendedGraph.init_weightedgraph_wstrings(adjacency_data)
    self.assertEqual(sorted(my_graph.nodes()), [1, 2, 3, 4])
    self.assertEqual(sorted(my_graph.edges()), [(1, 3), (1, 4), (2, 3)])
    self.assertEqual(
        list(nx.generate_edgelist(my_graph)),
        ["1 3 {'weight': 20}", "1 4 {'weight': 10}", "2 3 {'weight': 50}"])
def get_graph_to_file(graph, regular):
    lines = nx.generate_edgelist(graph, data=False)
    k = graph.number_of_edges()
    n = graph.number_of_nodes()
    name = ''
    if regular:
        name = f"{k * 2 // n}-regular_graph_with_{n}_vertices.col"
    else:
        name = f"random_{k}_edges_graph_{n}_vertices.col"
    with open(name, 'w+') as f:
        f.write(f"p edge {n} {k}\n")
        for line in lines:
            nodes = line.split(' ')
            f.write("e " + f"{int(nodes[0]) + 1} {int(nodes[1]) + 1}" + '\n')
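# A possible invocation of the DIMACS writer above; the specific graph is an
# illustrative assumption.
import networkx as nx

G = nx.random_regular_graph(3, 10, seed=1)
get_graph_to_file(G, regular=True)  # writes "3-regular_graph_with_10_vertices.col"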
def duplicateEdges_test(graphfile):
    G = graphfile
    edges = set()
    duplicates = set()
    for line in nx.generate_edgelist(G):
        key = str(line)
        if key not in edges:
            edges.add(key)
        else:
            duplicates.add(key)
    return len(duplicates)
def create_star(self, node):
    sequence = create_degree_sequence(node, powerlaw_sequence, exponent=2.5)
    graph = nx.configuration_model(sequence)
    loops = graph.selfloop_edges()
    graph = nx.Graph(graph)
    graph.remove_edges_from(loops)
    components = sorted(nx.connected_components(graph), key=len, reverse=True)
    lcc = graph.subgraph(components[0])
    #pos = nx.spring_layout(lcc)
    #nx.draw_networkx(lcc, pos)
    graph = list(nx.generate_edgelist(lcc))
    edges = lcc.edges()
    #print(edges)
    flat = list(sum(edges, ()))
    return edges, max(flat, key=flat.count)
def getSimilarityScore(graph_list):
    simScore = 0
    for x in graph_list:
        graph = nx.read_graphml(x + '.graphml')
        graph_edgelist = list(nx.generate_edgelist(graph, data=False))
        mcode_graph = mcode(graph_edgelist)
        largest_cluster_graph = max(mcode_graph, key=len)
        intersections = len(
            set(largest_cluster_graph).intersection(set(largest_cluster_G)))
        simScore = simScore + (
            (intersections * 2) / (len(largest_cluster_graph) + len(largest_cluster_G)))
        print(simScore)
    avgSimScore = simScore / 10
    return avgSimScore
def write_input_file(G, path):
    """
    Write a graph to the input file format

    Args:
        G: NetworkX Graph, Graph to write to file
        path: str, path to input file

    Returns:
        None
    """
    with open(path, "w") as fo:
        n = len(G)
        lines = nx.generate_edgelist(G, data=["weight"])
        fo.write(str(n) + "\n")
        fo.writelines("\n".join(lines))
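# A hedged sketch of a matching reader for the file written above, assuming the
# format is exactly one line with n followed by "u v weight" lines and that node
# labels are integers; parse_edgelist does the per-line parsing.
import networkx as nx

def read_input_file(path):
    with open(path) as fi:
        n = int(fi.readline())
        G = nx.parse_edgelist(fi, nodetype=int, data=(("weight", float),))
    # assumption: nodes are labelled 0..n-1, so isolated nodes are re-added here
    G.add_nodes_from(range(n))
    return G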
def find_k_shortest_paths(self, k):
    """ finds k shortest paths using PathLinker. This uses a roundabout, verbose
    method by writing to files, and calling PathLinker via the os.
    Assumes all edges have weight 1"""
    assert(self.nx_graph is not None and "should be made by now")
    fn_net = "tmp_net.txt"          # a file specifying the network structure
    fn_src_tgt = "tmp_src_tgt.txt"  # a file specifying the sources and targets

    with open(fn_net, 'w') as f:
        f.write("#Node1\tNode2\n")
        for line in nx.generate_edgelist(self.nx_graph, data=False):
            nodes = line.strip().split()
            f.write(str(nodes[0]) + "\t" + str(nodes[1]) + "\t1")
            f.write("\n")

    # generate sources and targets by grabbing all nodes which have the attribute
    both_fragments = [Fragment(string=s) for s, d in self.nx_graph.nodes_iter(data=True)
                      if d['frag'] == 'both']
    sources = list(filter(lambda x: x.is_before(self.deletion), both_fragments))
    targets = list(filter(lambda x: self.deletion.is_before(x), both_fragments))

    with open(fn_src_tgt, 'w') as f:
        f.write("#Node Node type\n")
        for src in sources:
            f.write(str(src) + "\tsource")
            f.write("\n")
        for tgt in targets:
            f.write(str(tgt) + "\ttarget")
            f.write("\n")

    # run PathLinker
    s = "python ../PathLinker/PathLinker.py " + fn_net + " " + fn_src_tgt
    os.system(s)

    # read from file
    fn = "out_k_{}-ranked-edges.txt".format(k)
    with open(fn, 'r') as f:
        for line in f:
            if line[0] == '#':
                continue
            l = line.split()
            tail = Fragment(l[0])
            head = Fragment(l[1])
            index = l[2]
def slurp_pickled_nx(graphpath=None, featurespath=None):
    G = nx.read_gpickle(graphpath)
    Xtr = np.load(featurespath)

    ecount = count()
    enames = ddict(ecount.__next__)

    subs = []
    for line in nx.generate_edgelist(G, data=False):
        i, j, w = parse_space(line)
        if i.isalnum():
            i = int(i)
        if j.isalnum():
            j = int(j)
        if i == j:
            continue
        subs.append((enames[i], enames[j], w))
    idx = th.from_numpy(np.array(subs, dtype=np.int64))

    objects = Gintdict_to_list(dict(enames), G)
    print(f'slurp: objects={len(objects)}, edges={len(idx)}')
    return idx, objects
def pgd_graphlet_counts(self, n_threads=4) -> Dict:
    """
    Return the dictionary of graphlets and their counts - based on Neville's PGD
    :return:
    """
    pgd_path = Path(get_imt_input_directory()).parent / 'src' / 'PGD'
    graphlet_counts = {}

    if 'Linux' in platform.platform() and (pgd_path / 'pgd_0').exists():
        edgelist = '\n'.join(nx.generate_edgelist(self.graph, data=False))
        edgelist += '\nX'  # add the X
        dummy_path = f'{pgd_path}/dummy.txt'

        try:
            bash_script = f'{pgd_path}/pgd_0 -w {n_threads} -f {dummy_path} -c {dummy_path}'
            #pipe = sub.run(bash_script, shell=True, capture_output=True, input=edgelist.encode(), check=True, timeout=30000)
            pipe = sub.run(bash_script, shell=True, capture_output=True, input=edgelist.encode(), check=True)
            output_data = pipe.stdout.decode()
        except sub.TimeoutExpired as e:
            CP.print_blue(f'PGD timeout!{e.stderr}')
            graphlet_counts = {}
        except sub.CalledProcessError as e:
            CP.print_blue(f'PGD error {e.stderr}')
            graphlet_counts = {}
        except Exception as e:
            CP.print_blue(str(e))
            graphlet_counts = {}
        else:  # pgd is successfully run
            for line in output_data.split('\n')[:-1]:  # last line blank
                graphlet_name, count = map(lambda st: st.strip(), line.split('='))
                graphlet_counts[graphlet_name] = int(count)
    else:
        CP.print_red(f'PGD executable not found at {pgd_path}/pgd')
        graphlet_counts = {}

    self.stats['pgd_graphlet_counts'] = graphlet_counts
    return graphlet_counts
def perform_random_walks(self, output_node_corpus_file):
    if 'number_of_walks' not in self.params or 'walk_length' not in self.params or self.graph is None:
        raise ValueError("Missing parameter !")

    self.number_of_nodes = self.graph.number_of_nodes()
    self.N = self.number_of_nodes * self.params['number_of_walks']
    self.L = self.params['walk_length']

    initial_time = time.time()
    # Generate a corpus
    if self.params['random_walk'] == "deepwalk":
        if 'dw_alpha' not in self.params:
            raise ValueError("A parameter is missing!")

        # Temporarily generate the edge list
        with open(os.path.join(self.temp_folder, "graph_deepwalk.edgelist"), 'w') as f:
            for line in nx.generate_edgelist(self.graph, data=False):
                f.write("{}\n".format(line))

        dwg = deepwalk.load_edgelist(os.path.join(self.temp_folder, "graph_deepwalk.edgelist"),
                                     undirected=True)
        self.corpus = deepwalk.build_deepwalk_corpus(G=dwg,
                                                     num_paths=self.params['number_of_walks'],
                                                     path_length=self.params['walk_length'],
                                                     alpha=self.params['dw_alpha'])

    elif self.params['random_walk'] == "node2vec":
        if 'n2v_p' not in self.params or 'n2v_q' not in self.params:
            raise ValueError("A missing parameter exists!")

        for edge in self.graph.edges():
            self.graph[edge[0]][edge[1]]['weight'] = 1

        G = node2vec.Graph(nx_G=self.graph, p=self.params['n2v_p'], q=self.params['n2v_q'],
                           is_directed=False)
        G.preprocess_transition_probs()
        self.corpus = G.simulate_walks(num_walks=self.params['number_of_walks'],
                                       walk_length=self.params['walk_length'])

    else:
        raise ValueError("Invalid method name!")

    self.save_corpus(output_node_corpus_file, with_title=False)
    print("The corpus was generated in {:.2f} secs.".format(time.time() - initial_time))
def generate_tests(graphs, filename):
    for i, G in enumerate(graphs):
        # Use only the first 5 graphs
        if i == 5:
            break

        title = filename.format(i)

        # Select the source and target nodes based on their centrality
        nodes = []
        labels = {}
        weights = []
        for node, centrality in nx.betweenness_centrality(G).items():
            nodes.append(node)
            labels[node] = str(node)
            weights.append(1.0 - centrality)

        S, T = random.choices(nodes, weights, k=2)
        # Source and target nodes should be different
        while S == T:
            S, T = random.choices(nodes, weights, k=2)

        lines = list(nx.generate_edgelist(G, data=False))
        lines = ["{} {} {}".format(len(lines), S, T)] + lines
        with open(title + ".txt", "w") as f:
            f.write("\n".join(lines))

        lines = []
        with open(title + "-result.txt", "w") as f:
            for path in nx.all_simple_paths(G, S, T):
                length = len(path) - 1
                links = " => ".join(map(str, path))
                lines += ["{} # {}".format(length, links)]
            f.write("\n".join(lines))

        fig = plt.figure(figsize=[6, 6])
        ax = plt.gca()
        nx.draw(G, with_labels=True, labels=labels)
        fig.suptitle(title + ".png")
        plt.tight_layout(pad=0.9)
        fig.savefig(title + ".png")
def draw_graph(self, vertices, edges):
    self.ax.clear()
    self.toolbar.update()

    # TODO: Change the type of graph so only a complete graph is generated
    # NetworkX Graph
    graph = nx.dense_gnm_random_graph(vertices, edges)
    pos = nx.spring_layout(graph)

    # Draw Nodes
    nx.draw_networkx_nodes(graph, pos, ax=self.ax, node_size=700)
    # Draw Edges
    nx.draw_networkx_edges(graph, pos, width=2)
    # Draw labels
    nx.draw_networkx_labels(graph, pos, font_size=10, font_family='sans-serif')

    # Tight Figure Layout
    plt.tight_layout()
    # Turn off the axis
    plt.axis('off')

    # Save the existing random graph for graph coloring.
    # Can save the nodes as a list of distinct edges. Then call the add_edge
    # function to recreate the exact same graph.

    # Create a file to store the list of edges
    graph_edge_list = open("Random Graph.txt", "w+")

    # Save the graph in a txt file
    total_num_edges = graph.number_of_edges()
    graph_edge_list.write(str(total_num_edges))
    graph_edge_list.write("\n")
    for edge in nx.generate_edgelist(graph, delimiter=' ', data=False):
        graph_edge_list.write(edge)
        graph_edge_list.write("\n")

    # Drawing the figure using the renderer
    self.canvas.draw()
    # Positioning the canvas using pack
    self.canvas.get_tk_widget().pack(side="left", fill="both", expand=True)

    # Print out some graph info
    global graph_info
    graph_info = nx.info(graph)
    global nodes
    nodes = graph.nodes()
def makePutativeClusters(tree_dir, bestReciprocalHits):
    BlastParse.logger.info(
        "len(best hits nodes) %d %d"
        % (len(bestReciprocalHits.nodes()), len(bestReciprocalHits.edges())))
    # only need weakly connected because the graph is built so that only
    # reciprocal hits are part of the graph
    subs = list(nx.weakly_connected_component_subgraphs(bestReciprocalHits))
    count = 1
    orphan_file = tree_dir + "orphan_genes.txt"
    orphans = open(orphan_file, 'w')
    BlastParse.logger.info("len(subs) = %s" % (len(subs)))

    # map child genes to rough clusters and vice versa
    geneToCluster = {}
    graphs = {}
    gene_count = 0
    for s in subs:  # each subgraph is an initial cluster
        s2 = [s]
        for s in s2:
            clusterID = "cluster_" + str(count)
            if len(s.nodes()) > 1:
                graphs[clusterID] = nx.generate_edgelist(s)  # data=True is default
            else:
                orphans.write(s.nodes()[0] + "\n")
            for locus in s.nodes():
                geneToCluster[locus] = clusterID
            count += 1
            gene_count += len(s.nodes())
    orphans.close()
    BlastParse.logger.info("gene count = %d" % (gene_count))
    BlastParse.logger.info("count = %d" % (count))

    with open(tree_dir + "gene_to_cluster.pkl", "w") as f:
        pickle.dump(geneToCluster, f)
    with open(tree_dir + "cluster_graphs.dat", "w") as f:
        for clusterID in graphs:
            f.write(clusterID + "\n")
            for line in graphs[clusterID]:
                f.write(line + "\n")
            f.write("//\n")
    return 0
def detect_duplicatesEdges(graphfile, head=10, load="graphfile"):
    if load == "graphfile":
        G = nx.read_graphml(str(graphfile))
    elif load == "loaded_Graph":
        G = graphfile
    else:
        print "Invalid load option. Please leave blank and specify a graphfile to read or provide a preloaded graphfile and select the option 'loaded_Graph'."

    infile = str(graphfile).split('_', 1)[0]
    edges = set()
    duplicates = set()
    for line in nx.generate_edgelist(G):
        #print line
        key = str(line)
        if key not in edges:
            edges.add(key)
        else:
            duplicates.add(key)

    if len(duplicates) > 0:
        duplicates = list(duplicates)
        if head < len(duplicates):
            print "DUPLICATES: ", head, " random duplicate values displayed...", '\n'
            for line in duplicates[0:head]:
                print random.choice(duplicates)
            print '\n', (len(duplicates) - head), " duplicate values not displayed..."
        else:
            print "DUPLICATES: ", len(duplicates), " random duplicate values displayed...", '\n'
            for line in duplicates:
                print random.choice(duplicates)
        print '\n', "-" * 50, '\n', len(duplicates), "total duplicate values found.", '\n'
        return "DUPLICATES FOUND"
    else:
        print '\n', "-" * 50, '\n', "NO DUPLICATES FOUND"
        return "NO DUPLICATES FOUND"
def generate(vertices, chance):
    edges = int((vertices * vertices) * chance)
    print "vertices: {}, edges: {}".format(vertices, edges)
    G = nx.gnm_random_graph(vertices, edges)
    for (u, v, w) in G.edges_iter(data=True):
        w["weight"] = random.randint(1, 20) / 20.0

    out = open("{}_vertices_{}_edges".format(vertices, edges), "w")
    # node labels run from 0 to vertices-1
    source = random.randint(0, vertices - 1)
    dest = source
    while dest == source:
        dest = random.randint(0, vertices - 1)
    out.write("1\n{} {} {} {} 20 100\n".format(vertices, edges, source, dest))
    for line in nx.generate_edgelist(G, data=["weight"]):
        out.write(line + "\n")
    B = set()
    while len(B) < 100:
        B.add(random.randint(0, vertices - 1))
    for b in B:
        out.write(str(b) + "\n")
    out.close()
import networkx as nx

# Choose random graph to generate
G = nx.lollipop_graph(10, 20)

# Print graph to file
f = open("testGraph.dim", "w")
f.write("p edge " + str(G.number_of_nodes()) + " " + str(G.number_of_edges()))
f.write("\n")
for line in nx.generate_edgelist(G, data=False):
    print(line)
    f.write("e " + line + "\n")
def print_mcl_input_file(bsr_graph, out_handle):
    """Print an MCL-formatted graph file"""
    for line in nx.generate_edgelist(bsr_graph, delimiter='\t', data=['bsr']):
        out_handle.write(line + '\n')
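# A small usage sketch for the MCL writer above; the graph, the 'bsr' edge scores
# and the output filename are illustrative assumptions.
import networkx as nx

G = nx.Graph()
G.add_edge("geneA", "geneB", bsr=0.92)
G.add_edge("geneB", "geneC", bsr=0.40)
with open("bsr_graph.abc", "w") as handle:
    print_mcl_input_file(G, handle)  # tab-separated "node  node  score" lines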
def convertGraphToNet(G, outfile):
    # write one edge line per row of the edge list
    with open(outfile, 'w') as f:
        for e in nx.generate_edgelist(G):
            print(e, file=f)
def dmax(G):
    out = "p edge " + str(G.number_of_nodes()) + " " + str(G.number_of_edges()) + "\n"
    for line in nx.generate_edgelist(G, data=False):
        out += "e " + line + "\n"
    return out
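# A brief usage sketch for the DIMACS formatter above; the random graph and the
# output filename are illustrative assumptions.
import networkx as nx

G = nx.gnm_random_graph(6, 9, seed=42)
with open("graph.dimacs", "w") as fh:
    fh.write(dmax(G))  # "p edge 6 9" followed by one "e u v" line per edge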
def topology():
    if os.path.exists("mininetCode.py"):
        os.system("rm mininetCode.py")
    os.system('echo "from mininet.net import Mininet" >> %s' % outputFileName)
    os.system('echo "from mininet.node import Controller, OVSKernelSwitch" >> %s' % outputFileName)
    os.system('echo "from mininet.cli import CLI" >> %s' % outputFileName)
    os.system('echo "from mininet.log import setLogLevel" >> %s' % outputFileName)
    os.system('echo "from mininet.link import TCLink" >> %s' % outputFileName)
    os.system('echo "\ndef topology():" >> %s' % outputFileName)

    removeNodes = []
    filename = raw_input("Please enter the filename: ")
    net = Mininet(controller=Controller, link=TCLink, switch=OVSKernelSwitch)
    os.system(
        'echo "\n net = Mininet( controller=Controller, link=TCLink, switch=OVSKernelSwitch )" >> %s\n\n'
        % (outputFileName)
    )

    excludedLine = 0
    i = 0
    k = 1
    j = 0
    node = [[0 for x in xrange(50)] for x in xrange(333300)]
    e1 = open(str(filename), "r")  # read info file
    A = nx.Graph()
    for line in e1:
        count = len(line.strip().split(" "))
        if line.strip().split(" ")[count - 1] == str(26121) or line.strip().split(" ")[count - 1] == str(20121):
            excludedLine += 1
        else:
            while i <= count:
                if i == 0:
                    node[j][i] = 26121
                elif (i > 0) and (node[j][k - 1] != int(line.strip().split(" ")[i - 1])):
                    node[j][k] = int(line.strip().split(" ")[i - 1])
                    A.add_edge(node[j][k - 1], node[j][k])
                    k += 1
                i += 1
            i = 0
            k = 1
            j += 1

    Degree = int(raw_input("Please enter the number of degree which you want to remove: "))
    for x in A.nodes():
        if len(nx.shortest_path(A, source=26121, target=x)) > Degree:
            removeNodes.append(x)

    hosts = raw_input("Please enter the ASN to add host (you can type for multiple: 100,101,102): ")
    hosts = hosts.split(",")
    A.remove_nodes_from(removeNodes)

    asn = [0 for x in xrange(len(A.nodes()))]
    x = 0
    os.system("echo \"\n print '*** Creating %s nodes ***'\" >> %s" % (len(A.nodes()), outputFileName))
    for switch in A.nodes():
        asn[x] = net.addSwitch("ASN%s" % switch)
        os.system("echo \" ASN%s = net.addSwitch( 'ASN%s' )\" >> %s" % (switch, switch, outputFileName))
        x += 1

    h = []
    for host in hosts:
        for node in A.nodes():
            if str(host) == str(node):
                os.system("echo \" h%s = net.addHost( 'h%s' )\" >> %s" % (str(host), str(host), outputFileName))
                A.add_edge(node, str("h" + str(host)))
                h.append(str("h" + str(host)))

    os.system("echo \"\n c1 = net.addController( 'c1' )\" >> %s" % (outputFileName))
    os.system("echo \"\n print '*** Creating Links ***'\" >> %s" % (outputFileName))

    for line in nx.generate_edgelist(A, data=False, delimiter=","):
        line = line.split(",")
        hops = 0
        higherHop = 0
        if (line[0][:1] != "h") and (line[1][:1] != "h"):
            if line[0] != 26121:
                distance = len(nx.shortest_path(A, source=26121, target=int(line[0]))) - 1
                higherHop = distance
            if line[1] != 26121:
                if higherHop < len(nx.shortest_path(A, source=26121, target=int(line[1]))) - 1:
                    hops = len(nx.shortest_path(A, source=26121, target=int(line[1]))) - 1
        # Link Configuration
        if hops < 3:
            bw = 1000
        else:
            bw = 100
        delay = str(10 + (hops * 2)) + "ms"
        if line[0][:1] == "h":
            os.system("echo \" net.addLink('%s','ASN%s')\" >> %s" % (line[0], line[1], outputFileName))
        elif line[1][:1] == "h":
            os.system("echo \" net.addLink('ASN%s','%s')\" >> %s" % (line[0], line[1], outputFileName))
        else:
            os.system(
                "echo \" net.addLink('ASN%s','ASN%s', bw=%s, delay='%s')\" >> %s"
                % (line[0], line[1], bw, delay, outputFileName)
            )

    os.system("echo \"\n print '*** Starting network ***'\" >> %s" % (outputFileName))
    os.system('echo " net.build()" >> %s' % (outputFileName))
    os.system('echo " c1.start()\n" >> %s' % (outputFileName))
    for ases in asn:
        os.system('echo " %s.start( [c1] )" >> %s' % (ases, outputFileName))
    os.system("echo \"\n print '*** Running CLI ***'\" >> %s" % (outputFileName))
    os.system('echo " CLI( net )" >> %s' % (outputFileName))
    os.system("echo \"\n print '*** Stopping network ***'\" >> %s" % (outputFileName))
    os.system('echo " net.stop()" >> %s' % (outputFileName))
    os.system("echo \"\nif __name__ == '__main__':\" >> %s" % (outputFileName))
    os.system("echo \" setLogLevel( 'info' )\" >> %s" % (outputFileName))
    os.system('echo " topology()" >> %s' % (outputFileName))
edges_with_weight = zip(rows.tolist(), cols.tolist(), weight)

# Create an empty graph using NetworkX
g = nx.Graph()

# First insert the named vertices
set_edges = set(rows.tolist())
for i in range(0, len(set_edges)):
    g.add_node(i, name=labels.tolist()[i])

# Insert edges
g.add_weighted_edges_from(edges_with_weight)

# Link the names with their respective vertices
# Extract the minimum spanning tree (MST) of the graph (Kruskal's algorithm)
edge_list = nx.generate_edgelist(g, data=['weight'])
mst = nx.minimum_spanning_tree(g)

# Plot results; the figure has to be saved manually.
# The graph layout used is Fruchterman & Reingold
plt.figure(1, figsize=(15, 15))  # create the figure to draw the graph: 15 is the image dimension
#nx.draw_spring(mst, node_size=350, font_size=10, edge_width=1, alpha=0.5, arrows=False, with_labels=True)

# coordinates holds the positions of the graph's vertices
coordinates = nx.spring_layout(mst)

# Save the first image
nx.draw(mst, coordinates, node_size=350, font_size=10, edge_width=1, alpha=0.5, arrows=False, with_labels=True)
plt.savefig(dataset + '.png')  # other formats: pdf, svg, ...
plt.show()
def print_edgelist(self):
    assert(self.nx_graph is not None)
    for line in nx.generate_edgelist(self.nx_graph, data=False):
        print(line)
graphs, all_simple_paths = build_graphs(adjacency_matrix, num_row, num_col, num_plane)

# Equivalent number of cells per subset.
cell_dist = []
for i in range(0, num_subsets):
    cell_dist.append(4096)

num_total_cells = sum(cell_dist)

graphs = add_edge_cost(graphs, num_total_cells, global_subset_boundaries, cell_dist,
                       t_u, upc, upbc, t_comm, latency, m_l, num_row, num_col, num_plane)

##Storing all simple paths for each graph.
#all_simple_paths = []
#for graph in graphs:
#    copy_graph = copy(graph)
#    start_node = [x for x in copy_graph.nodes() if copy_graph.in_degree(x) == 0][0]
#    end_node = [x for x in copy_graph.nodes() if copy_graph.out_degree(x) == 0][0]
#    simple_paths = nx.all_simple_paths(graph, start_node, end_node)
#    all_simple_paths.append(simple_paths)

graphs = add_conflict_weights(graphs, all_simple_paths, latency, cell_dist,
                              num_row, num_col, num_plane)

all_graph_time, time, heaviest_paths = compute_solve_time(graphs, cell_dist, t_u, upc,
                                                          global_subset_boundaries,
                                                          num_row, num_col, num_plane)
print(all_graph_time)

for ig in range(0, len(graphs)):
    for line in nx.generate_edgelist(graphs[ig], data=True):
        print(line)
    print("\n")
def _monitor_thread(self):
    bdisp = None
    ddisp = None
    freq = self.hz * self.motion_window
    mwin = collections.deque([False] * freq)
    n = 1

    while True:
        fdata = None

        if self._exit:
            return

        try:
            fdata = self._q.get(True, 0.3)
        except:
            continue

        if self.display and bdisp is None:
            ndisp, _ = pxdisplay.create(caption="Normal", width=80)
            bdisp, _ = pxdisplay.create(caption="Average", width=80)
            sdisp, _ = pxdisplay.create(caption="Std Dev", width=80, tmin=0, tmax=0.5)
            ddisp, _ = pxdisplay.create(caption="Deviation", width=80)

        frame = fdata['ir']
        mwin.popleft()
        mwin.append(fdata['movement'])

        if self.motion_override is not None:
            motion = self.motion_override
        else:
            motion = any(mwin)

        self._lock.acquire()
        self._active = []
        g = nx.Graph()

        if n == 1:
            self._background = tuple_to_list(frame)
            self._means = tuple_to_list(frame)
            self._stds = init_arr(0)
            self._stds_post = init_arr()
        else:
            weight = self.nomotion_weight
            use_frame = frame

            # Not currently working
            #if motion:
            #    indeces = min_temps(frame, 5)
            #    scalepx = []
            #
            #    for i, j in indeces:
            #        scalepx.append(self._background[i][j] / frame[i][j])
            #
            #    scale = sum(scalepx) / len(scalepx)
            #    scaled_bg = [[x * scale for x in r] for r in frame]
            #
            #    weight = self.motion_weight
            #    use_frame = scaled_bg

            for i in range(self._rows):
                for j in range(self._columns):
                    prev = self._background[i][j]
                    cur = use_frame[i][j]
                    cur_mean = self._means[i][j]
                    cur_std = self._stds[i][j]

                    if not motion:  # TODO: temp fix
                        self._background[i][j] = weight * cur + (1 - weight) * prev
                        # maybe exclude these from motion calculations?
                        # n doesn't change when in motion, so it'll cause all sort of
                        # corrupted results, as they use n?
                        self._means[i][j] = cur_mean + (cur - cur_mean) / n
                        self._stds[i][j] = cur_std + (cur - cur_mean) * (cur - self._means[i][j])
                        self._stds_post[i][j] = math.sqrt(self._stds[i][j] / (n - 1))

                    if (cur - self._background[i][j]) > (3 * self._stds_post[i][j]):
                        self._active.append((i, j))
                        g.add_node((i, j))

                        x = [(-1, -1), (-1, 0), (-1, 1), (0, -1)]
                        # Nodes that have already been computed as active
                        for ix, jx in x:
                            if (i + ix, j + jx) in self._active:
                                g.add_edge((i, j), (i + ix, j + jx))

        active = self._active
        self._num_active = len(self._active)
        components = list(nx.connected_components(g))
        self._connected_graph = g
        self._num_connected = nx.number_connected_components(g)
        self._size_connected = max(len(component) for component in components) if len(components) > 0 else None
        self._lock.release()

        active_pix = None

        if self.display:
            ndisp.put({'ir': frame})
            bdisp.put({'ir': self._background})
            sdisp.put({'ir': self._stds_post})
            #print(self._stds_post)

            if n >= 2:
                std = {'ir': init_arr(0)}
                for i, j in active:
                    std['ir'][i][j] = frame[i][j]
                ddisp.put(std)
                active_pix = std

            while self.freeze:
                time.sleep(0.5)

        if self.draw:
            #nx.draw(g)
            #plt.show()
            print('########################')
            print('Edgelist')
            for l in nx.generate_edgelist(g, data=False):
                print(l)
            print('Frame')
            print(frame)
            print('Average')
            print(self._background)
            print('Stddev')
            print(self._stds_post)
            print('Deviation')
            print(active_pix)
            while True:
                pass

        if not motion:
            n += 1
def convert(path, output_path, cut_to_n_nodes, max_edges, max_edges_2, voltage):
    # read edge list from file
    g = nx.read_weighted_edgelist(path, nodetype=int)

    # generate some random resistance
    for (x, y) in nx.edges(g):
        g[x][y]['weight'] = random.randint(1, 10)

    # let g be the biggest consistent subgraph (in the best case,
    # the whole graph is consistent)
    # g = max(nx.connected_component_subgraphs(g), key=len)
    # cycles = nx.cycle_basis(g)
    # nodes = [x for cycle in cycles for x in cycle]
    # g.remove_nodes_from([x for x in nx.nodes(g) if x not in nodes])

    x1, x2 = nx.nodes(g)[:2]
    nodes_set = {x1, x2}
    nodes_set_left = {x1, x2}
    i = 2
    while i < cut_to_n_nodes:
        for node in nx.nodes(g)[2:]:
            if node in nodes_set:
                continue
            intersect = set(nx.neighbors(g, node)).intersection(nodes_set_left)
            if len(intersect) >= 2:
                i += 1
                while len(intersect) > 2:
                    g.remove_edge(node, intersect.pop())
                nodes_set.add(node)
                print(node)
                if len(intersect) < max_edges:
                    nodes_set_left.add(node)
                for neighbour in intersect:
                    if len(set(nx.neighbors(g, neighbour)).intersection(nodes_set)) >= max_edges:
                        nodes_set_left.remove(neighbour)
                        for neighbour2 in nx.neighbors(g, neighbour):
                            if neighbour2 not in nodes_set:
                                g.remove_edge(neighbour, neighbour2)
            if i == cut_to_n_nodes:
                break

    unused = [node for node in nx.nodes(g) if node not in nodes_set]
    g.remove_nodes_from(unused)

    # remove excess edges
    for node in nx.nodes(g):
        neighbours = tuple(nx.all_neighbors(g, node))
        overmax = len(neighbours) - max_edges_2
        if overmax > 0:
            i = 0
            for neighbour in neighbours:
                if len(tuple(nx.all_neighbors(g, neighbour))) == 2:
                    continue
                backup = g[node][neighbour]
                g.remove_edge(node, neighbour)
                if not nx.has_path(g, node, neighbour):
                    g.add_edge(node, neighbour, backup)
                else:
                    i += 1
                if i == overmax:
                    break

    # save to file
    lines = [x + "\n" for x in nx.generate_edgelist(g, data=['weight'])]
    o = open(output_path, "w")
    # x1, x2 = nx.nodes(g)[:2]
    o.write(" ".join((str(x1), str(x2), str(voltage))) + "\n")
    o.writelines(lines)
u = int(lineData[0])
v = int(lineData[1])
w = float(lineData[2][:-1])
if w <= maxDistance:
    G.add_edge(u, v, weight=w)
dataLine = edgeList.readline()
edgeList.close()

# create subgraphs from this graph
subGs = nx.connected_component_subgraphs(G)
msg("The graph contains %d subgraph(s)" % len(subGs))

# Initiate the output edge list
msg("Initializing the output edge file")
outFile = open(outputFN, 'w')
outFile.write("FromID,ToID,Cost,Subgraph\n")

# For each subgraph, assign edges in that subgraph with subgraph ID
cnt = 0
for subG in subGs:
    cnt += 1
    # for each edge, add subgraph ID
    for line in nx.generate_edgelist(subG, data=['weight']):
        fromPatch = int(line.split(" ")[0])
        toPatch = int(line.split(" ")[1])
        cost = float(line.split(" ")[2])
        # write to output file
        outFile.write("%d,%d,%s,%d\n" % (fromPatch, toPatch, cost, cnt))

arcpy.CheckInExtension("spatial")
import networkx as nx
import networkx.readwrite as rw

g = nx.Graph()
g.add_edge(1, 2, weight=3)
g.add_edge(1, 3, weight=1)
g.add_edge(2, 4, weight=67)
g.add_edge(2, 5, weight=1)

with open('edgelist.txt', 'w') as f:
    f.write('\n'.join(list(nx.generate_edgelist(g, data=True))))
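# A hedged companion sketch that continues the script above: read the file back in.
# read_edgelist evaluates the trailing {'weight': ...} dicts into edge attributes;
# integer node labels are assumed via nodetype=int.
g2 = nx.read_edgelist('edgelist.txt', nodetype=int)
assert g2[1][2]['weight'] == 3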
def edge_list(graphfile, data_option, stat_option, print_write="pass", for_label="no_label", load="graphfile"):
    if load == "graphfile":
        # Load multigraph file
        G = nx.read_graphml(str(graphfile))
    elif load == "loaded_Graph":
        G = graphfile
    else:
        print "Invalid load option. Please leave blank and specify a graphfile to read or provide a preloaded graphfile and select the option 'loaded_Graph'."

    data = data_option
    stat = stat_option.lower()
    print_option = print_write.lower()

    # Print edges (with no data labels)
    if data == False:
        if print_write == "pass":
            pass
        else:
            if print_write == "print":
                for line in nx.generate_edgelist(G, data=False):
                    print(line)
            elif print_write == "write":
                try:
                    outfile = 'Edge_List__' + graphfile + ".csv"
                    f = open(outfile, 'a')
                    for line in nx.generate_edgelist(G, data=False):
                        delimited_line = line.replace(' ', ',')
                        f.write(u'%s\n' % (delimited_line))
                finally:
                    f.close()
            else:
                pass

    # Print edges (with data labels)
    elif data == True:
        if print_write == "pass":
            pass
        else:
            if for_label == "no_label":
                if print_write == "print":
                    for line in nx.generate_edgelist(G):
                        print(line)
                elif print_write == "write":
                    try:
                        outfile = 'Edge_List__' + graphfile + ".csv"
                        f = open(outfile, 'a')
                        for line in nx.generate_edgelist(G):
                            delimited_line = line.replace(' ', ',')
                            f.write(u'%s\n' % (delimited_line))
                    finally:
                        f.close()
                elif print_write == "w2":
                    try:
                        outfile = 'Edgelist-W2__' + graphfile + ".csv"
                        f = open(outfile, 'a')
                        for line in nx.generate_edgelist(G):
                            delimited_line = line.replace(' ', ' ').replace(',', ' ')
                            f.write(u'%s\n' % (delimited_line))
                    finally:
                        f.close()
            elif for_label != "no_label":
                if print_write == "print":
                    for line in nx.generate_edgelist(G, data=[str(for_label)]):
                        print(line)
                elif print_write == "write":
                    try:
                        outfile = 'Edge_List__' + graphfile + ".csv"
                        f = open(outfile, 'a')
                        for line in nx.generate_edgelist(G, data=[str(for_label)]):
                            delimited_line = line.replace(' ', ',')
                            f.write(u'%s\n' % (delimited_line))
                    finally:
                        f.close()
                else:
                    pass

    # Print Stat Option
    if stat == "yes":
        print '\n'"MULTIGRAPH STATISTICS"'\n', '_' * 20, '\n', "Nodes: ", G.number_of_nodes(), '\n', "Edges: ", G.size(), '\n', '_' * 20, '\n'
    elif stat == "e1":
        print "Edges: ", G.size()
    elif stat == "no":
        pass
    else:
        print "Please input a valid option. Would you like to return the number of edges: 'yes' or 'no'?"
faster_path = False
if (faster_path):
    # How much faster it starts solving than the secondary path.
    delay = weight_sum_path - weight_sum
    # Get the time to solve this node.
    time_to_solve = G[current_node][primary_path[i+1]]['weight']
    delay = time_to_solve - delay
    # Check if delay is positive, then we need to add weight to the secondary graph.
    if (delay > 0):
        # We add this delay to node 1's solve time in the secondary graph.
        next_node = path[node_position+1]
        G2[current_node][next_node]['weight'] += delay
else:
    delay = weight_sum - weight_sum_path
    time_to_solve = G2[current_node][path[node_position+1]]['weight']
    delay = time_to_solve - delay
    if (delay > 0):
        next_node = primary_path[i+1]
        G[current_node][next_node]['weight'] += delay

for line in nx.generate_edgelist(G, data=True):
    print(line)
print("\n")
print("G2\n")
for line in nx.generate_edgelist(G2, data=True):
    print(line)
edgeW = open(r"F:\Web_Science\cs532-s16\A5\Wedges.txt", "a")
f = open("zachary2.txt", "r")
g = nx.Graph()
d = np.genfromtxt(f, delimiter=' ')
for i in range(0, 34):
    for j in range(0, 34):
        if d[i][j] == 0:
            pass
        else:
            g.add_edge(i, j, weight=d[i][j])

esmall = [(u, v) for (u, v, d) in g.edges(data=True) if d['weight'] <= 2.0]
emed = [(u, v) for (u, v, d) in g.edges(data=True) if d['weight'] <= 4.0]
elarge = [(u, v) for (u, v, d) in g.edges(data=True) if d['weight'] >= 5.0]

plt.figure(figsize=(8, 8))
pos = nx.spring_layout(g)
nx.draw_networkx_nodes(g, pos, node_size=500)
nx.draw_networkx_edges(g, pos, edgelist=elarge, width=2, edge_color='r')
nx.draw_networkx_edges(g, pos, edgelist=emed, width=2, edge_color='g')
nx.draw_networkx_edges(g, pos, edgelist=esmall, width=2, edge_color='b')

labels = {}
for i in range(0, 34):
    labels[i] = i
nx.draw_networkx_labels(g, pos, labels, font_size=16)
plt.axis('off')

for line in nx.generate_edgelist(g):
    edgeW.write(line + '\n')

plt.savefig(r"F:\Web_Science\cs532-s16\A5\kEdges.png")
plt.show()