def test_two_clique_communities():
    """Fluid communities must split two triangles joined by a bridge."""
    random.seed(7)
    graph = Graph()
    # First clique.
    graph.add_edge('a', 'b')
    graph.add_edge('a', 'c')
    graph.add_edge('b', 'c')
    # Bridge between the two cliques.
    graph.add_edge('c', 'd')
    # Second clique.
    graph.add_edge('d', 'e')
    graph.add_edge('d', 'f')
    graph.add_edge('f', 'e')
    expected = {frozenset(['a', 'c', 'b']), frozenset(['e', 'd', 'f'])}
    found = {frozenset(community) for community in asyn_fluidc(graph, 2)}
    assert_equal(found, expected)
def test_incorrect_interior(self):
    """P4 must raise when applied to a pentagon whose interior is invalid."""
    graph = Graph()
    names = [gen_name() for _ in range(5)]
    e1, e2, e3, e4, e5 = names
    coords = {
        e1: (1.0, 2.0),
        e2: (1.0, 1.5),
        e3: (1.0, 1.0),
        e4: (2.0, 1.0),
        e5: (1.5, 1.5),
    }
    # Only the first node is labelled 'I'; the rest are exterior 'E' nodes.
    for node, position in coords.items():
        graph.add_node(node, layer=1, position=position,
                       label='I' if node == e1 else 'E')
    # Close the cycle e1-e2-e3-e4-e5-e1.
    for a, b in zip(names, names[1:] + names[:1]):
        graph.add_edge(a, b)
    if visualize_tests:
        visualize_graph_3d(graph)
        pyplot.show()
    with self.assertRaises(AssertionError):
        P4().apply(graph, [e1])
    if visualize_tests:
        visualize_graph_3d(graph)
        pyplot.show()
def __init__(self, model_name: str):
    """Create paired directed and undirected networks named *model_name*."""
    self.name = model_name
    # nx's ``name`` keyword stores the name in the graph attribute dict,
    # exactly as assigning to the ``name`` property would.
    self.directed_net = DiGraph(name=model_name)
    self.undirected_net = Graph(name=model_name)
def generate_wMVC_summ(input_path: str, reference_path: str, output_path: str, limit: int):
    """Summarize every document in *input_path* via weighted minimum vertex cover.

    For each document: empty/placeholder inputs yield an empty summary,
    short documents (< 250 words) are emitted verbatim, and longer ones are
    ranked through a weighted-MVC over the sentence entailment graph.

    Note: ``input_path`` / ``output_path`` are concatenated with the document
    name directly, so they must end with a path separator.
    """
    for doc in listdir(input_path):
        # The original used bare ``open(...).write(...)`` which leaks file
        # handles; all file access now goes through ``with`` blocks.
        with open(input_path + doc, 'r') as f:
            txt = f.read()
        if txt == '' or txt == 'None.':
            with open(output_path + doc, 'w', encoding='utf8') as out:
                out.write('')
            continue
        sent_tokens, factors = get_sentences_with_factors(input_path, reference_path, doc)
        # Short documents are returned verbatim, one sentence per paragraph.
        if document_word_count(sent_tokens) < 250:
            with open(output_path + doc, 'w', encoding='utf8') as out:
                out.write('\n\n'.join(sent_tokens))
            continue
        dist = compute_entailment(sent_tokens)
        threshold = compute_threshold(dist, 0.5)
        # 'large' keeps every vertex weight strictly positive.
        large = 2 * max(factors) * len(sent_tokens)
        vertices = [(i, {'weight': large - factors[i] * (sum(dist[i]) - 1)})
                    for i in range(len(sent_tokens))]
        edges = create_edge_set(dist, threshold)
        G = Graph()
        G.add_nodes_from(vertices)
        G.add_edges_from(edges)
        wMVC = min_weighted_vertex_cover(G, 'weight')
        # Sort cover members by weight (lowest first) for summary selection.
        tops = sorted(wMVC, key=lambda x: vertices[x][1]['weight'])
        write_summary(output_path, doc, limit, sent_tokens, tops)
def test_recursive_kl_error():
    """recursive_kl must reject numgrp values that are not powers of 2 >= 2."""
    empty_graph = Graph()
    with pytest.raises(ValueError) as excinfo:
        group_graph.recursive_kl(empty_graph, numgrp=1)
    assert excinfo.value.args[0] == (
        "numgrp must be a power of 2 and at least 2.")
def get_full_graph(paths: [str], floors: [int]) -> Graph:
    """Return the union of graphs parsed from the SVG files, with floors
    connected by staircases.

    The order of floor numbers must match the order of the SVG files.

    :param paths: list of SVG file paths
    :param floors: list of floor numbers
    :return: the combined graph
    """
    G = Graph()
    for path, floor in zip(paths, floors):
        G = nx.union(G, get_from_svg(path, floor))
    # Connect consecutive floors at both ends of every staircase letter.
    for letter in ['a', 'b', 'v', 'g', 'd', 'e', 'j']:
        for floor in floors:
            for endpoint in ('start', 'end'):
                lower = get_node_by_id(
                    G, 'stairs_' + letter + '_' + str(floor) + '_' + endpoint)
                upper = get_node_by_id(
                    G, 'stairs_' + letter + '_' + str(floor + 1) + '_' + endpoint)
                # Missing nodes mean the staircase does not span these floors.
                if lower is not None and upper is not None:
                    G.add_edge(lower, upper)
    return G
def createCorrectGraph():
    """Build the two-layer test graph expected by the production rules."""
    graph = Graph()
    # Names are generated in the same order as before (gen_name may be
    # sequential), then nodes are added layer by layer.
    (e1, e2, i1, i2, I1, I2,
     e3, e4, e5, e6, e7, I3) = (gen_name() for _ in range(12))
    for node, pos, label in [
        (e1, (1.0, 2.0), 'E'),
        (e2, (3.0, 2.0), 'E'),
        (i1, (2.0, 3.0), 'i'),
        (i2, (2.0, 1.0), 'i'),
    ]:
        graph.add_node(node, layer=1, position=pos, label=label)
    # NOTE(review): an earlier commented-out variant placed I3 at (1.5, 0.5)
    # with a question whether the coordinates are right — confirm.
    for node, pos, label in [
        (I1, (1.5, 3.5), 'I'),
        (I2, (2.5, 3.5), 'I'),
        (e3, (1.0, 2.0), 'E'),
        (e4, (3.0, 2.0), 'E'),
        (e5, (2.0, 2.0), 'E'),
        (I3, (2.5, 0.5), 'I'),
        (e6, (1.0, 2.0), 'E'),
        (e7, (3.0, 2.0), 'E'),
    ]:
        graph.add_node(node, layer=2, position=pos, label=label)
    # Upper layer edges.
    for a, b in [(e1, i1), (e1, i2), (e2, i1), (e2, i2), (e1, e2)]:
        graph.add_edge(a, b)
    # Interlayer connections.
    for a, b in [(I1, i1), (I2, i1), (I3, i2)]:
        graph.add_edge(a, b)
    # Lower layer connections and the I3 triangle.
    for a, b in [(I1, e3), (I1, e5), (e3, e5),
                 (I2, e4), (I2, e5), (e4, e5),
                 (I3, e6), (I3, e7), (e6, e7)]:
        graph.add_edge(a, b)
    return graph
def multiple_edges(self, new):
    """
    Get/set whether or not self allows multiple edges.

    INPUT:

    new: boolean or None

    DOCTEST:
        sage: G = sage.graphs.base.graph_backends.NetworkXGraphBackend()
        sage: G.multiple_edges(True)
        sage: G.multiple_edges(None)
        True
    """
    # Migrate deprecated backends eagerly.  The previous implementation used
    # ``assert``/``except AssertionError`` for this check, which silently
    # stops working under ``python -O`` (asserts are stripped).
    if isinstance(self._nxg, (NetworkXGraphDeprecated, NetworkXDiGraphDeprecated)):
        self._nxg = self._nxg.mutate()
    from networkx import Graph, MultiGraph, DiGraph, MultiDiGraph
    if new is None:
        return self._nxg.is_multigraph()
    if new == self._nxg.is_multigraph():
        # Already in the requested flavour; nothing to do.
        return
    if new:
        # Promote to the multigraph flavour, preserving directedness.
        cls = MultiDiGraph if self._nxg.is_directed() else MultiGraph
    else:
        # Demote to the simple-graph flavour, preserving directedness.
        cls = DiGraph if self._nxg.is_directed() else Graph
    self._nxg = cls(self._nxg)
def cache_langs():
    '''
    Read in all files and save as pickle
    '''
    langs = {}
    dir_path = Path(LANGS_DIR)
    # Sort configs by language code (the parent directory name).
    paths = sorted(dir_path.glob('./*/config.y*ml'),
                   key=lambda x: x.parent.stem)
    mappings_legal_pairs = []
    for path in paths:
        code = path.parent.stem
        with open(path) as f:
            data = yaml.safe_load(f)
        # A 'mappings' key means the config declares more than one mapping.
        # TODO: should put in some measure to prioritize non-generated
        # mappings and warn when they override
        if 'mappings' in data:
            for index, mapping in enumerate(data['mappings']):
                mappings_legal_pairs.append(
                    (mapping['in_lang'], mapping['out_lang']))
                data['mappings'][index] = load_mapping_from_path(path, index)
        else:
            data = load_mapping_from_path(path)
        langs[code] = data
    lang_network = Graph()
    lang_network.add_edges_from(mappings_legal_pairs)
    with open(LANGS_NWORK_PATH, 'wb') as f:
        write_gpickle(lang_network, f)
    with open(LANGS_PKL, 'wb') as f:
        pickle.dump(langs, f)
    return langs
def downgrade(self):
    """
    Downgrade hypergraph diagram to :class:`discopy.rigid.Diagram`.

    Examples
    --------
    >>> x = Ty('x')
    >>> v = Box('v', Ty(), x @ x)
    >>> print((v >> Swap(x, x) >> v[::-1]).downgrade())
    v >> Swap(x, x) >> v[::-1]
    >>> print((Id(x) @ Swap(x, x) >> v[::-1] @ Id(x)).downgrade())
    Id(x) @ Swap(x, x) >> v[::-1] @ Id(x)
    """
    # Normalise first so that ports and bijection are well defined.
    diagram = self.make_progressive()
    graph = Graph()
    graph.add_nodes_from(diagram.ports)
    # Wire port i to its image under the diagram's bijection.
    graph.add_edges_from([(diagram.ports[i], diagram.ports[j])
                          for i, j in enumerate(diagram.bijection)])
    # One "box" node per box, coerced to a rigid.Box where necessary.
    graph.add_nodes_from([
        Node("box", depth=depth,
             box=box if isinstance(box, rigid.Box) else rigid.Box(
                 box.name, box.dom, box.cod,
                 _dagger=box.is_dagger, data=box.data))
        for depth, box in enumerate(diagram.boxes)
    ])
    # Scalar spiders become zero-legged rigid.Spider boxes placed after the
    # ordinary boxes — hence the len(diagram.boxes) depth offset.
    graph.add_nodes_from([
        Node("box", depth=len(diagram.boxes) + i,
             box=rigid.Spider(0, 0, diagram.spider_types[s]))
        for i, s in enumerate(diagram.scalar_spiders)
    ])
    return drawing.nx2diagram(graph, rigid.Ty, rigid.Id)
def get_spot_prices_in_eth_from_dune(swaps, pool_ids, token_info, block_interval = 1):
    """Compute per-block spot prices in ETH for every token reachable from WETH.

    :param swaps: iterable of dicts with a 'block_number' key; sets the range.
    :param pool_ids: pools to fetch reserves for (passed through to Dune).
    :param token_info: token metadata (passed through to Dune).
    :param block_interval: stride between priced blocks (default: every block).
    :return: dict mapping block number -> {token: exchange rate to WETH}.
    """
    from_block = min(s['block_number'] for s in swaps)
    to_block = max(s['block_number'] for s in swaps)
    # Fetch one block early so the very first block in range has reserve data.
    reserves = get_reserves_from_dune(from_block - 1, to_block, pool_ids, token_info)
    reserves = compute_reserves_for_blocks(range(from_block, to_block + 1), reserves)
    # Token graph with one edge per pool pair; the weight is the negated
    # product of reserves at the pair's first recorded block.
    # NOTE(review): nx.shortest_path below is called without weight=...,
    # which ignores edge weights (BFS) — confirm whether liquidity-weighted
    # paths were intended here.
    token_graph = Graph(list(reserves.keys()))
    for t1, t2 in reserves.keys():
        block = list(reserves[t1, t2].keys())[0]
        token_graph[t1][t2]['weight'] = -reserves[t1, t2][block][0] * reserves[t1, t2][block][1]
    WETH = "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2"
    tokens = {t1 for (t1, _) in reserves.keys()} | {t2 for (_, t2) in reserves.keys()}
    # Only tokens connected to WETH in the pool graph can be priced in ETH.
    tokens = {t for t in tokens if has_path(token_graph, t, WETH)}
    exchange_paths = {t: shortest_path(token_graph, t, WETH) for t in tokens}
    prices = {}
    for block in tqdm(range(from_block, to_block + 1, block_interval),
                      "Setting spot prices for every block"):
        block_prices = {}
        for t in tokens:
            # Chain the exchange rates along the token's path to WETH;
            # None means some reserve is missing at this block.
            xrate = compute_exchange_rate(exchange_paths[t], reserves, block)
            if xrate is None:
                continue
            block_prices[t] = xrate
        prices[block] = block_prices
    return prices
def edgelist_to_graph_eqs_forbidden(edge_list):
    """Parse a whitespace-separated edge list into (graph, eqs, forbidden).

    Every edge gets unit weight.  Equation nodes are those whose name starts
    with 'e'; the forbidden set is always empty for this input format.
    """
    WEIGHT = 'weight'
    pairs = grouper(edge_list.split(), 2)
    edges = [(eq, var, {WEIGHT: 1}) for eq, var in pairs]
    g = Graph(edges)
    eqs = [node for node in g if node[0] == 'e']
    return g, eqs, set()
def wsi(ego, topn=TOPN):
    """Induce senses of *ego* by clustering its ego network with Chinese Whispers."""
    tic = time()
    ego_network = Graph(name=ego)
    pairs = get_disc_pairs(ego, topn)
    nodes = get_nodes(pairs)
    ego_network.add_nodes_from(
        (node, {'size': size}) for node, size in nodes.items())
    for r_node in ego_network:
        neighbors = list2dict(get_nns(r_node))
        # Keep only the topn strongest neighbours already in the network.
        strongest = sorted(
            ((weight, other) for other, weight in neighbors.items()
             if other in ego_network),
            reverse=True)[:topn]
        new_edges = []
        for weight, other in strongest:
            if get_pair(r_node, other) in pairs:
                # Discriminative pairs must not be directly connected.
                print("Skipping:", r_node, other)
            else:
                new_edges.append((r_node, other, {"weight": weight}))
        ego_network.add_edges_from(new_edges)
    chinese_whispers(ego_network, weighting="top", iterations=20)
    if verbose:
        print("{}\t{:f} sec.".format(ego, time() - tic))
    return {"network": ego_network, "nodes": nodes}
def shortest_paths(graph: Graph):
    """
    Returns the shortest paths tree from the node with the highest
    betweenness centrality among trees.

    :param graph: graph to consider
    :return: shortest path tree
    """
    centrality = nx.betweenness_centrality(graph)
    root = max(centrality, key=centrality.get)
    _, paths = nx.algorithms.shortest_paths.single_source_dijkstra(
        graph, root)
    # Collect every consecutive hop of every shortest path (with weights).
    tree_edges = set()
    for path in paths.values():
        for a, b in zip(path, path[1:]):
            tree_edges.add((a, b, graph[a][b]['weight']))
    tree = Graph()
    if tree_edges:
        for u, v, w in tree_edges:
            tree.add_edge(u, v, weight=w)
    else:
        # Degenerate case: a single isolated centre node.
        tree.add_node(root)
    return tree
def add_layer(self, layer, **attr):
    """Append *layer* to the multiplex and rebuild the flattened graph.

    All layers must have the same number of nodes; the new layer's nodes
    are offset into a disjoint id range.

    :raises multinetxError: if the union fails or a layer's size differs.
    """
    # '== 0' instead of 'is 0': identity comparison with an int literal is
    # implementation-dependent and a SyntaxWarning on modern Pythons.
    if self.num_nodes_in_layers == 0:
        self.list_of_layers = [layer]
    else:
        self.list_of_layers.append(layer)
    self.num_layers = len(self.list_of_layers)
    self.num_nodes_in_layers = self.list_of_layers[0].number_of_nodes()
    # Offset is loop-invariant: hoisted out of the edge loop.
    offset = (len(self.list_of_layers) - 1) * layer.number_of_nodes()
    for i, j in layer.edges():
        self.intra_layer_edges.append((i + offset, j + offset))
    try:
        Graph.__init__(
            self, Graph(disjoint_union_all(self.list_of_layers), **attr))
    except multinetxError:
        raise multinetxError("Multiplex cannot inherit Graph properly")
    # Check that all layers have the same number of nodes.  An 'if' is used
    # rather than 'assert' so the check survives 'python -O'.
    for lg in self.list_of_layers:
        if lg.number_of_nodes() != self.num_nodes_in_layers:
            raise multinetxError("Graph at layer does not have the\
                same number of nodes")
def test_module_example(self, tol):
    """Test the example in the QAOA module docstring"""
    # Wires and the graph on which MaxCut is being performed.
    wires = range(3)
    graph = Graph([(0, 1), (1, 2), (2, 0)])

    # QAOA cost and mixer Hamiltonians.
    cost_h, mixer_h = qaoa.maxcut(graph)

    # A single layer of the QAOA ansatz built from the two Hamiltonians.
    def qaoa_layer(gamma, alpha):
        qaoa.cost_layer(gamma, cost_h)
        qaoa.mixer_layer(alpha, mixer_h)

    # Full variational circuit: Hadamards followed by repeated QAOA layers.
    def circuit(params, **kwargs):
        for wire in wires:
            qml.Hadamard(wires=wire)
        qml.layer(qaoa_layer, 2, params[0], params[1])

    # Device and QAOA cost function.
    dev = qml.device('default.qubit', wires=len(wires))
    cost_function = qml.VQECost(circuit, cost_h, dev)
    result = cost_function([[1, 1], [1, 1]])
    expected = -1.8260274380964299
    assert np.allclose(result, expected, atol=tol, rtol=0)
def __init__(self, y):
    """Fit a graph fused-lasso smoother to the 1-D signal *y*."""
    # Pre-cache a sparse LU decomposition of the FL matrix.
    from pygfl.utils import get_1d_penalty_matrix
    from scipy.sparse.linalg import factorized
    from scipy.sparse import csc_matrix
    n = y.shape[0]
    D = get_1d_penalty_matrix(n)
    D = np.vstack([D, np.zeros(n)])
    D[-1, -1] = 1e-6  # Nugget so the matrix has full rank.
    self.invD = factorized(csc_matrix(D))
    # Set up the fast GFL solver.
    from pygfl.solver import TrailSolver
    from pygfl.trails import decompose_graph
    from pygfl.utils import hypercube_edges, chains_to_trails
    from networkx import Graph
    g = Graph()
    g.add_edges_from(hypercube_edges(y.shape))
    chains = decompose_graph(g, heuristic='greedy')
    ntrails, trails, breakpoints, edges = chains_to_trails(chains)
    self.solver = TrailSolver()
    self.solver.set_data(y, edges, ntrails, trails, breakpoints)
    from pygfl.easy import solve_gfl
    self.beta = solve_gfl(y)
def define_graph():
    """Build an undirected graph from the global edge list and lay it out."""
    G = Graph()
    for edge in edges:
        u, v = edge[0], edge[1]
        # Adding both directions is redundant on an undirected Graph but is
        # kept to mirror the original edge insertion exactly.
        G.add_edge(u, v)
        G.add_edge(v, u)
    pos = nx.spring_layout(G)
    return G, pos
def summarize(text, sentence_count=5, language='english'):
    """Return an extractive summary of *text* via graph-based sentence ranking."""
    processor = LanguageProcessor(language)
    sentence_list = processor.split_sentences(text)
    wordsets = map(processor.extract_significant_words, sentence_list)
    stemsets = [{processor.stem(word) for word in wordset}
                for wordset in wordsets]
    graph = Graph()
    # Connect sentence pairs weighted by Jaccard similarity of stem sets.
    for (index_a, stems_a), (index_b, stems_b) in combinations(
            enumerate(stemsets), 2):
        if not (stems_a and stems_b):
            continue
        similarity = 1 - jaccard(stems_a, stems_b)
        if similarity > 0:
            graph.add_edge(index_a, index_b, weight=similarity)
    ranked = list(pagerank(graph).items())
    if ranked:
        by_rank = sorted(ranked, key=itemgetter(1), reverse=True)
        chosen = sorted(map(itemgetter(0), by_rank[:sentence_count]))
    else:
        # No graph edges at all: fall back to the leading sentences.
        chosen = range(min(sentence_count, len(sentence_list)))
    return ' '.join(sentence_list[index] for index in chosen)
def get_every_station_and_time():
    """Scrape every station and its time from the schedule page into a graph."""
    try:
        # Wait for the page elements to finish loading.
        WebDriverWait(driver, 10).until(
            EC.element_to_be_clickable(
                (By.XPATH, "//button[@class='decor_button_button']")))
    finally:
        all_elem = driver.find_elements_by_xpath("//td[@class='flag']/a")
        # Build the graph.
        G = Graph()
        previous_city = None
        # Walk over every station/time row.
        for i in range(len(all_elem)):
            current_city = driver.find_element_by_xpath(
                "//tr[" + str(i + 1) + "]/td[@class='flag']/a").text
            current_time = driver.find_element_by_xpath(
                "//table[ @ id = 'schedule_table'] / tbody / tr["
                + str(i + 1) + "] / td[3]").text
            # Populate the graph (work in progress).
            if previous_city is None:
                G.add_node(current_city, Time=5)
            else:
                G.add_edge(previous_city, current_city, Time=6)
            previous_city = current_city
            print("Город - ", current_city, ";\t Время - ", current_time)
        driver.close()
def to_bipartite_graph(equations):
    '''Returns: g, eqs, forbidden. Here, g represents the system of
    equations as an undirected bipartite graph; eqs gives the node ids of
    the equations; the forbidden set contains the (eq,var) pairs, where
    variable var cannot be safely or explicitly eliminated from equation
    eq.'''
    g = Graph()
    eqs = set()
    forbidden = set()
    # Unit equations: variables outside eq.elims must not be eliminated.
    for eq in gen_unit_eqs(equations):
        eqs.add(eq.id)
        for var in eq.names:
            g.add_edge(eq.id, var)
            if var not in eq.elims:
                forbidden.add((eq.id, var))
    # Connection equations: every variable may be eliminated.
    for eq in gen_connection_eqs(equations):
        eqs.add(eq.id)
        for var in eq.names:
            g.add_edge(eq.id, var)
    info_on_bipartite_graph(g, eqs, forbidden)
    return g, eqs, forbidden
def naughty_brute_force():
    # Exhaustively check bb4_solve against precomputed optima for every
    # bipartite graph of size 1..6.
    for size in irange(1, 6):
        print_timestamp()
        print('Testing (naughty) bipartite graphs of size', size)
        # Precomputed optima and flat edge lists, one entry per graph.
        opts = marshal_load('data/all_bips/opt_n' + str(size) + '.bin')
        all_edgelists = marshal_load('data/all_bips/bip_n' + str(size) + '.bin')
        print('Loaded', len(all_edgelists), 'graphs')
        print_timestamp()
        for i, (edgelist, opt) in enumerate(izip(all_edgelists, opts)):
            g = Graph()
            # The edge list is flat: consecutive value pairs are endpoints.
            g.add_edges_from(e for e in izip(edgelist[::2], edgelist[1::2]))
            g.graph['name'] = str(i)
            res = bb4_solve(g, set(irange(size)))
            # The solver's upper bound must match the known optimum.
            assert opt == res.ub
            #to_pdf(g, rowp, colp)
        #print([t[0] for t in _worst_cases])
        #print('Len:', len(_worst_cases))
        #_worst_cases.sort(key=sort_patterns)
        # for i, (explored, g, _, rowp, colp, ub) in enumerate(_worst_cases, 1):
        #     msg = 'Index: ' + g.graph['name']
        #     fname = '{0:03d}a'.format(i)
        #     to_pdf(g, list(irange(size)), irange(size, 2*size), msg, fname)
        #     msg = 'OPT = {}, BT: {}'.format(ub, explored)
        #     fname = '{0:03d}b'.format(i)
        #     to_pdf(g, rowp, colp, msg, fname)
        #_worst_cases[:] = [ ]
        print_timestamp()
        print()
def largest_connected_component(G):
    """
    Input
        G: an n x n matrix or a networkx Graph
    Output
        The largest connected component of G (an empty list if G is empty)
    """
    # isinstance instead of 'type(G) == np.ndarray'.  Also raise for ANY
    # non-square array: the original only raised for square-shape mismatch
    # and silently fell through for ndim != 2, failing later confusingly.
    if isinstance(G, np.ndarray):
        if G.ndim == 2 and G.shape[0] == G.shape[1]:  # n x n adjacency matrix
            G = Graph(G)
        else:
            raise TypeError("Networkx graphs or n x n numpy arrays only")
    subgraphs = [
        G.subgraph(component).copy()
        for component in networkx.connected_components(G)
    ]
    # Keep the component with the most nodes.
    G_connected = []
    for subgraph in subgraphs:
        if len(subgraph) > len(G_connected):
            G_connected = subgraph
    return G_connected
def initialize_network(self, database_name):
    """Build the station network graph from the given database.

    :param database_name: path/name passed straight to ``connect``.
    """
    self.network = Graph()
    database_connection = connect(database_name)
    # try/finally so the connection is closed even if loading raises;
    # the original leaked the connection on any exception.
    try:
        self.add_stations(database_connection)
        self.add_station_zone_assignments(database_connection)
        self.add_connections(database_connection)
    finally:
        database_connection.close()
def pathways_to_similarity_network(manager_dict, pathways):
    """Create a graph with the given pathways related by their similarity

    :param dict manager_dict:
    :param list[tuple(str,str,str)] pathways:
    :rtype: networkx.Graph
    """
    # Resolve each pathway's gene set once, keyed by identifier.
    gene_sets = {
        identifier: manager_dict[resource].get_pathway_by_id(identifier).get_gene_set()
        for resource, identifier, name in pathways
    }
    graph = Graph()
    for pathway_1, pathway_2 in itt.combinations(pathways, r=2):
        similarity = calculate_szymkiewicz_simpson_coefficient(
            gene_sets[pathway_1[1]], gene_sets[pathway_2[1]])
        # Pathways with no gene overlap stay unconnected.
        if similarity:
            graph.add_edge(pathway_1, pathway_2, similarity=similarity)
    return graph
def _build_graph(self):
    """Build the sentence-similarity graph used for ranking."""
    self.graph = Graph()
    self.graph.add_nodes_from(self.sentences)
    for first, second in combinations(self.sentences, 2):
        similarity = self._jaccard(first, second)
        # Zero-similarity pairs remain unconnected.
        if similarity:
            self.graph.add_edge(first, second, weight=similarity)
def to_graph_eqs_forbidden(edge_list, equation_name_length=3):
    """Parse whitespace-separated (eq, var, weight) triples into a graph.

    A negative weight marks the (eq, var) pair as forbidden for
    elimination.  Equation nodes are recognized by their name length.
    """
    WEIGHT = 'weight'
    edges = [(eq, var, {WEIGHT: int(weight)})
             for eq, var, weight in grouper(edge_list.split(), 3)]
    forbidden = {(eq, var) for eq, var, data in edges if data[WEIGHT] < 0}
    g = Graph(edges)
    eqs = [node for node in g if len(node) == equation_name_length]
    return g, eqs, forbidden
def from_profile(self, params: Dict[str, Any], setNameDict: bool =True):
    """Simple load by dict object."""
    mapping = params['name_dict']
    # Name dict.
    if setNameDict:
        self.setNameDict(mapping)
    # Add customize joints.
    G = Graph(params['Graph'])
    self.setGraph(G, params['pos'])
    self.cus = params['cus']
    self.same = params['same']
    # Grounded setting: find the first link whose points cover every
    # driver and follower point, and mark its row as grounded.
    if setNameDict:
        Driver = [mapping[e] for e in params['Driver']]
        Follower = [mapping[e] for e in params['Follower']]
        for row, link in enumerate(G.nodes):
            # Point names ('P<n>') of all edges incident to this link.
            points = set(
                'P{}'.format(n) for n, edge in edges_view(G) if link in edge
            )
            if set(Driver + Follower) <= points:
                self.setGrounded(row)
                break
    # Expression: mark each expression target as solved.
    # NOTE(review): func/args from triangle_expr are unused here —
    # presumably only the targets matter for status; confirm.
    for func, args, target in triangle_expr(params['Expression']):
        self.setStatus(params['name_dict'][target], True)
def get_ego_network(ego):
    """Build and cluster the ego network of *ego* via Chinese Whispers."""
    tic = time()
    ego_network = Graph(name=ego)

    # Add related and substring nodes.
    substring_nodes = []
    for node in G.index:
        if ego.lower() == node.lower():
            # Direct neighbours of the ego term, weighted as given.
            nns_node = G.get_neighbors(node)
            ego_network.add_nodes_from(
                (rn, {"weight": w}) for rn, w in nns_node.items())
        elif "_" in node and (node.startswith(ego + "_")
                              or node.endswith("_" + ego)):
            # Multi-word terms that contain the ego word get a fixed weight.
            substring_nodes.append((node, {"weight": WEIGHT_COEF}))
    ego_network.add_nodes_from(substring_nodes)

    # Find edges of the ego network.
    for r_node in ego_network:
        neighbors = G.get_neighbors(r_node)
        # Keep the n strongest neighbours that are already in the network.
        strongest = sorted(
            [(neighbors[other], other)
             for other in neighbors if other in ego_network],
            reverse=True)[:n]
        ego_network.add_edges_from(
            (r_node, other, {"weight": w}) for w, other in strongest)

    chinese_whispers(ego_network, weighting="top", iterations=20)
    if verbose:
        print("{}\t{:f} sec.".format(ego, time() - tic))
    return ego_network
def test_maximal_cliques(self):
    """Test maximal_cliques."""
    G = Graph()
    # A clique of 'a, b, c, d, e' and some other edges.
    G.add_edges_from([
        ('b', 'c'), ('b', 'd'), ('b', 'e'), ('b', 'f'), ('b', 'a'),
        ('a', 'c'), ('a', 'd'), ('a', 'e'),
        ('c', 'd'), ('c', 'e'), ('c', 'f'), ('c', 'g'),
        ('d', 'e'), ('d', 'g'),
        ('e', 'g'),
        ('f', 'g'),
    ])
    # list(...) so .index works on networkx >= 2 NodeViews as well as on
    # the plain list returned by networkx 1.x.
    nodes = list(G.nodes())
    S, H = pClique.convert_graph_connectivity_to_sparse(G, nodes)
    start = nodes.index('a')
    tQ = pClique.grasp(S, H, 1, 5, start)
    c = [nodes[member] for member in tQ]
    # 'print c' was Python 2 statement syntax — a SyntaxError on Python 3.
    print(c)
    self.assertTrue(set(c) == set(['a', 'b', 'c', 'd', 'e']))