def test_special_cases(self):
    """Hypercube graphs of dimension 0-3 should match known reference graphs."""
    references = {
        0: nx.null_graph(),
        1: nx.path_graph(2),
        2: nx.cycle_graph(4),
        3: nx.cubical_graph(),
    }
    for dim, reference in references.items():
        assert nx.could_be_isomorphic(nx.hypercube_graph(dim), reference)
def could_be_isomorphic(g1, g2):
    """Check if two graphs could be isomorphic.

    Graphs with fewer than ``MAX_NODES`` nodes get the exact
    `networkx.is_isomorphic` test; larger ones fall back to the cheap
    invariant-based `networkx.could_be_isomorphic` check.
    """
    exact = g1.number_of_nodes() < MAX_NODES
    checker = nx.is_isomorphic if exact else nx.could_be_isomorphic
    return checker(g1, g2)
def _is_isomorphic_with_timeout(g1, g2, node_match, edge_match):
    """Exact isomorphism test under a time budget.

    Returns a ``(result, exact)`` pair: ``exact`` is True when the full
    check completed within ``ISOMORPHISM_TIMEOUT`` seconds, and False when
    it timed out and the cheaper invariant-based check was used instead.
    """
    try:
        with timeout(seconds=ISOMORPHISM_TIMEOUT):
            exact_result = nx.is_isomorphic(
                g1, g2, node_match=node_match, edge_match=edge_match)
    except TimeoutError:
        # Timed out: fall back to the fast necessary-condition check.
        return nx.could_be_isomorphic(g1, g2), False
    return exact_result, True
def test_periodic(self):
    """Periodic grids should wrap into the expected closed topologies."""
    # Degenerate case: a 0x0 periodic grid has no nodes at all.
    empty = nx.grid_2d_graph(0, 0, periodic=True)
    assert_equal(dict(empty.degree()), {})
    cases = (
        (2, 2, nx.cycle_graph(4)),
        (1, 7, nx.cycle_graph(7)),
        (7, 1, nx.cycle_graph(7)),
        (2, 5, nx.circular_ladder_graph(5)),
        (5, 2, nx.circular_ladder_graph(5)),
        (2, 4, nx.cubical_graph()),
        (4, 2, nx.cubical_graph()),
    )
    for rows, cols, expected in cases:
        torus = nx.grid_2d_graph(rows, cols, periodic=True)
        assert_true(nx.could_be_isomorphic(torus, expected))
def test_ppc_to_nx(iterations=2000):
    """Fuzz-test ``ppc_to_nx`` against randomly pruned IEEE 118-bus cases.

    Each iteration keeps a random subset of the 186 branches, disables the
    rest via an infinite series reactance, and checks that ``ppc_to_nx``
    produces a graph matching the expected surviving topology.

    Parameters
    ----------
    iterations : int
        Number of random trials to run.
    """
    for _ in range(iterations):
        grid = pp.case118()
        all_branches = grid['branch'][:, 0:2]
        # Pick a random (possibly empty) subset of 1-based branch numbers to keep.
        # BUG FIX: removed leftover debug lines that overwrote `branches` with
        # [1, 2, 3] and `del_branches` with range(4, 187), which made every
        # iteration identical and defeated the randomized test.
        branches = sorted(random.sample(range(1, 187), random.randint(0, 186)))
        del_branches = sorted(set(range(1, 187)) - set(branches))
        # Mark removed branches out of service with an infinite reactance.
        grid['branch'][np.array(del_branches, dtype=int) - 1, idx_brch.BR_X] = np.inf
        # Build the reference graph from the surviving branches.
        graph = nx.Graph()
        graph.add_nodes_from(range(1, 119))
        # dtype=int keeps the fancy index valid even when `branches` is empty.
        graph.add_edges_from(all_branches[np.array(branches, dtype=int) - 1].astype(int))
        assert nx.could_be_isomorphic(ppc_to_nx(grid), graph)
def test_special_cases(self):
    """Low-dimensional hypercubes coincide with well-known graphs."""
    references = [
        nx.null_graph(),     # Q_0: no nodes
        nx.path_graph(2),    # Q_1: a single edge
        nx.cycle_graph(4),   # Q_2: the 4-cycle
        nx.cubical_graph(),  # Q_3: the 3-cube
    ]
    for dim, reference in enumerate(references):
        assert_true(nx.could_be_isomorphic(nx.hypercube_graph(dim), reference))
def systemSym(self, cktIdx, dirName):
    """Recognize symmetric node pairs in circuit ``cktIdx`` and emit constraints.

    Rebuilds the connectivity graph, scores every node pair whose layout
    bounding boxes match and whose sub-graphs could be isomorphic, greedily
    pairs nodes by best mutual similarity score, then writes two files:
      * ``dirName + <ckt name>.sym``    -- device symmetry pairs, then
        self-symmetric device names (one per line)
      * ``dirName + <ckt name>.symnet`` -- symmetric net pairs, or a single
        net name when a net maps to itself

    NOTE(review): Python 2 code (print statements, list-returning
    ``dict.keys()``).
    """
    # Adding fix for local graph generation
    self.graph = nx.Graph()
    self.circuitNodes = dict()
    self.constructGraph(cktIdx)
    self.graphSim = GraphSim.GraphSim(self.graph)
    #
    ckt = self.dDB.subCkt(cktIdx)
    cktNodes = range(ckt.numNodes())
    symVal = dict()   # symVal[a][b]: similarity score of node pair (a, b)
    symPair = dict()  # accepted pairing: node index -> matched node index
    # Score every unordered node pair that passes the cheap pre-checks.
    for nodeIdxA, nodeIdxB in combinations(cktNodes, 2):
        nodeA = ckt.node(nodeIdxA)
        nodeB = ckt.node(nodeIdxB)
        cktA = self.dDB.subCkt(nodeA.graphIdx)
        cktB = self.dDB.subCkt(nodeB.graphIdx)
        #boxA = (cktA.gdsData().bbox().xLen(), cktA.gdsData().bbox().yLen())
        #boxB = (cktB.gdsData().bbox().xLen(), cktB.gdsData().bbox().yLen())
        # Layout bounding-box dimensions (xLen, yLen) of each sub-circuit.
        boxA = (cktA.layout().boundary().xLen(), cktA.layout().boundary().yLen())
        boxB = (cktB.layout().boundary().xLen(), cktB.layout().boundary().yLen())
        subgraphA = self.subgraph(cktIdx, nodeIdxA)
        subgraphB = self.subgraph(cktIdx, nodeIdxB)
        # Boundary box size check and circuit graph isomorphic check
        if boxA == boxB and nx.could_be_isomorphic(subgraphA, subgraphB):
            # Record the similarity score symmetrically for both orderings.
            if nodeIdxA not in symVal:
                symVal[nodeIdxA] = dict()
            symVal[nodeIdxA][nodeIdxB] = self.graphSim.specSimScore(
                subgraphA, subgraphB)
            if nodeIdxB not in symVal:
                symVal[nodeIdxB] = dict()
            symVal[nodeIdxB][nodeIdxA] = symVal[nodeIdxA][nodeIdxB]
            print "Recognized symmetry pair:"
            print nodeA.name, nodeB.name, symVal[nodeIdxA][nodeIdxB]
    # Greedy mutual-best matching: accept (A, B) when A's best partner is B
    # and B's best partner is A, or when A's score strictly beats B's best.
    # Python 2: symVal.keys() is a list copy, so popping inside is safe;
    # the `not in` guard skips entries removed by earlier iterations.
    for idxA in symVal.keys():
        if idxA not in symVal:
            continue
        tempDict = symVal[idxA]
        tempList = list(tempDict.values())
        # Best-scoring partner of idxA.
        idxB = tempDict.keys()[tempList.index(max(tempList))]
        #symPair[idxA] = idxB
        #symVal.pop(idxB, None)
        # Adding fix, need to recursively remove. Dirty fix for now.
        tempDict_p = symVal[idxB]
        tempList_p = list(tempDict_p.values())
        # Best-scoring partner of idxB, to test whether the match is mutual.
        idxA_p = tempDict_p.keys()[tempList_p.index(max(tempList_p))]
        if idxA == idxA_p:
            # Mutual best match: accept and retire idxB from further pairing.
            symPair[idxA] = idxB
            symVal.pop(idxB, None)
        else:
            # Not mutual: keep the pair only if it beats idxB's own best score.
            val1 = tempDict[idxB]
            val2 = tempDict_p[idxA_p]
            if val1 > val2:
                symPair[idxA] = idxB
                symVal.pop(idxB, None)
            else:
                continue
    # Write device symmetry pairs that clear the similarity tolerance.
    filename = dirName + ckt.name + ".sym"
    symFile = open(filename, "w")
    for idxA in symPair:
        idxB = symPair[idxA]
        nameA = ckt.node(idxA).name
        nameB = ckt.node(idxB).name
        if symVal[idxA][idxB] >= self.symTol:
            symFile.write("%s %s\n" % (nameA, nameB))
        else:
            # Below tolerance: report but do not emit the constraint.
            print "waived constraint", nameA, nameB, symVal[idxA][idxB]
    # Self-symmetric devices are appended to the same .sym file, one per line.
    hierGraph = self.hierGraph(cktIdx)
    selfSym = self.selfSym(symPair, hierGraph)
    for idx in selfSym:
        name = ckt.node(idx).name
        symFile.write("%s\n" % name)
    # Symmetric nets: pairs on one line, or a single name for self-mapped nets.
    symNet = self.symNet(cktIdx, symPair, selfSym)
    filename = dirName + ckt.name + ".symnet"
    netFile = open(filename, "w")
    for idxA in symNet:
        idxB = symNet[idxA]
        if idxA == idxB:
            name = ckt.net(idxA).name
            netFile.write("%s\n" % name)
        else:
            nameA = ckt.net(idxA).name
            nameB = ckt.net(idxB).name
            netFile.write("%s %s\n" % (nameA, nameB))
    symFile.close()
    netFile.close()
def test_could_be_isomorphic(self):
    """The invariant check accepts/rejects the expected fixture pairs."""
    cases = (
        (self.G1, self.G2, True),
        (self.G1, self.G3, True),
        (self.G1, self.G4, False),
        (self.G3, self.G2, True),
    )
    for left, right, expected in cases:
        if expected:
            assert_true(nx.could_be_isomorphic(left, right))
        else:
            assert_false(nx.could_be_isomorphic(left, right))
def analize_graph(g: nx.Graph, limit: int = 3, clean: bool = True, draw: bool = False, cmp_with: nx.Graph = None):
    """Print summary statistics for a bipartite repository/user graph.

    Parameters
    ----------
    g : nx.Graph
        Bipartite graph; repository nodes carry ``bipartite == 0`` and a
        ``language`` attribute, user nodes form the other partition.
        (Assumes node data also includes ``weight``, ``relation`` and
        ``fork_source`` where relevant — confirm against the builder.)
    limit : int
        Number of top entries reported per statistic.
    clean : bool
        If True, drop small/isolated components before centrality analysis.
    draw : bool
        If True, render communities of the (possibly cleaned) graph.
    cmp_with : nx.Graph, optional
        Second graph to compare against ``g`` for likely isomorphism.
    """
    assert isinstance(g, nx.Graph)
    assert isinstance(limit, int)
    assert cmp_with is None or isinstance(cmp_with, nx.Graph)

    def take_by_value(items, count, f=None):
        # Top-`count` keys from (key, value) pairs, highest value first,
        # optionally filtered by predicate `f` after sorting.
        items = sorted(items, key=lambda t: t[1], reverse=True)
        if f is not None:
            items = filter(f, items)
        return [k for k, v in itertools.islice(items, 0, count)]

    labels = set()
    print('Graph analysis:')
    nodes = g.nodes(data=True)
    repos = {n for n, d in nodes if d['bipartite'] == 0}
    print('Repositories: {0}'.format(len(repos)))
    users = set(g) - repos
    print('Users: {0}'.format(len(users)))
    components = list(nx.connected_components(g))
    print('Connected components: \n{0}'.format(len(components)))
    languages = {d['language'] for n, d in nodes if d['bipartite'] == 0}
    print('Languages: \n{0}'.format(languages))
    # Rank bridge edges by the size of the component they belong to.
    bridges = {(n1, n2): len(next((c for c in components if n1 in c), []))
               for n1, n2 in nx.algorithms.bridges(g)}
    bridges = take_by_value(bridges.items(), limit)
    print('Connecting memberships: \n{0}'.format(list(bridges)))
    # Projects kept alive by at most one contributor.
    deg1_repos = [n for n in repos if g.degree[n] <= 1]
    print('Number of risked projects: \n{0}'.format(len(deg1_repos)))
    deg1_repos = sorted(deg1_repos, key=lambda n: -nodes[n].get('weight', 0))
    print('Most risked projects: \n{0}'.format(deg1_repos[0:limit]))
    # BUG FIX: `if cmp_with:` skipped the comparison for an empty graph
    # (nx.Graph is falsy when it has no nodes); test against None explicitly,
    # matching the assert above.
    if cmp_with is not None:
        print('Comparing graphs...')
        # Matchers for the exact (currently disabled) isomorphism check below.
        nm = iso.categorical_node_match(['bipartite', 'language'], [0, '?'])
        em = iso.categorical_edge_match('relation', 'contributor')
        #are_equal = nx.is_isomorphic(g, cmp_with, node_match=nm, edge_match=em)
        are_equal = nx.could_be_isomorphic(g, cmp_with)
        print('The graphs are similar.'
              if are_equal else 'The graphs are not isomorphic.')
    if clean:
        repo_count = len(repos)
        components = sorted(components, key=lambda c: len(c))
        min_size = len(components[-1]) / 2
        for component in components:
            component_repos = repos.intersection(component)
            # Drop components with at most one repo, or smaller than half
            # the largest component.
            if len(component_repos) <= 1 or len(component) < min_size:
                repos.difference_update(component)
                users.difference_update(component)
                # g.remove_nodes_from(component)
        if len(repos) < repo_count:
            print('Excluded {0} isolated projects.'.format(repo_count - len(repos)))
            # g = nx.classes.graphviews.subgraph_view(g, filter_node=lambda n: n in repos or n in users)
            g = nx.subgraph(g, repos.union(users))
    if limit and repos:
        fork_sources = {n: d.get('fork_source')
                        for n, d in nodes if d.get('relation') == 'fork'}
        # BUG FIX: iterate .items() — iterating the dict directly yields only
        # keys, so unpacking into (k, v) raised at runtime.
        fork_count = {n: sum(1 for k, v in fork_sources.items() if v == n)
                      for n in repos}
        fork_count = take_by_value(fork_count.items(), limit)
        labels.update(fork_count)
        print('Most forked projects: \n{0}'.format(fork_count))
        repo_centrality = nx.algorithms.bipartite.degree_centrality(g, repos)
        repo_centrality = take_by_value(repo_centrality.items(), limit,
                                        f=lambda t: t[0] in repos)
        labels.update(repo_centrality)
        print('Most popular projects: \n{0}'.format(repo_centrality))
        repo_centrality = nx.algorithms.bipartite.closeness_centrality(
            g, repos, normalized=True)
        repo_centrality = take_by_value(repo_centrality.items(), limit,
                                        f=lambda t: t[0] in repos)
        labels.update(repo_centrality)
        print('Most central projects: \n{0}'.format(repo_centrality))
        user_centrality = nx.algorithms.bipartite.degree_centrality(g, users)
        user_centrality = take_by_value(user_centrality.items(), limit,
                                        f=lambda t: t[0] in users)
        labels.update(user_centrality)
        print('Most active users: \n{0}'.format(user_centrality))
        # Number of distinct (truthy) languages among each user's repos.
        user_languages = {
            u: len(set(nodes[n]['language'] for n in nx.neighbors(g, u)
                       if nodes[n]['language']))
            for u in users
        }
        user_centrality = nx.algorithms.bipartite.betweenness_centrality(g, users)
        # BUG FIX: parenthesize the filter — the original
        # `user_languages.get(t[0]) or 0 > 1` parsed as `x or (0 > 1)`,
        # i.e. `x or False`, which never applied the intended ">1" cut.
        user_centrality = take_by_value(
            user_centrality.items(), limit,
            f=lambda t: (user_languages.get(t[0]) or 0) > 1)
        labels.update(user_centrality)
        print('Users connecting communities: \n{0}'.format(user_centrality))
    if draw:
        draw_communities(g, labels=list(labels))