def check_patterns(G1, G2, saveisolist=False, readisolist=False, plot=False):
    """Check whether the pattern graph G2 is subgraph-isomorphic to G1.

    Parameters
    ----------
    G1 : networkx graph
        Host graph searched for occurrences of the pattern.
    G2 : networkx graph
        Pattern graph.
    saveisolist : bool
        If True, enumerate every subgraph isomorphism and pickle the list
        to 'pickles/arch2_patt8.pickle'.
    readisolist : bool
        If True, load a previously pickled isomorphism list instead of
        (re-)enumerating it.
    plot : bool
        If True, draw the first matched subgraph of G1 next to G2.

    Returns
    -------
    bool
        True iff some subgraph of G1 is isomorphic to G2.
    """
    GM = GraphMatcher(G1=G1, G2=G2, node_match=None, edge_match=None)
    isomorph = GM.subgraph_is_isomorphic()

    # Sentinel: lets the plot branch detect whether a list is available yet.
    isomorph_list = None

    if saveisolist:
        # exist_ok makes the directory creation idempotent.
        os.makedirs("./pickles/", exist_ok=True)
        # List all isomorphisms between the two graphs.
        isomorph_list = list(GM.subgraph_isomorphisms_iter())
        # NOTE(review): pickle path is hard-coded; parameterize if reused.
        with open('pickles/arch2_patt8.pickle', "wb") as pickling_on:
            pickle.dump(isomorph_list, pickling_on)

    if readisolist:
        with open('pickles/arch2_patt8.pickle', "rb") as pickle_off:
            isomorph_list = pickle.load(pickle_off)

    if plot:
        # Bug fix: previously this raised NameError when plot=True but
        # neither saveisolist nor readisolist was set.  Enumerate lazily.
        if isomorph_list is None:
            isomorph_list = list(GM.subgraph_isomorphisms_iter())
        # Guard against an empty match list (original crashed with IndexError).
        if isomorph_list:
            options = {
                'line_color': 'grey',
                'font_size': 10,
                'node_size': 10,
                'with_labels': True,
            }
            # First match: its keys are G1 nodes, so induce that subgraph.
            G3 = G1.subgraph(isomorph_list[0])
            plt.figure(1)
            nx.draw(G3, **options)
            plt.figure(2)
            nx.draw(G2, **options)
            plt.show()

    return isomorph
def revert(self, hg: Hypergraph, return_subhg=False):
    ''' revert applying this production rule.
    i.e., if there exists a subhypergraph that matches the r.h.s. of this
    production rule, this method replaces the subhypergraph with a
    non-terminal hyperedge.

    Parameters
    ----------
    hg : Hypergraph
        hypergraph to be reverted
    return_subhg : bool
        if True, the removed subhypergraph will be returned as a third value.

    Returns
    -------
    hg : Hypergraph
        the resultant hypergraph. if it cannot be reverted, the original one
        is returned without any replacement.
    success : bool
        indicates whether reverting succeeded or not.
    '''
    gm = GraphMatcher(hg.hg, self.rhs.hg,
                      node_match=_node_match_prod_rule,
                      edge_match=_edge_match)
    try:
        # in case when the matched subhg is connected to the other part
        # via external nodes and more: keep drawing candidate matches until
        # one is found whose only links to the rest of `hg` go through the
        # rule's external nodes.
        not_iso = True
        while not_iso:
            # next() raises StopIteration when no (more) matches exist;
            # handled below as "cannot revert".
            isomap = next(gm.subgraph_isomorphisms_iter())
            adj_node_set = set([])  # reachable nodes from the internal nodes
            subhg_node_set = set(isomap.keys())  # nodes in subhg
            for each_node in subhg_node_set:
                adj_node_set.add(each_node)
                # Only internal (non-external) nodes may not touch anything
                # outside the matched subhypergraph.
                if isomap[each_node] not in self.ext_node.values():
                    adj_node_set.update(hg.hg.adj[each_node])
            if adj_node_set == subhg_node_set:
                # Match is properly isolated -> accept it.
                not_iso = False
            else:
                # NOTE(review): a non-isolated match aborts immediately
                # instead of trying the next candidate from the iterator.
                if return_subhg:
                    return hg, False, Hypergraph()
                else:
                    return hg, False
        # Map r.h.s. names back to names in `hg`.
        inv_isomap = {v: k for k, v in isomap.items()}
        ''' isomap = {'e35': 'e8', 'bond_13': 'bond_18', 'bond_14': 'bond_19',
                      'bond_15': 'bond_17', 'e29': 'e23', 'bond_12': 'bond_20'}
        where keys come from `hg` and values come from `self.rhs`
        '''
    except StopIteration:
        # No subgraph of `hg` matches the r.h.s.: return unchanged.
        if return_subhg:
            return hg, False, Hypergraph()
        else:
            return hg, False

    if return_subhg:
        # Snapshot the matched subhypergraph BEFORE it is removed from `hg`.
        subhg = Hypergraph()
        for each_node in hg.nodes:
            if each_node in isomap:
                subhg.add_node(each_node, attr_dict=hg.node_attr(each_node))
        for each_edge in hg.edges:
            if each_edge in isomap:
                subhg.add_edge(hg.nodes_in_edge(each_edge),
                               attr_dict=hg.edge_attr(each_edge),
                               edge_name=each_edge)
        subhg.edge_idx = hg.edge_idx

    # remove subhg except for the external nodes
    # (edges first, then nodes; names starting with 'e' are hyperedges,
    # names starting with 'bond_' are nodes — assumed naming convention).
    for each_key, each_val in isomap.items():
        if each_key.startswith('e'):
            hg.remove_edge(each_key)
    for each_key, each_val in isomap.items():
        if each_key.startswith('bond_'):
            if each_val not in self.ext_node.values():
                hg.remove_node(each_key)

    # add non-terminal hyperedge over the surviving external nodes,
    # preserving the external-node ordering of the rule.
    nt_node_list = []
    for each_ext_id in self.ext_node.keys():
        nt_node_list.append(inv_isomap[self.ext_node[each_ext_id]])

    hg.add_edge(nt_node_list,
                attr_dict=dict(
                    terminal=False,
                    symbol=self.lhs_nt_symbol))
    if return_subhg:
        return hg, True, subhg
    else:
        return hg, True
# Build the source-side and (remapped) target-side training graphs, tag each
# edge with its origin, and print every match of the composed pattern.
rev_tm, train_map = Partition.get_train_node_sets(dataset)[1]

g1 = nx.Graph()
g1.add_edges_from([(triple[0], triple[-1]) for triple in dataset.triple1])

g2 = nx.Graph()
g2.add_edges_from([(train_map[triple[0]], train_map[triple[-1]])
                   for triple in dataset.triple2])

# Mark which graph each edge originated from; the matcher compares the flags.
nx.set_edge_attributes(g1, True, 'g1')
nx.set_edge_attributes(g2, True, 'g2')

pattern = nx.compose(src_pattern, trg_pattern)
print(pattern.edges)

g = nx.compose(g1, g2)


def _same_origin(host_attrs, patt_attrs):
    # Edges match only when their g1/g2 origin flags agree on both sides.
    same_g1 = host_attrs.get('g1', False) == patt_attrs.get('g1', False)
    same_g2 = host_attrs.get('g2', False) == patt_attrs.get('g2', False)
    return same_g1 and same_g2


gm = GraphMatcher(g, pattern, edge_match=_same_origin)

import utils

e1, e2 = map(utils.mp2list, dataset.ents)
for mapping in gm.subgraph_isomorphisms_iter():
    # Order the matched host nodes by the pattern node they map to.
    ordered_keys = sorted(mapping.keys(), key=mapping.get)
    print('--------')
    print('e1s are: \n', '\n'.join(e1[i] for i in ordered_keys))
    print('\ne2s are: ', '\n'.join(e2[rev_tm[i]] for i in ordered_keys))
def clone_subgraphs(self, g):
    """Patch a CGR by cloning matching R-groups and re-attaching lost X groups.

    Parameters
    ----------
    g : CGRContainer
        the CGR to process; anything else raises InvalidData.

    Returns
    -------
    CGRContainer
        a copy of the (possibly patched) graph.

    NOTE(review): relies on private helpers (__split_graph, __remap_group,
    __node_match_products, __edge_match_products) not visible here — their
    exact semantics are assumed from usage.
    """
    if not isinstance(g, CGRContainer):
        raise InvalidData('only CGRContainer acceptable')

    r_group = []          # list of [terminal-atom set, component] pairs
    x_group = {}          # X-terminal atom -> component containing it
    r_group_clones = []   # matched [terminal set, mapping] pairs
    newcomponents = []    # components with neither R nor X terminals

    ''' search bond breaks and creations '''
    components, lost_bonds, term_atoms = self.__split_graph(g)
    # lost_bonds is iterable of (r_atom, x_atom) pairs — assumed from the
    # unpacking below; TODO confirm against __split_graph.
    lost_map = {x: y for x, y in lost_bonds}

    ''' extract subgraphs and sort by group type (R or X) '''
    x_terminals = set(lost_map.values())
    r_terminals = set(lost_map)

    for i in components:
        x_terminal_atom = x_terminals.intersection(i)
        if x_terminal_atom:
            # Component holds an X terminal; index it by that atom.
            x_group[x_terminal_atom.pop()] = i
            continue

        r_terminal_atom = r_terminals.intersection(i)
        if r_terminal_atom:
            # Keep the whole terminal set (may hold several atoms).
            r_group.append([r_terminal_atom, i])
            continue

        newcomponents.append(i)

    ''' search similar R groups and patch. '''
    tmp = g
    for i in newcomponents:
        for k, j in r_group:
            gm = GraphMatcher(j, i, node_match=self.__node_match_products,
                              edge_match=self.__edge_match_products)

            ''' search for similar R-groups started from bond breaks. '''
            # Accept the first isomorphism that covers every terminal atom
            # of the R-group and maps them all onto terminal atoms.
            mapping = next((x for x in gm.subgraph_isomorphisms_iter()
                            if k.issubset(x) and all(x[y] in term_atoms for y in k)), None)
            if mapping:
                r_group_clones.append([k, mapping])
                tmp = compose(tmp, self.__remap_group(j, tmp, mapping)[0])
                # First matching R-group wins for this component.
                break

    ''' add lose X groups to R groups '''
    for i, j in r_group_clones:
        for k in i:
            # Remap the X group that was bonded to terminal k, union it in,
            # and restore the single bond between the clone and the X group.
            remappedgroup, mapping = self.__remap_group(
                x_group[lost_map[k]], tmp, {})
            tmp = CGRcore.union(tmp, remappedgroup)
            tmp.add_edge(j[k], mapping[lost_map[k]], s_bond=1,
                         sp_bond=(1, None))

    if r_group_clones:
        # Preserve the original metadata only when something was patched.
        tmp.meta.update(g.meta)

    return tmp.copy()