def _makekernel(self, **kwargs):
    self.kernel = MarginalizedGraphKernel(
        TensorProduct(element=KroneckerDelta(self.element_prior)),
        TensorProduct(length=SquareExponential(self.edge_length_scale)),
        q=self.stopping_probability,
        p=self.starting_probability,
        **kwargs
    )

def test_mlgk_on_permuted_graph():
    g = Graph.from_ase(molecule('C6H6'))
    for _ in range(10):
        h = g.permute(np.random.permutation(len(g.nodes)))
        kernel = MarginalizedGraphKernel(
            TensorProduct(element=KroneckerDelta(0.5)),
            TensorProduct(length=SquareExponential(0.1))
        )
        assert kernel([g], [h]).item() == pytest.approx(kernel([g]).item())

def test_mlgk_kernel_range_check():
    MarginalizedGraphKernel(
        node_kernel=KroneckerDelta(1e-7),
        edge_kernel=TensorProduct(attribute=SquareExponential(1.0))
    )
    MarginalizedGraphKernel(
        node_kernel=TensorProduct(feature=KroneckerDelta(0.5)),
        edge_kernel=TensorProduct(attribute=SquareExponential(1.0))
    )
    with pytest.warns(DeprecationWarning):
        MarginalizedGraphKernel(
            node_kernel=KroneckerDelta(0),
            edge_kernel=TensorProduct(attribute=SquareExponential(1.0))
        )
    with pytest.warns(DeprecationWarning):
        MarginalizedGraphKernel(
            node_kernel=TensorProduct(feature=KroneckerDelta(0.5)) + 1,
            edge_kernel=SquareExponential(1.0)
        )
    with pytest.warns(DeprecationWarning):
        MarginalizedGraphKernel(
            node_kernel=TensorProduct(feature=KroneckerDelta(0.5)),
            edge_kernel=TensorProduct(attribute=SquareExponential(1.0)) + 1
        )
    with pytest.warns(DeprecationWarning):
        MarginalizedGraphKernel(
            node_kernel=KroneckerDelta(0.5) * 2,
            edge_kernel=TensorProduct(attribute=SquareExponential(1.0))
        )
    with pytest.warns(DeprecationWarning):
        MarginalizedGraphKernel(
            node_kernel=TensorProduct(feature=KroneckerDelta(0.5)),
            edge_kernel=TensorProduct(attribute=SquareExponential(1.0)) * 2
        )

def test_mlgk_fixed_hyperparameters():
    g = nx.Graph()
    g.add_node(0, feature=0)
    g.add_node(1, feature=1)
    g.add_node(2, feature=0)
    g.add_edge(0, 1, attribute=1.0)
    g.add_edge(0, 2, attribute=2.0)
    G = [Graph.from_networkx(g)]

    knodeV = TensorProduct(feature=KroneckerDelta(0.5))
    knodeF = TensorProduct(feature=KroneckerDelta(0.5, h_bounds='fixed'))
    kedgeV = TensorProduct(attribute=SquareExponential(1.0))
    kedgeF = TensorProduct(
        attribute=SquareExponential(1.0, length_scale_bounds='fixed')
    )
    kernelVV = MarginalizedGraphKernel(knodeV, kedgeV)
    kernelVF = MarginalizedGraphKernel(knodeV, kedgeF)
    kernelFV = MarginalizedGraphKernel(knodeF, kedgeV)
    kernelFF = MarginalizedGraphKernel(knodeF, kedgeF)

    assert len(kernelVV.theta) == len(kernelVF.theta) + 1
    assert len(kernelVV.theta) == len(kernelFV.theta) + 1
    assert len(kernelVV.theta) == len(kernelFF.theta) + 2
    assert len(kernelVV.bounds) == len(kernelVF.bounds) + 1
    assert len(kernelVV.bounds) == len(kernelFV.bounds) + 1
    assert len(kernelVV.bounds) == len(kernelFF.bounds) + 2

    Rvv, dRvv = kernelVV(G, eval_gradient=True)
    Rvf, dRvf = kernelVF(G, eval_gradient=True)
    Rfv, dRfv = kernelFV(G, eval_gradient=True)
    Rff, dRff = kernelFF(G, eval_gradient=True)
    assert Rvv == pytest.approx(Rvf)
    assert Rvv == pytest.approx(Rfv)
    assert Rvv == pytest.approx(Rff)
    assert dRvv.shape[2] == dRvf.shape[2] + 1
    assert dRvv.shape[2] == dRfv.shape[2] + 1
    assert dRvv.shape[2] == dRff.shape[2] + 2
    assert dRvv[:, :, kernelVF.active_theta_mask] == pytest.approx(dRvf)
    assert dRvv[:, :, kernelFV.active_theta_mask] == pytest.approx(dRfv)
    assert dRvv[:, :, kernelFF.active_theta_mask] == pytest.approx(dRff)

def __init__(self, use_charge=False, adjacency='default', q=0.01,
             element_delta=0.2, bond_eps=0.02, charge_eps=0.2):
    self.use_charge = use_charge
    if adjacency == 'default':
        self.adjacency = AtomicAdjacency(shape='tent2', zoom=0.75)
    else:
        self.adjacency = adjacency
    self.q = q
    if use_charge:
        self.node_kernel = TensorProduct(
            element=KroneckerDelta(element_delta),
            charge=SquareExponential(charge_eps),
        )
    else:
        self.node_kernel = TensorProduct(
            element=KroneckerDelta(element_delta)
        )
    self.edge_kernel = TensorProduct(length=SquareExponential(bond_eps))

def test_maximin_basic():
    metric = MaxiMin(
        node_kernel=TensorProduct(element=KroneckerDelta(0.5)),
        edge_kernel=TensorProduct(length=SquareExponential(0.1)),
        q=0.01
    )
    distance = metric(G)
    assert distance.shape == (len(G), len(G))
    assert np.allclose(distance.diagonal(), 0, atol=1e-3)
    assert np.all(distance >= 0)
    assert np.allclose(distance, distance.T, rtol=1e-14, atol=1e-14)

    distance = metric(G, G)
    assert distance.shape == (len(G), len(G))
    assert np.allclose(distance.diagonal(), 0, atol=1e-3)
    assert np.all(distance >= 0)
    assert np.allclose(distance, distance.T, rtol=1e-4, atol=1e-4)

    distance = metric(G, H)
    assert distance.shape == (len(G), len(H))
    assert np.all(distance >= 0)

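# A minimal usage sketch, not part of the test above. It assumes, as in the
# tests, that G and H are lists of graphdot Graph objects and that `metric`
# is a MaxiMin instance: the pairwise distance matrix it returns can drive
# nearest-neighbor retrieval between two sets of graphs.
def nearest_neighbors(metric, G, H):
    """Return, for each graph in G, the index of its closest graph in H."""
    distance = metric(G, H)          # shape (len(G), len(H)), nonnegative
    return distance.argmin(axis=1)   # column index of the smallest distance per row
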
g1 = nx.Graph()
g1.add_node(0, category=(1, 2), symbol=1)
g1.add_node(1, category=(2,), symbol=2)
g1.add_edge(0, 1, w=1.0, spectra=[0.5, 0.2])

g2 = nx.Graph()
g2.add_node(0, category=(1, 3), symbol=1)
g2.add_node(1, category=(2, 3, 5), symbol=2)
g2.add_node(2, category=(1,), symbol=1)
g2.add_edge(0, 1, w=2.0, spectra=[0.1, 0.9, 1.5])
g2.add_edge(0, 2, w=0.5, spectra=[0.4])
g2.add_edge(1, 2, w=0.5, spectra=[0.3, 0.6])

# Define node and edge base kernels using the R-convolution framework
# Reference: Haussler, David. Convolution kernels on discrete structures. 1999.
knode = TensorProduct(symbol=KroneckerDelta(0.5),
                      category=Convolution(KroneckerDelta(0.5)))
kedge = TensorProduct(spectra=Convolution(SquareExponential(0.3)))

# compose the marginalized graph kernel and compute pairwise similarity
mlgk = MarginalizedGraphKernel(knode, kedge, q=0.05)
R = mlgk([Graph.from_networkx(g, weight='w') for g in [g1, g2]])

# normalize the similarity matrix
d = np.diag(R)**-0.5
K = np.diag(d).dot(R).dot(np.diag(d))
print(K)

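# Follow-up sketch: because the normalized matrix K has unit diagonal, it can
# be converted into a kernel-induced distance via d_ij = sqrt(2 - 2 * K_ij),
# the same construction used by the MaxiMin example further below. The
# clipping only guards against tiny negative values from round-off.
D = np.sqrt(np.maximum(0, 2 - 2 * K))
print(D)
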
g2.add_edge(0, 1)
g2.add_edge(1, 2)

# {1.0, 1} -- {2.0, 1}
#      \        /
#       {1.0, 2}
g3 = nx.Graph()
g3.add_node(0, radius=1.0, category=1)
g3.add_node(1, radius=2.0, category=1)
g3.add_node(2, radius=1.0, category=2)
g3.add_edge(0, 1)
g3.add_edge(0, 2)
g3.add_edge(1, 2)

# define node and edge kernelets
knode = TensorProduct(radius=SquareExponential(0.5),
                      category=KroneckerDelta(0.5))
kedge = Constant(1.0)

# compose the marginalized graph kernel and compute pairwise similarity
mlgk = MarginalizedGraphKernel(knode, kedge, q=0.05)
R = mlgk([Graph.from_networkx(g) for g in [g1, g2, g3]])

# normalize the similarity matrix
d = np.diag(R)**-0.5
K = np.diag(d).dot(R).dot(np.diag(d))
print(K)

from graphdot.metric.maximin import MaxiMin
from ase.build import molecule

np.set_printoptions(linewidth=999, precision=4, suppress=True)

molecules = [
    molecule('CH4'),
    molecule('NH3'),
    molecule('CH3OH'),
    molecule('H2O'),
]
graphs = [Graph.from_ase(m) for m in molecules]

metric = MaxiMin(
    node_kernel=TensorProduct(element=KroneckerDelta(0.5)),
    edge_kernel=TensorProduct(length=SquareExponential(0.1)),
    q=0.01
)
kernel = Normalization(
    MarginalizedGraphKernel(
        node_kernel=TensorProduct(element=KroneckerDelta(0.5)),
        edge_kernel=TensorProduct(length=SquareExponential(0.1)),
        q=0.01
    )
)


def check_hausdorff(X, Y=None):
    # GPU direct computation
    D = metric(X, Y)
    # Manual approach
    K = kernel(X, Y, nodal=True)
    d = np.sqrt(np.maximum(0, 2 - 2 * K))
    starts1 = np.cumsum([0] + [len(g.nodes) for g in X])[:-1]

    'CC',     # ethane
    'CCO',    # ethanol
    'CCN',    # ethylamine
    'C=C',    # ethene
    'CC=C',   # propene
    'CC=CC',  # 2-butene
]

# convert to molecular graphs
# nodes (atoms) have the 'aromatic', 'charge', 'element', and 'hcount' attributes
# edges (bonds) have the 'order' attribute
graphs = [Graph.from_smiles(smi) for smi in smiles_list]

# define node and edge kernelets
knode = TensorProduct(aromatic=KroneckerDelta(0.8),
                      charge=SquareExponential(1.0),
                      element=KroneckerDelta(0.5),
                      hcount=SquareExponential(1.0))
kedge = TensorProduct(order=KroneckerDelta(0.5))

# compose the marginalized graph kernel and compute pairwise similarity
kernel = MarginalizedGraphKernel(knode, kedge, q=0.05)
R = kernel(graphs)

# normalize the similarity matrix and then print
d = np.diag(R)**-0.5
K = np.diag(d).dot(R).dot(np.diag(d))
print(pd.DataFrame(K, columns=smiles_list, index=smiles_list))

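# Follow-up sketch (illustrative only): pick out the most similar pair of
# distinct molecules from the normalized similarity matrix K computed above.
off_diag = K - np.eye(len(K))                         # zero out self-similarity
i, j = np.unravel_index(off_diag.argmax(), K.shape)   # most similar off-diagonal pair
print('most similar pair: {} and {} (similarity {:.4f})'.format(
    smiles_list[i], smiles_list[j], K[i, j]))
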
        ]),
        'knode': Constant(1.0),
        'kedge': Constant(1.0),
        'q': [0.01, 0.05, 0.1, 0.5]
    },
    'labeled': {
        'graphs': Graph.unify_datatype([
            Graph.from_networkx(labeled_graph1),
            Graph.from_networkx(labeled_graph2)
        ]),
        'knode': TensorProduct(hybridization=KroneckerDelta(0.3),
                               charge=SquareExponential(1.0) + 0.01).normalized,
        'kedge': Additive(order=KroneckerDelta(0.3),
                          length=SquareExponential(0.05)).normalized,
        'q': [0.01, 0.05, 0.1, 0.5]
    },
    'weighted': {
        'graphs': Graph.unify_datatype([
            Graph.from_networkx(weighted_graph1, weight='w'),
            Graph.from_networkx(weighted_graph2, weight='w')
        ]),
        'knode': Additive(hybridization=KroneckerDelta(0.3),
                          charge=SquareExponential(1.0)).normalized,
        'kedge':