Example #1
def test_mlgk_self_loops():
    kedge = Constant(1.0)
    knode = Constant(1.0)
    q = 0.1
    mlgk = MarginalizedGraphKernel(knode, kedge, q=q)

    np.random.seed(2)
    for i in range(10):
        n = np.random.randint(4, 20)
        A = np.random.randn(n, n)
        A = A + A.T

        G = [Graph.from_networkx(nx.from_numpy_array(A), weight='weight')]

        K = mlgk(G).item()
        K0 = MLGK(G[0], knode, kedge, q, q, nodal=False)

        assert (K == pytest.approx(K0, 5e-4))
Example #2
def test_mlgk_typecheck():
    node_kernel = Constant(1.0)
    edge_kernel = Constant(1.0)
    mlgk = MarginalizedGraphKernel(node_kernel, edge_kernel, q=0.5)
    G = [
        Graph.from_networkx(unlabeled_graph1),
        Graph.from_networkx(labeled_graph1),
        Graph.from_networkx(weighted_graph1, weight='w')
    ]

    with pytest.raises(TypeError):
        mlgk([G[0], G[1]])
    with pytest.raises(TypeError):
        mlgk([G[0], G[2]])
    with pytest.raises(TypeError):
        mlgk([G[1], G[2]])
    with pytest.raises(TypeError):
        mlgk([G[1], G[0]])
    with pytest.raises(TypeError):
        mlgk([G[2], G[0]])
    with pytest.raises(TypeError):
        mlgk([G[2], G[1]])
Example #3
def test_mlgk_large():
    g = nx.Graph()
    n = 24
    for i, row in enumerate(np.random.randint(0, 2, (n, n))):
        g.add_node(i, type=0)
        for j, pred in enumerate(row[:i]):
            if pred:
                g.add_edge(i, j, weight=1)

    dfg = Graph.from_networkx(g, weight='weight')

    q = 0.5
    node_kernel = TensorProduct(type=KroneckerDelta(1.0))
    edge_kernel = Constant(1.0)
    mlgk = MarginalizedGraphKernel(node_kernel, edge_kernel, q=q)

    dot = mlgk([dfg])
    gold = MLGK(dfg, node_kernel, edge_kernel, q, q)

    assert (dot.shape == (1, 1))
    assert (dot.item() == pytest.approx(gold))
Example #4
def test_mlgk_dtype():
    g = nx.Graph()
    n = 8
    for i, row in enumerate(np.random.randint(0, 2, (n, n))):
        g.add_node(i, type=0)
        for j, pred in enumerate(row[:i]):
            if pred:
                g.add_edge(i, j, weight=1)

    dfg = Graph.from_networkx(g, weight='weight')

    q = 0.5
    node_kernel = TensorProduct(type=KroneckerDelta(1.0))
    edge_kernel = Constant(1.0)

    # np.float was removed from recent NumPy releases; use the builtin float instead
    for dtype in [float, np.float32, np.float64]:
        mlgk = MarginalizedGraphKernel(node_kernel,
                                       edge_kernel,
                                       q=q,
                                       dtype=dtype)

        assert (mlgk([dfg]).dtype == dtype)
        assert (mlgk.diag([dfg]).dtype == dtype)
Example #5
g2.add_edge(0, 1)
g2.add_edge(1, 2)

# {1.0, 1} -- {2.0, 1}
#     \         /
#      {1.0, 2}
g3 = nx.Graph()
g3.add_node(0, radius=1.0, category=1)
g3.add_node(1, radius=2.0, category=1)
g3.add_node(2, radius=1.0, category=2)
g3.add_edge(0, 1)
g3.add_edge(0, 2)
g3.add_edge(1, 2)

# define node and edge kernels
knode = TensorProduct(radius=SquareExponential(0.5),
                      category=KroneckerDelta(0.5))

kedge = Constant(1.0)

# compose the marginalized graph kernel and compute pairwise similarity
mlgk = MarginalizedGraphKernel(knode, kedge, q=0.05)

R = mlgk([Graph.from_networkx(g) for g in [g1, g2, g3]])

# normalize the similarity matrix
d = np.diag(R)**-0.5
K = np.diag(d).dot(R).dot(np.diag(d))

print(K)
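As a side note on the node kernel above: to my understanding, TensorProduct multiplies its component microkernels, SquareExponential(0.5) compares the radii with a Gaussian of length scale 0.5, and KroneckerDelta(0.5) yields 1 for matching categories and 0.5 otherwise. A minimal hand-rolled sketch under those assumptions (the helper name knode_manual is made up for illustration):

def knode_manual(node1, node2, length_scale=0.5, h=0.5):
    # Assumed behaviour of TensorProduct(radius=SquareExponential(0.5),
    #                                     category=KroneckerDelta(0.5)):
    # a Gaussian on the radius difference times a Kronecker delta on the category.
    k_radius = np.exp(-(node1['radius'] - node2['radius'])**2 / (2 * length_scale**2))
    k_category = 1.0 if node1['category'] == node2['category'] else h
    return k_radius * k_category

# compare two nodes of g3: same category, different radii
print(knode_manual(g3.nodes[0], g3.nodes[1]))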
Example #6
import networkx as nx
from graphdot import Graph
from graphdot.kernel.marginalized import MarginalizedGraphKernel
from graphdot.kernel.fix import Normalization
from graphdot.microkernel import (TensorProduct, DotProduct, Constant)

# Each node carries a vector-valued 'soap' attribute, to be compared with a
# normalized dot product; edges carry a scalar weight 'w'.
g1 = nx.Graph()
g1.add_node(0, soap=[0.5, 1.5, 2.5, 0.5])
g1.add_node(1, soap=[0.5, 1.5, 2.5, 0.5])
g1.add_edge(0, 1, w=1.0)

g2 = nx.Graph()
g2.add_node(0, soap=[0.5, 1.5, 2.5, 3.5])
g2.add_node(1, soap=[1.5, 1.5, 0.5, 3.5])
g2.add_node(2, soap=[0.5, 2.5, 2.5, 0.5])
g2.add_edge(0, 1, w=2.0)
g2.add_edge(0, 2, w=0.5)
g2.add_edge(1, 2, w=0.5)

# compose the marginalized graph kernel and compute pairwise similarity
mlgk = Normalization(
    MarginalizedGraphKernel(
        node_kernel=TensorProduct(soap=DotProduct().normalized),
        edge_kernel=Constant(1),
        q=0.05))

G = [Graph.from_networkx(g, weight='w') for g in [g1, g2]]
print(f'Whole-graph similarity\n{mlgk(G)}')
print(f'Nodal similarity\n{mlgk(G, nodal=True)}')
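For intuition on the node kernel: DotProduct().normalized should, as far as I understand graphdot's .normalized convention (k(x, y) / sqrt(k(x, x) * k(y, y))), reduce to the cosine similarity of two 'soap' vectors. A quick sanity-check sketch under that assumption:

import numpy as np

a = np.asarray(g1.nodes[0]['soap'], dtype=float)
b = np.asarray(g2.nodes[0]['soap'], dtype=float)
# cosine similarity: dot product divided by the product of the norms
print(a @ b / (np.linalg.norm(a) * np.linalg.norm(b)))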
Example #7
g2.add_node(2)
g2.add_edge(0, 1)
g2.add_edge(1, 2)

# 0 --- 1
#  \  /
#   2
g3 = nx.Graph()
g3.add_node(0)
g3.add_node(1)
g3.add_node(2)
g3.add_edge(0, 1)
g3.add_edge(0, 2)
g3.add_edge(1, 2)

# define trivial node and edge kernels
knode = Constant(1.0)
kedge = Constant(1.0)

# compose the marginalized graph kernel and compute pairwise similarity
mlgk = MarginalizedGraphKernel(knode, kedge, q=0.05)

R = mlgk([Graph.from_networkx(g) for g in [g1, g2, g3]])

# normalize the similarity matrix
d = np.diag(R)**-0.5
K = np.diag(d).dot(R).dot(np.diag(d))

# all entries should be approximately 1 plus round-off error
print(K)
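The manual rescaling above can presumably also be delegated to the Normalization wrapper used in Example #6; a minimal sketch, assuming Normalization performs the same diagonal rescaling R[i, j] / sqrt(R[i, i] * R[j, j]):

from graphdot.kernel.fix import Normalization

mlgk_normalized = Normalization(MarginalizedGraphKernel(knode, kedge, q=0.05))
K2 = mlgk_normalized([Graph.from_networkx(g) for g in [g1, g2, g3]])
print(K2)  # expected to match K above: all entries approximately 1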
Example #8
vario_graph1.add_edge('O1', 'H2', spectrum=(3, 5), w=2.0)

vario_graph2 = nx.Graph(title='H2')
vario_graph2.add_node('H1', rings=(3, 4))
vario_graph2.add_node('H2', rings=(3, ))
vario_graph2.add_edge('H1', 'H2', spectrum=(2, 4), w=3.0)

case_dict = {
    'unlabeled': {
        'graphs':
        Graph.unify_datatype([
            Graph.from_networkx(unlabeled_graph1),
            Graph.from_networkx(unlabeled_graph2)
        ]),
        'knode':
        Constant(1.0),
        'kedge':
        Constant(1.0),
        'q': [0.01, 0.05, 0.1, 0.5]
    },
    'labeled': {
        'graphs':
        Graph.unify_datatype([
            Graph.from_networkx(labeled_graph1),
            Graph.from_networkx(labeled_graph2)
        ]),
        'knode':
        TensorProduct(hybridization=KroneckerDelta(0.3),
                      charge=SquareExponential(1.) + 0.01).normalized,
        'kedge':
        Additive(order=KroneckerDelta(0.3),