Example #1
File: data.py Project: jackd/grax
def remove_back_edges(single: SemiSupervisedSingle) -> SemiSupervisedSingle:
    """Return a copy of `single` whose graph has back edges removed."""
    nn = single.num_nodes
    # lil[c] collects the source row r of every edge (r, c) in the graph
    lil = [[] for _ in range(nn)]
    graph = ops.to_coo(single.graph)
    for r, c in zip(graph.row.to_py(), graph.col.to_py()):
        # lil[r].append(c)
        lil[c].append(r)
    algorithms.remove_back_edges(lil)
    lengths = jnp.array([len(l) for l in lil])
    rows = jnp.repeat(jnp.arange(nn, dtype=jnp.int32), lengths)
    cols = jnp.concatenate(
        [
            np.array(l, np.int32) if len(l) else np.zeros((0, ), np.int32)
            for l in lil
        ],
        axis=0,
    )
    graph = COO((jnp.ones((rows.size, )), rows, cols), shape=(nn, nn))
    return SemiSupervisedSingle(
        single.node_features,
        graph,
        single.labels,
        single.train_ids,
        single.validation_ids,
        single.test_ids,
    )
Example #2
 def f(z, x, *graph_components):
     A = COO(graph_components, shape=(x.shape[0],) * 2)
     z = GraphConvolution(x.shape[-1], kernel_initializer=w_init)(A, z)
     z = activation(z + x)
     if use_layer_norm:
         return layer_norm(z)
     return z
Example #3
def star_adjacency(num_nodes: int, dtype=jnp.float32) -> COO:
    """Get the adjacency matrix of an undirected star graph."""
    row = jnp.zeros((num_nodes - 1), dtype=jnp.int32)
    col = jnp.arange(1, num_nodes, dtype=jnp.int32)
    row, col = jnp.concatenate((row, col)), jnp.concatenate((col, row))
    return COO(
        (jnp.ones((2 * (num_nodes - 1), ), dtype=dtype), row, col),
        shape=(num_nodes, num_nodes),
    )
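A small usage sketch (assuming the returned COO exposes `.row` and `.col`, as Example #1 relies on):

adj = star_adjacency(4)
# hub node 0 is linked to nodes 1, 2, 3 in both directions: six unit-weight edges
print(adj.row)  # [0 0 0 1 2 3]
print(adj.col)  # [1 2 3 0 0 0]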
Example #4
def from_scipy(mat_sp) -> JAXSparse:
    assert sp.isspmatrix(mat_sp)
    if sp.isspmatrix_coo(mat_sp):
        return COO((mat_sp.data, mat_sp.row, mat_sp.col), shape=mat_sp.shape)
    if sp.isspmatrix_csr(mat_sp):
        return CSR((mat_sp.data, mat_sp.indices, mat_sp.indptr),
                   shape=mat_sp.shape)
    raise NotImplementedError(
        f"Only coo_matrix and csr_matrix supported, got {type(mat_sp)}")
Example #5
def random_adjacency(key: jnp.ndarray,
                     num_nodes: int,
                     num_edges: int,
                     dtype=jnp.float32) -> COO:
    """
    Get the adjacency matrix of a random connected undirected graph.

    Note that `num_edges` is only approximate. The process of creating edges is:
    - sample `num_edges` random edges
    - remove self-edges
    - add ring edges
    - add reverse edges
    - filter duplicates

    Args:
        key: `jax.random.PRNGKey`.
        num_nodes: number of nodes in returned graph.
        num_edges: number of random internal edges initially added.
        dtype: dtype of returned JAXSparse.

    Returns:
        COO, shape (num_nodes, num_nodes), weights all ones.
    """
    shape = num_nodes, num_nodes

    internal_indices = jax.random.uniform(
        key,
        shape=(num_edges, ),
        dtype=jnp.float32,
        maxval=num_nodes**2,
    ).astype(jnp.int32)
    # remove randomly sampled self-edges.
    self_edges = (internal_indices // num_nodes) == (internal_indices %
                                                     num_nodes)
    internal_indices = internal_indices[jnp.logical_not(self_edges)]

    # add a ring so we know the graph is connected
    r = jnp.arange(num_nodes, dtype=jnp.int32)
    ring_indices = r * num_nodes + (r + 1) % num_nodes
    indices = jnp.concatenate((internal_indices, ring_indices))

    # add reverse indices
    coords = jnp.unravel_index(indices, shape)
    coords_rev = coords[-1::-1]
    indices_rev = jnp.ravel_multi_index(coords_rev, shape)
    indices = jnp.concatenate((indices, indices_rev))

    # filter out duplicates
    indices = jnp.unique(indices)
    row, col = jnp.unravel_index(indices, shape)
    return COO((jnp.ones((row.size, ), dtype=dtype), row, col), shape=shape)
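A minimal call sketch (names are hypothetical; the boolean self-edge mask produces a data-dependent shape, so this is meant for eager, non-jitted use):

import jax

key = jax.random.PRNGKey(0)
adj = random_adjacency(key, num_nodes=100, num_edges=500)
assert adj.shape == (100, 100)
# symmetric by construction: every surviving (i, j) has a mirrored (j, i)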
Example #6
 def __call__(self, row, col, features):
     attn = hk.Linear(self.attention_size)(features)
     vals = jax.nn.sigmoid(jnp.einsum("na,na->n", attn[row], attn[col]))
     adj = COO((vals, row, col), shape=(features.shape[0], ) * 2)
     epsilon = jax.nn.sigmoid(
         hk.get_parameter(
             "epsilon_sig_inv",
             shape=(),
             dtype=features.dtype,
             init=lambda _, dtype: jnp.asarray(-2.0, dtype=dtype),
         ))
     propagator = _get_propagator(adj, epsilon, self.tol)
     features = hk.Linear(self.out_size)(features)
     return propagator @ features
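For reference, since `epsilon` is the sigmoid of the learned scalar, the `-2.0` initialization above corresponds to a starting propagation weight of roughly 0.12:

import jax
print(float(jax.nn.sigmoid(-2.0)))  # ≈ 0.11920292, the initial epsilon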
Example #7
File: data.py Project: jackd/grax
def _load_dgl_example(dgl_example,
                      make_symmetric=False,
                      sparse_features=False) -> SemiSupervisedSingle:
    feat, label = (dgl_example.ndata[k].numpy() for k in ("feat", "label"))
    if sparse_features:
        i, j = np.where(feat)
        feat = COO((feat[i, j], i, j), shape=feat.shape)
    train_ids, validation_ids, test_ids = (
        jnp.where(dgl_example.ndata[k].numpy())[0]
        if k in dgl_example.ndata else None
        for k in ("train_mask", "val_mask", "test_mask"))
    graph = _load_dgl_graph(dgl_example, make_symmetric=make_symmetric)
    label = jnp.asarray(label)

    return SemiSupervisedSingle(feat, graph, label, train_ids, validation_ids,
                                test_ids)
Example #8
def _normalized_laplacian(x: COO, epsilon: float = 0.0):
    num_nodes = x.shape[0]
    ## Do transform in scipy to avoid memory issues on large graphs
    # x = spax.ops.scale(symmetric_normalize(x), -(1 - epsilon))
    # # x = lin_ops.identity_plus(lin_ops.MatrixWrapper(x, is_self_adjoint=True))
    # with jax.experimental.enable_x64():
    #     x = spax.ops.add(spax.eye(num_nodes, x.dtype, x.row.dtype), x)

    x = to_scipy(x)
    assert sp.isspmatrix_coo(x)
    d = sp.linalg.norm(x, ord=1, axis=1)
    factor = jax.lax.rsqrt(d)
    x = sp.coo_matrix((x.data * factor[x.row] * factor[x.col], (x.row, x.col)),
                      shape=x.shape)
    x = sp.eye(num_nodes, dtype=x.dtype) - (1 - epsilon) * x
    x = x.tocoo()
    return from_scipy(x), jnp.asarray(d, dtype=x.dtype)
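In matrix terms, with d_i = Σ_j |A_ij| (the L1 row norms computed above) and D = diag(d), the operator returned alongside d is

L = I - (1 - ε) · D^{-1/2} A D^{-1/2}

which for ε = 0 is the standard symmetrically normalized Laplacian.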
Example #9
File: data.py Project: jackd/grax
def _load_dgl_graph(dgl_example, make_symmetric=False):
    r, c = (x.numpy() for x in dgl_example.edges())
    shape = (dgl_example.num_nodes(), ) * 2
    if make_symmetric:
        # add symmetric edges
        r = np.array(r, dtype=np.int64)
        c = np.array(c, dtype=np.int64)
        # remove diagonals
        valid = r != c
        r = r[valid]
        c = c[valid]
        r, c = np.concatenate((r, c)), np.concatenate((c, r))
        i1d = np.ravel_multi_index((r, c), shape)
        i1d = np.unique(i1d)  # also sorts
        # pylint: disable=unbalanced-tuple-unpacking
        r, c = np.unravel_index(i1d, shape)
    # return sp.coo_matrix((np.ones((r.size,), dtype=np.float32), (r, c)), shape=shape)
    return COO((jnp.ones((r.size, ), dtype=jnp.float32), r, c), shape=shape)
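The symmetrize-and-deduplicate step above is plain NumPy and can be checked in isolation on a hypothetical two-node graph:

import numpy as np

r = np.array([0, 1], dtype=np.int64)
c = np.array([1, 0], dtype=np.int64)            # (1, 0) already mirrors (0, 1)
r, c = np.concatenate((r, c)), np.concatenate((c, r))
i1d = np.unique(np.ravel_multi_index((r, c), (2, 2)))  # sorts and removes duplicates
r, c = np.unravel_index(i1d, (2, 2))
# r == [0, 1], c == [1, 0]: each direction of the undirected edge appears once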
Example #10
def igcn_fn(h, x, graph_v, graph_r, graph_c, epsilon=0.1,
            use_layer_norm: bool = True):
    # one propagation step: out = (1 - epsilon) * (A @ h) + x
    graph = COO((graph_v, graph_r, graph_c), shape=(x.shape[0],) * 2)
    out = (1 - epsilon) * (graph @ h) + x
    if use_layer_norm:
        out = layer_norm(out)
    return out
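A minimal call sketch (the three-node graph is hypothetical; `use_layer_norm=False` sidesteps the external `layer_norm` helper):

import jax.numpy as jnp

graph_v = jnp.ones((3,), dtype=jnp.float32)          # unit edge weights
graph_r = jnp.array([0, 1, 2], dtype=jnp.int32)      # a directed 3-cycle
graph_c = jnp.array([1, 2, 0], dtype=jnp.int32)
x = jnp.ones((3, 8))    # injected input features
h = jnp.zeros((3, 8))   # current propagated state
out = igcn_fn(h, x, graph_v, graph_r, graph_c, use_layer_norm=False)
# out == (1 - 0.1) * (graph @ h) + x, which is just x here since h is zero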
Example #11
 def propagate(features, attn, eps, row, col):
     adj = COO((attn, row, col), shape=(features.shape[0], ) * 2)
     propagator = _get_propagator(adj, eps, self.tol)
     return propagator @ features