# Example 1
"""
import tensorflow as tf
from tensorflow.keras.losses import CategoricalCrossentropy
from tensorflow.keras.optimizers import Adam

from spektral.datasets.citation import Cora
from spektral.layers import GCNConv
from spektral.models.gcn import GCN
from spektral.transforms import AdjToSpTensor, LayerPreprocess
from spektral.utils import tic, toc

tf.random.set_seed(seed=0)  # make weight initialization reproducible

# Load the Cora citation dataset with row-normalized node features,
# GCN-style adjacency preprocessing, and the adjacency converted to a
# tf.SparseTensor so it can be fed to sparse graph-conv layers.
dataset = Cora(normalize_x=True,
               transforms=[LayerPreprocess(GCNConv),
                           AdjToSpTensor()])
graph = dataset[0]  # Cora is a single-graph dataset
x, a, y = graph.x, graph.a, graph.y
# Boolean masks selecting the train / validation / test nodes.
mask_tr, mask_va, mask_te = dataset.mask_tr, dataset.mask_va, dataset.mask_te

model = GCN(n_labels=dataset.n_labels)
# `lr` was deprecated (and later removed) in tf.keras optimizers;
# `learning_rate` is the supported keyword.
optimizer = Adam(learning_rate=1e-2)
loss_fn = CategoricalCrossentropy()

# Training step.
# NOTE(review): the body of this function is truncated in this excerpt —
# the loss computation and gradient application are not visible here.
@tf.function  # compile into a TF graph for faster repeated execution
def train():
    with tf.GradientTape() as tape:
        # Full-batch forward pass over the whole graph.
        predictions = model([x, a], training=True)
# Example 2
    # NOTE(review): this fragment starts mid-function — its `def` line and the
    # definitions of `model`, `inputs`, and `opt` are not visible in this excerpt.
    with tf.GradientTape() as tape:
        # Forward pass; the model returns (presumably pooled output, selection
        # matrix S_pool) and registers its auxiliary losses on `model.losses`.
        _, S_pool = model(inputs, training=True)
        # Train purely on the model's self-registered losses (unsupervised).
        loss = sum(model.losses)
    gradients = tape.gradient(loss, model.trainable_variables)
    opt.apply_gradients(zip(gradients, model.trainable_variables))
    # Report the two individual loss terms plus the selection matrix.
    return model.losses[0], model.losses[1], S_pool


np.random.seed(1)  # make any numpy-based randomness reproducible
epochs = 5000  # Training iterations
lr = 5e-4  # Learning rate

################################################################################
# LOAD DATASET
################################################################################
dataset = Cora()
adj, x, y = dataset[0].a, dataset[0].x, dataset[0].y
# Symmetrically normalize the adjacency and convert it to a tf.SparseTensor
# so it can be fed to the sparse graph-conv layer below.
a_norm = normalized_adjacency(adj)
a_norm = sp_matrix_to_sp_tensor(a_norm)
F = dataset.n_node_features
y = np.argmax(y, axis=-1)  # one-hot labels -> integer class ids
n_clusters = y.max() + 1  # one cluster per label class

################################################################################
# MODEL
################################################################################
x_in = Input(shape=(F, ), name="X_in")
# sparse=True with shape (None,) accepts the N x N SparseTensor adjacency.
a_in = Input(shape=(None, ), name="A_in", sparse=True)

x_1 = GCSConv(16, activation="elu")([x_in, a_in])
# MinCutPool with return_selection=True yields
# (pooled features, pooled adjacency, cluster-selection matrix S).
x_1, a_1, s_1 = MinCutPool(n_clusters, return_selection=True)([x_1, a_in])
# Example 3
import tensorflow as tf
from tensorflow.keras.layers import Input, Dropout
from tensorflow.keras.losses import CategoricalCrossentropy
from tensorflow.keras.metrics import CategoricalAccuracy
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.regularizers import l2

from spektral.datasets.citation import Cora
from spektral.layers import GATConv
from spektral.transforms import LayerPreprocess, AdjToSpTensor
from spektral.utils import tic, toc

# Load data: Cora with GAT-style adjacency preprocessing and the adjacency
# converted to a tf.SparseTensor.
dataset = Cora(transforms=[LayerPreprocess(GATConv), AdjToSpTensor()])
graph = dataset[0]  # Cora is a single-graph dataset
x, a, y = graph.x, graph.a, graph.y
# Boolean masks selecting the train / validation / test nodes.
mask_tr, mask_va, mask_te = dataset.mask_tr, dataset.mask_va, dataset.mask_te

# Define model
x_in = Input(shape=(dataset.n_node_features, ))
# sparse=True with shape (None,) accepts the N x N SparseTensor adjacency.
a_in = Input(shape=(None, ), sparse=True)
x_1 = Dropout(0.6)(x_in)  # dropout on the input node features
x_1 = GATConv(8,
              attn_heads=8,
              concat_heads=True,
              dropout_rate=0.6,
              activation='elu',
              kernel_regularizer=l2(5e-4),
              attn_kernel_regularizer=l2(5e-4),