AdjToSpTensor()])  # NOTE(review): tail of a truncated `dataset = Cora(...)` call — its opening is outside this view


# We convert the binary masks to sample weights so that we can compute the
# average loss over the nodes (following original implementation by
# Kipf & Welling)
def mask_to_weights(mask):
    """Turn a boolean node mask into per-node sample weights.

    Each selected node gets weight 1 / (number of selected nodes), so a
    weighted *sum* of per-node losses equals the mean loss over the masked
    nodes; unselected nodes get weight 0.
    """
    return mask.astype(np.float32) / np.count_nonzero(mask)


# One weight vector per split (train / validation / test).
weights_tr, weights_va, weights_te = (
    mask_to_weights(mask)
    for mask in (dataset.mask_tr, dataset.mask_va, dataset.mask_te)
)

model = GCN(n_labels=dataset.n_labels, n_input_channels=dataset.n_node_features)
model.compile(
    optimizer=Adam(learning_rate),
    # reduction="sum" pairs with the 1/N sample weights above: the weighted
    # sum of losses is exactly the mean over the masked nodes.
    loss=CategoricalCrossentropy(reduction="sum"),
    weighted_metrics=["acc"],
)

# Train model
# SingleLoader yields the whole graph as one batch; the per-split sample
# weights restrict which nodes contribute to loss/metrics.
loader_tr = SingleLoader(dataset, sample_weights=weights_tr)
loader_va = SingleLoader(dataset, sample_weights=weights_va)
model.fit(
    loader_tr.load(),
    steps_per_epoch=loader_tr.steps_per_epoch,
    validation_data=loader_va.load(),
    validation_steps=loader_va.steps_per_epoch,
    epochs=epochs,
    # NOTE(review): the fit(...) call is truncated here in the source —
    # its closing paren (and any further args) lie outside this view
from spektral.layers import GCNConv
from spektral.models.gcn import GCN
from spektral.transforms import AdjToSpTensor, LayerPreprocess
from spektral.utils import tic, toc

tf.random.set_seed(seed=0)  # make weight initialization reproducible

# Load data: Cora with row-normalized features, GCN-normalized adjacency,
# and the adjacency converted to a tf.SparseTensor.
dataset = Cora(normalize_x=True, transforms=[LayerPreprocess(GCNConv), AdjToSpTensor()])
graph = dataset[0]
x, a, y = graph.x, graph.a, graph.y
mask_tr, mask_va, mask_te = dataset.mask_tr, dataset.mask_va, dataset.mask_te

# FIX: pass n_input_channels explicitly, consistent with the sibling script —
# building the model eagerly instead of deferring shape inference to first call.
model = GCN(n_labels=dataset.n_labels, n_input_channels=dataset.n_node_features)
# FIX: `lr` is deprecated in TF2 Keras optimizers; use `learning_rate`.
optimizer = Adam(learning_rate=1e-2)
loss_fn = CategoricalCrossentropy()


# Training step
@tf.function
def train():
    """Run one full-batch gradient step on the training nodes; return the loss."""
    with tf.GradientTape() as tape:
        predictions = model([x, a], training=True)
        # Loss only over training nodes, plus the model's regularization losses.
        loss = loss_fn(y[mask_tr], predictions[mask_tr])
        loss += sum(model.losses)
    gradients = tape.gradient(loss, model.trainable_variables)
    optimizer.apply_gradients(zip(gradients, model.trainable_variables))
    return loss
from spektral.layers import GCNConv
from spektral.models.gcn import GCN
from spektral.transforms import AdjToSpTensor, LayerPreprocess
from spektral.utils import tic, toc

tf.random.set_seed(seed=0)  # make weight initialization reproducible

# Load data: Cora with row-normalized features, GCN-normalized adjacency,
# and the adjacency converted to a tf.SparseTensor.
dataset = Cora(normalize_x=True, transforms=[LayerPreprocess(GCNConv), AdjToSpTensor()])
graph = dataset[0]
x, a, y = graph.x, graph.a, graph.y
mask_tr, mask_va, mask_te = dataset.mask_tr, dataset.mask_va, dataset.mask_te

model = GCN(n_labels=dataset.n_labels, n_input_channels=dataset.n_node_features)
# FIX: `lr` is deprecated in TF2 Keras optimizers; use `learning_rate`.
optimizer = Adam(learning_rate=1e-2)
loss_fn = CategoricalCrossentropy()


# Training step
@tf.function
def train():
    """Run one full-batch gradient step on the training nodes; return the loss."""
    with tf.GradientTape() as tape:
        predictions = model([x, a], training=True)
        # Loss only over training nodes, plus the model's regularization losses.
        loss = loss_fn(y[mask_tr], predictions[mask_tr])
        loss += sum(model.losses)
    gradients = tape.gradient(loss, model.trainable_variables)
    optimizer.apply_gradients(zip(gradients, model.trainable_variables))
    return loss