Example #1
from spektral.datasets import delaunay


def test_delaunay():
    # correctly_padded is assumed to be a helper defined elsewhere in the
    # test suite; it checks that the adjacency and node-feature arrays are
    # padded consistently.
    adj, nf, labels = delaunay.generate_data('numpy')
    correctly_padded(adj, nf, None)
    assert adj.shape[0] == labels.shape[0]

    # Test that it doesn't crash
    delaunay.generate_data('networkx')
Example #2
from spektral.datasets import delaunay


def test_delaunay():
    # Same check as above, but with explicit keyword arguments and a
    # custom list of classes to generate.
    adj, nf, labels = delaunay.generate_data(return_type='numpy',
                                             classes=[0, 1, 2])
    correctly_padded(adj, nf, None)
    assert adj.shape[0] == labels.shape[0]

    # Test that it doesn't crash
    delaunay.generate_data(return_type='networkx')
Example #3
from keras.callbacks import EarlyStopping
from keras.layers import Input, Dense
from keras.models import Model
from keras.optimizers import Adam
from keras.regularizers import l2
from sklearn.model_selection import train_test_split

from spektral.datasets import delaunay
from spektral.layers import GraphConv, GlobalAttentionPool
from spektral.utils import localpooling_filter
from spektral.utils.logging import init_logging

# Load data
adj, x, y = delaunay.generate_data(return_type='numpy', classes=[0, 5])
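# Assumed shapes in 'numpy' mode, matching the parameters read below:
# adj is (n_samples, N, N), x is (n_samples, N, F), and y is a one-hot
# label matrix of shape (n_samples, n_classes).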

# Parameters
N = x.shape[-2]  # Number of nodes in the graphs
F = x.shape[-1]  # Original feature dimensionality
n_classes = y.shape[-1]  # Number of classes
l2_reg = 5e-4  # L2 regularization rate
learning_rate = 1e-3  # Learning rate for Adam
epochs = 200  # Number of training epochs
batch_size = 32  # Batch size
es_patience = 10  # Patience for early stopping
log_dir = init_logging()  # Create log directory and file

# Preprocessing
fltr = localpooling_filter(adj.copy())
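# localpooling_filter computes the GCN filter of Kipf & Welling,
# A_hat = D^(-1/2) (A + I) D^(-1/2), for each adjacency matrix in the batch.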

# Train/test split
fltr_train, fltr_test, \