Example #1
import numpy as np
from tensorflow import keras

from spektral.datasets import mnist


def mnist_regular_graph(k=8):
    ''' Load the MNIST dataset as a graph. The graph is built with k-nearest
        neighbors; change the number of neighbors per node via k.
        params:
            k: number of neighbors per node
        returns:
            X_train, y_train, X_val, y_val, X_test, y_test, A, node_positions
    '''
    X_train, y_train, X_val, y_val, X_test, y_test, A = mnist.load_data(k)
    # Add a trailing channel axis so each node carries a single feature
    X_train = X_train[..., np.newaxis]
    X_val = X_val[..., np.newaxis]
    X_test = X_test[..., np.newaxis]
    # One-hot encode the digit labels
    y_train = keras.utils.to_categorical(y_train, 10)
    y_val = keras.utils.to_categorical(y_val, 10)
    y_test = keras.utils.to_categorical(y_test, 10)

    n_x, n_y = (28, 28)
    x = np.linspace(0, 10, n_x)
    y = np.linspace(0, 10, n_y)
    y = np.flip(y)
    xv, yv = np.meshgrid(x, y)
    pos = np.stack([xv.flatten(), yv.flatten()], axis=-1)
    return X_train, y_train, X_val, y_val, X_test, y_test, A.toarray(), pos
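For reference, a usage sketch for the helper above (not part of the original listing; the shapes follow from the 28 x 28 pixel grid, i.e. 784 nodes per graph signal):

X_train, y_train, X_val, y_val, X_test, y_test, A, pos = mnist_regular_graph(k=8)
print(X_train.shape)  # (n_train_samples, 784, 1): one scalar feature per node
print(y_train.shape)  # (n_train_samples, 10): one-hot digit labels
print(A.shape)        # (784, 784): dense k-NN adjacency matrix
print(pos.shape)      # (784, 2): x/y coordinates of each node, useful for plotting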
Example #2
from spektral.datasets import mnist


def test_mnist():
    mnist.load_data()
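A slightly more informative variant of this smoke test, sketched here for illustration (the test name and assertions are hypothetical; the seven return values match the other examples in this listing):

def test_mnist_shapes():
    X_tr, y_tr, X_va, y_va, X_te, y_te, A = mnist.load_data(k=8)
    # Each split pairs features with the same number of labels
    assert X_tr.shape[0] == y_tr.shape[0]
    assert X_va.shape[0] == y_va.shape[0]
    assert X_te.shape[0] == y_te.shape[0]
    # The adjacency matrix is square, with one row/column per node
    assert A.shape[0] == A.shape[1] == X_tr.shape[1]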
Example #3
from keras.layers import Input
from keras.regularizers import l2

from spektral.datasets import mnist
from spektral.layers import GraphConv
from spektral.layers.ops import sp_matrix_to_sp_tensor
from spektral.utils import normalized_laplacian

# Parameters
l2_reg = 5e-4         # Regularization rate for l2
learning_rate = 1e-3  # Learning rate for SGD
batch_size = 32       # Batch size
epochs = 20000        # Number of training epochs
es_patience = 200     # Patience for early stopping

# Load data
X_train, y_train, X_val, y_val, X_test, y_test, adj = mnist.load_data()
X_train, X_val, X_test = X_train[..., None], X_val[..., None], X_test[..., None]
N = X_train.shape[-2]      # Number of nodes in the graphs
F = X_train.shape[-1]      # Node features dimensionality
n_out = y_train.shape[-1]  # Dimension of the target

fltr = normalized_laplacian(adj)

# Model definition
X_in = Input(shape=(N, F))
# Pass A as a fixed tensor, otherwise Keras will complain about inputs of
# different rank.
A_in = Input(tensor=sp_matrix_to_sp_tensor(fltr))

graph_conv = GraphConv(32,
                       activation='elu',
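                       # -- sketch continuation (assumed, not from the original
                       # listing): close the GraphConv call, add a dense softmax
                       # head, and compile. Flatten, Dense, Model and SGD are
                       # assumed to be imported from keras.
                       kernel_regularizer=l2(l2_reg))([X_in, A_in])

flatten = Flatten()(graph_conv)
fc = Dense(512, activation='relu')(flatten)
output = Dense(n_out, activation='softmax')(fc)

model = Model(inputs=[X_in, A_in], outputs=output)
model.compile(optimizer=SGD(learning_rate),
              loss='categorical_crossentropy',
              metrics=['acc'])
model.summary()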
Example #4
from spektral.datasets import mnist


def test_mnist():
    mnist.load_data(k=8, noise_level=0.1)
Example #5
from tensorflow.keras.models import Model
from tensorflow.keras.regularizers import l2

from spektral.datasets import mnist
from spektral.layers import GraphConv
from spektral.layers.ops import sp_matrix_to_sp_tensor
from spektral.utils import batch_iterator

# Parameters
learning_rate = 1e-3  # Learning rate for Adam
batch_size = 32       # Batch size
epochs = 1000         # Number of training epochs
patience = 10         # Patience for early stopping
l2_reg = 5e-4         # Regularization rate for l2

# Load data
x_tr, y_tr, x_va, y_va, x_te, y_te, A = mnist.load_data()
x_tr, x_va, x_te = x_tr[..., None], x_va[..., None], x_te[..., None]
N = x_tr.shape[-2]    # Number of nodes in the graphs
F = x_tr.shape[-1]    # Node features dimensionality
n_out = 10            # Dimension of the target

# Create filter for GCN and convert to sparse tensor
fltr = GraphConv.preprocess(A)
fltr = sp_matrix_to_sp_tensor(fltr)


# Build model
class Net(Model):
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.conv1 = GraphConv(32, activation='elu', kernel_regularizer=l2(l2_reg))
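        # -- sketch continuation (assumed, not from the original listing):
        # finish the model with a second GraphConv, a dense softmax head, and a
        # minimal training loop. Flatten and Dense are assumed to come from
        # tensorflow.keras.layers, Adam from tensorflow.keras.optimizers, and
        # tensorflow is assumed to be imported as tf.
        self.conv2 = GraphConv(32, activation='elu', kernel_regularizer=l2(l2_reg))
        self.flatten = Flatten()
        self.fc1 = Dense(512, activation='relu')
        self.fc2 = Dense(n_out, activation='softmax')

    def call(self, inputs):
        x, a = inputs
        x = self.conv1([x, a])
        x = self.conv2([x, a])
        x = self.flatten(x)
        x = self.fc1(x)
        return self.fc2(x)


model = Net()
optimizer = Adam(learning_rate)
loss_fn = tf.keras.losses.SparseCategoricalCrossentropy()  # assumes integer labels in y_tr

# Iterate over mini-batches; the precomputed filter is shared by every sample
for batch in batch_iterator([x_tr, y_tr], batch_size=batch_size, epochs=epochs):
    x_batch, y_batch = batch
    with tf.GradientTape() as tape:
        predictions = model([x_batch, fltr], training=True)
        loss = loss_fn(y_batch, predictions) + sum(model.losses)
    gradients = tape.gradient(loss, model.trainable_variables)
    optimizer.apply_gradients(zip(gradients, model.trainable_variables))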