nb_epoch = 20

# the data, shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = datasets.load_mnist()

X_train = X_train.reshape(60000, 784)
X_test = X_test.reshape(10000, 784)
X_train = X_train.astype(S.floatX())
X_test = X_test.astype(S.floatX())
X_train /= 255
X_test /= 255
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')

# convert class vectors to binary class matrices
Y_train = S.categorical(y_train, 10)
Y_test = S.categorical(y_test, 10)

model = models.DeepNetwork()
model.add(layers.InputLayer((None, 28 * 28)))
model.add(layers.DenseLayer(512))
model.add(layers.ActivationLayer(S.relu))
model.add(layers.DenseLayer(484, activation=S.relu))
model.add(layers.DropoutLayer(0.2))
model.add(layers.DenseLayer(512, activation=S.relu))
model.add(layers.DropoutLayer(0.2))
model.add(layers.DenseLayer(10, activation=S.softmax))

adam = optimizers.Adam(learning_rate=0.02)
adadelta = optimizers.AdaDELTA()
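The listing instantiates two optimizers but stops before compiling or training the network. As a hedged sketch only, and assuming the framework exposes a Keras-style interface, training and evaluation could follow along these lines; the compile/fit/evaluate method names, the loss string, and the batch size of 128 are assumptions, not part of the listing above.

# Hypothetical continuation -- compile/fit/evaluate and the loss name are
# assumed to follow a Keras-style API and may differ in this framework.
model.compile(optimizer=adadelta, loss='categorical_crossentropy')
model.fit(X_train, Y_train, batch_size=128, nb_epoch=nb_epoch,
          validation_data=(X_test, Y_test))
print('Test score:', model.evaluate(X_test, Y_test))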
# the data, shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = datasets.load_mnist()

X_train = X_train.reshape(X_train.shape[0], 1, img_rows, img_cols)
X_test = X_test.reshape(X_test.shape[0], 1, img_rows, img_cols)
X_train = X_train.astype(S.floatX())
X_test = X_test.astype(S.floatX())
X_train /= 255
X_test /= 255
print('X_train shape:', X_train.shape)
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')

# convert class vectors to binary class matrices
Y_train = S.categorical(y_train, nb_classes)
Y_test = S.categorical(y_test, nb_classes)

model = models.OverlayModel()
model.add(layers.InputLayer((None, 1, img_rows, img_cols)))
model.add(layers.Conv2DLayer(nb_filters, (nb_conv, nb_conv),
                             padding=0, activation=S.relu))
model.add(layers.Conv2DLayer(nb_filters, (nb_conv, nb_conv),
                             padding=0, max_pool_shape=(nb_pool, nb_pool),
                             activation=S.relu))
model.add(layers.DropoutLayer(0.25))
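Because both convolutions use padding=0 (valid mode), each one shrinks the feature maps, and the max pooling attached to the second convolution halves them again. A quick sanity check of the spatial sizes, assuming the usual MNIST settings of 28x28 inputs, 3x3 kernels, and 2x2 pooling (these values are assumptions; they are not defined in this listing):

# Assumed hyperparameters for illustration only; not defined in the listing above.
img_rows, img_cols, nb_conv, nb_pool = 28, 28, 3, 2
after_conv1 = img_rows - nb_conv + 1         # 26: first valid convolution
after_conv2 = after_conv1 - nb_conv + 1      # 24: second valid convolution
after_pool = after_conv2 // nb_pool          # 12: 2x2 max pooling
print(after_conv1, after_conv2, after_pool)  # 26 24 12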