def main(backend):
    """Train a two-hidden-layer MLP on MNIST and report test accuracy.

    Args:
        backend: name of the neural-net backend to select via
            ``net.set_backend`` (e.g. ``'keras'``).

    Returns:
        The trained model.
    """
    (X_train, y_train), (X_test, y_test) = get_mnist(backend)

    # Select the backend implementation of the layer/model API.
    nn = net.set_backend(backend)

    model = nn.Sequential()
    model.add(nn.Dense(512, input_shape=(784,)))
    model.add(nn.Activation('relu'))
    model.add(nn.BatchNormalization())
    model.add(nn.Dropout(p=0.2))
    model.add(nn.Dense(512))
    model.add(nn.Activation('relu'))
    model.add(nn.BatchNormalization())
    model.add(nn.Dropout(p=0.2))
    model.add(nn.Dense(10))
    model.add(nn.Activation('softmax'))

    optimizer = nn.Adam()
    model.compile(loss='categorical_crossentropy', optimizer=optimizer)
    model.fit(X_train, y_train, nb_epoch=20)

    proba = model.predict(X_test)
    pred = np.argmax(proba, axis=1)

    if backend == 'keras':
        # Keras targets are one-hot encoded; recover class indices first.
        accuracy = np.mean(pred == np.argmax(y_test, axis=1))
    else:
        accuracy = np.mean(pred == y_test)
    # np.mean(pred == truth) is the fraction of correct predictions, i.e.
    # accuracy — the original 'Test error' label was misleading. Also
    # converted Python-2 print statements to print() calls for consistency
    # with the rest of the file.
    print('Test accuracy: {:.05f}'.format(accuracy))

    return model
def main(backend):
    """Load 'train.csv', build a three-hidden-layer MLP and fit it.

    Args:
        backend: name of the neural-net backend to select via
            ``net.set_backend`` (e.g. ``'keras'``).

    Returns:
        The trained model.
    """
    print('Loading data...')
    X, labels = load_data('train.csv', train=True)
    X, scaler = preprocess_data(X)

    # Keras expects one-hot targets; other backends take integer class ids.
    if backend == 'keras':
        y, encoder = preprocess_labels(labels)
        nb_classes = y.shape[1]
    else:
        y, encoder = preprocess_labels(labels, categorical=False)
        nb_classes = len(np.unique(y))
    print(nb_classes, 'classes')

    dims = X.shape[1]
    print(dims, 'dims')

    nn = net.set_backend(backend)

    model = nn.Sequential()
    # First hidden block carries the input shape; the remaining two are
    # identical Dense -> PReLU -> BatchNorm -> Dropout stacks.
    model.add(nn.Dense(512, input_shape=(dims,)))
    model.add(nn.PReLU())
    model.add(nn.BatchNormalization())
    model.add(nn.Dropout(p=0.5))
    for _ in range(2):
        model.add(nn.Dense(512))
        model.add(nn.PReLU())
        model.add(nn.BatchNormalization())
        model.add(nn.Dropout(p=0.5))
    model.add(nn.Dense(nb_classes))
    model.add(nn.Activation('softmax'))

    model.compile(loss='categorical_crossentropy', optimizer='adam')
    model.fit(X, y, nb_epoch=20, batch_size=128, validation_split=0.15)

    return model