def test_mlp_classification():
    """Train a small MLP on the module-level train split and require ROC AUC >= 0.95.

    Relies on module globals ``X_train``/``X_test``/``y_train``/``y_test``
    being defined elsewhere in the file (fixture or setup code).
    """
    train_targets = one_hot(y_train)
    test_targets = one_hot(y_test)

    # Two hidden layers with weight regularization/constraints, softmax output.
    net = NeuralNet(
        layers=[
            Dense(256, Parameters(init='uniform', regularizers={'W': L2(0.05)})),
            Activation('relu'),
            Dropout(0.5),
            Dense(128, Parameters(init='normal', constraints={'W': MaxNorm()})),
            Activation('relu'),
            Dense(2),
            Activation('softmax'),
        ],
        loss='categorical_crossentropy',
        optimizer=Adadelta(),
        metric='accuracy',
        batch_size=64,
        max_epochs=25,
    )

    net.fit(X_train, train_targets)
    probabilities = net.predict(X_test)

    # Score the positive-class column of the one-hot targets/predictions.
    assert roc_auc_score(test_targets[:, 0], probabilities[:, 0]) >= 0.95
def classification():
    """Generate a synthetic binary problem, train an MLP, and print its ROC AUC."""
    # Well-separated two-class problem so the network can reach high accuracy.
    features, labels = make_classification(
        n_samples=1000,
        n_features=100,
        n_informative=75,
        random_state=1111,
        n_classes=2,
        class_sep=2.5,
    )
    labels = one_hot(labels)

    X_tr, X_te, y_tr, y_te = train_test_split(
        features, labels, test_size=0.15, random_state=1111
    )

    # Two regularized/constrained hidden layers feeding a softmax head.
    net = NeuralNet(
        layers=[
            Dense(256, Parameters(init="uniform", regularizers={"W": L2(0.05)})),
            Activation("relu"),
            Dropout(0.5),
            Dense(128, Parameters(init="normal", constraints={"W": MaxNorm()})),
            Activation("relu"),
            Dense(2),
            Activation("softmax"),
        ],
        loss="categorical_crossentropy",
        optimizer=Adadelta(),
        metric="accuracy",
        batch_size=64,
        max_epochs=25,
    )

    net.fit(X_tr, y_tr)
    probabilities = net.predict(X_te)
    print("classification accuracy", roc_auc_score(y_te[:, 0], probabilities[:, 0]))
def test_mlp():
    """Fit an MLP on the module-level data split and assert ROC AUC >= 0.95.

    Uses module globals ``X_train``/``X_test``/``y_train``/``y_test``
    prepared elsewhere in this file.
    """
    encoded_train = one_hot(y_train)
    encoded_test = one_hot(y_test)

    # 256 -> 128 -> 2 architecture; dropout plus L2/MaxNorm on the weights.
    layer_stack = [
        Dense(256, Parameters(init="uniform", regularizers={"W": L2(0.05)})),
        Activation("relu"),
        Dropout(0.5),
        Dense(128, Parameters(init="normal", constraints={"W": MaxNorm()})),
        Activation("relu"),
        Dense(2),
        Activation("softmax"),
    ]
    model = NeuralNet(
        layers=layer_stack,
        loss="categorical_crossentropy",
        optimizer=Adadelta(),
        metric="accuracy",
        batch_size=64,
        max_epochs=25,
    )

    model.fit(X_train, encoded_train)
    scores = model.predict(X_test)

    # Compare the first one-hot column of truth vs. predicted probabilities.
    assert roc_auc_score(encoded_test[:, 0], scores[:, 0]) >= 0.95
def classification():
    """Run an end-to-end MLP demo on random data and print the ROC AUC."""
    # Synthetic, clearly separable binary dataset (fixed seed for repeatability).
    data, target = make_classification(
        n_samples=1000,
        n_features=100,
        n_informative=75,
        random_state=1111,
        n_classes=2,
        class_sep=2.5,
    )
    target = one_hot(target)

    X_train, X_test, y_train, y_test = train_test_split(
        data, target, test_size=0.15, random_state=1111
    )

    # Build the network: two hidden ReLU layers with dropout, softmax output.
    hidden = [
        Dense(256, Parameters(init='uniform', regularizers={'W': L2(0.05)})),
        Activation('relu'),
        Dropout(0.5),
        Dense(128, Parameters(init='normal', constraints={'W': MaxNorm()})),
        Activation('relu'),
        Dense(2),
        Activation('softmax'),
    ]
    model = NeuralNet(
        layers=hidden,
        loss='categorical_crossentropy',
        optimizer=Adadelta(),
        metric='accuracy',
        batch_size=64,
        max_epochs=25,
    )

    model.fit(X_train, y_train)
    predicted = model.predict(X_test)
    print('classification accuracy', roc_auc_score(y_test[:, 0], predicted[:, 0]))