def test_mlp_classification():
    """Train a regularized MLP on the module-level split; require ROC AUC >= 0.95."""
    train_targets = one_hot(y_train)
    test_targets = one_hot(y_test)

    # Two hidden layers: the first L2-regularized, the second max-norm constrained.
    layer_stack = [
        Dense(256, Parameters(init='uniform', regularizers={'W': L2(0.05)})),
        Activation('relu'),
        Dropout(0.5),
        Dense(128, Parameters(init='normal', constraints={'W': MaxNorm()})),
        Activation('relu'),
        Dense(2),
        Activation('softmax'),
    ]
    model = NeuralNet(
        layers=layer_stack,
        loss='categorical_crossentropy',
        optimizer=Adadelta(),
        metric='accuracy',
        batch_size=64,
        max_epochs=25,
    )

    model.fit(X_train, train_targets)
    predictions = model.predict(X_test)
    assert roc_auc_score(test_targets[:, 0], predictions[:, 0]) >= 0.95
# Example #2
def classification():
    """Fit an MLP on a synthetic binary problem and print its test ROC AUC."""
    # Generate a random binary classification problem.
    features, labels = make_classification(
        n_samples=1000,
        n_features=100,
        n_informative=75,
        random_state=1111,
        n_classes=2,
        class_sep=2.5,
    )
    labels = one_hot(labels)
    X_train, X_test, y_train, y_test = train_test_split(
        features, labels, test_size=0.15, random_state=1111
    )

    network = NeuralNet(
        layers=[
            Dense(256, Parameters(init="uniform", regularizers={"W": L2(0.05)})),
            Activation("relu"),
            Dropout(0.5),
            Dense(128, Parameters(init="normal", constraints={"W": MaxNorm()})),
            Activation("relu"),
            Dense(2),
            Activation("softmax"),
        ],
        loss="categorical_crossentropy",
        optimizer=Adadelta(),
        metric="accuracy",
        batch_size=64,
        max_epochs=25,
    )

    network.fit(X_train, y_train)
    predictions = network.predict(X_test)
    # AUC computed on the positive-class column of the one-hot targets/scores.
    print("classification accuracy", roc_auc_score(y_test[:, 0], predictions[:, 0]))
# Example #3
def clasifier(optimizer):
    """Train a small MLP with the supplied optimizer; return the test ROC AUC.

    Used to compare optimizers on an identical synthetic problem.
    """
    features, targets = make_classification(
        n_samples=1000,
        n_features=100,
        n_informative=75,
        random_state=1111,
        n_classes=2,
        class_sep=2.5,
    )
    targets = one_hot(targets)

    # Standardize features to zero mean and unit variance.
    features -= np.mean(features, axis=0)
    features /= np.std(features, axis=0)

    X_train, X_test, y_train, y_test = train_test_split(
        features, targets, test_size=0.15, random_state=1111
    )

    network = NeuralNet(
        layers=[
            Dense(128, Parameters(init="uniform")),
            Activation("relu"),
            Dropout(0.5),
            Dense(64, Parameters(init="normal")),
            Activation("relu"),
            Dense(2),
            Activation("softmax"),
        ],
        loss="categorical_crossentropy",
        optimizer=optimizer,
        metric="accuracy",
        batch_size=64,
        max_epochs=10,
    )

    network.fit(X_train, y_train)
    predictions = network.predict(X_test)
    return roc_auc_score(y_test[:, 0], predictions[:, 0])
# Example #4
def test_mlp():
    """Smoke-test the MLP pipeline on the module-level split (ROC AUC >= 0.95)."""
    encoded_train = one_hot(y_train)
    encoded_test = one_hot(y_test)

    mlp = NeuralNet(
        layers=[
            # Wide L2-regularized layer followed by a max-norm constrained one.
            Dense(256, Parameters(init="uniform", regularizers={"W": L2(0.05)})),
            Activation("relu"),
            Dropout(0.5),
            Dense(128, Parameters(init="normal", constraints={"W": MaxNorm()})),
            Activation("relu"),
            Dense(2),
            Activation("softmax"),
        ],
        loss="categorical_crossentropy",
        optimizer=Adadelta(),
        metric="accuracy",
        batch_size=64,
        max_epochs=25,
    )

    mlp.fit(X_train, encoded_train)
    scores = mlp.predict(X_test)
    assert roc_auc_score(encoded_test[:, 0], scores[:, 0]) >= 0.95
# Example #5
def classification():
    """Build, train, and evaluate an MLP on synthetic binary data; print ROC AUC."""
    data, labels = make_classification(
        n_samples=1000,
        n_features=100,
        n_informative=75,
        random_state=1111,
        n_classes=2,
        class_sep=2.5,
    )
    labels = one_hot(labels)

    X_train, X_test, y_train, y_test = train_test_split(
        data, labels, test_size=0.15, random_state=1111
    )

    hidden = [
        Dense(256, Parameters(init='uniform', regularizers={'W': L2(0.05)})),
        Activation('relu'),
        Dropout(0.5),
        Dense(128, Parameters(init='normal', constraints={'W': MaxNorm()})),
        Activation('relu'),
    ]
    output = [
        Dense(2),
        Activation('softmax'),
    ]
    model = NeuralNet(
        layers=hidden + output,
        loss='categorical_crossentropy',
        optimizer=Adadelta(),
        metric='accuracy',
        batch_size=64,
        max_epochs=25,
    )

    model.fit(X_train, y_train)
    predictions = model.predict(X_test)
    print('classification accuracy',
          roc_auc_score(y_test[:, 0], predictions[:, 0]))
# Example #6
# Approx. 15-20 min. per epoch
# Convolutional classifier: two 3x3 same-padding conv layers, a 2x2 max-pool,
# then a dense head with dropout and a 10-way softmax output.
_feature_extractor = [
    Convolution(n_filters=32, filter_shape=(3, 3), padding=(1, 1), stride=(1, 1)),
    Activation('relu'),
    Convolution(n_filters=32, filter_shape=(3, 3), padding=(1, 1), stride=(1, 1)),
    Activation('relu'),
    MaxPooling(pool_shape=(2, 2), stride=(2, 2)),
    Dropout(0.5),
]
_classifier_head = [
    Flatten(),
    Dense(128),
    Activation('relu'),
    Dropout(0.5),
    Dense(10),
    Activation('softmax'),
]
model = NeuralNet(
    layers=_feature_extractor + _classifier_head,
    loss='categorical_crossentropy',
    optimizer=Adadelta(),
    metric='accuracy',
    batch_size=128,
    max_epochs=3,
)

model.fit(X_train, y_train)