def test_mlp_classification():
    """Train an L2-regularized MLP on the module-level split; ROC AUC must be >= 0.95."""
    train_targets = one_hot(y_train)
    test_targets = one_hot(y_test)

    net = NeuralNet(
        layers=[
            Dense(256, Parameters(init="uniform", regularizers={"W": L2(0.05)})),
            Activation("relu"),
            Dropout(0.5),
            Dense(128, Parameters(init="normal", constraints={"W": MaxNorm()})),
            Activation("relu"),
            Dense(2),
            Activation("softmax"),
        ],
        loss="categorical_crossentropy",
        optimizer=Adadelta(),
        metric="accuracy",
        batch_size=64,
        max_epochs=25,
    )
    net.fit(X_train, train_targets)
    scores = net.predict(X_test)
    assert roc_auc_score(test_targets[:, 0], scores[:, 0]) >= 0.95
# Esempio n. 2 (Example 2 — scrape separator)
# 0
def classification():
    """Fit a regularized MLP on a synthetic binary problem and print its ROC AUC."""
    # Generate a random binary classification problem.
    features, labels = make_classification(
        n_samples=1000,
        n_features=100,
        n_informative=75,
        random_state=1111,
        n_classes=2,
        class_sep=2.5,
    )
    labels = one_hot(labels)
    X_train, X_test, y_train, y_test = train_test_split(
        features, labels, test_size=0.15, random_state=1111
    )

    net = NeuralNet(
        layers=[
            Dense(256, Parameters(init="uniform", regularizers={"W": L2(0.05)})),
            Activation("relu"),
            Dropout(0.5),
            Dense(128, Parameters(init="normal", constraints={"W": MaxNorm()})),
            Activation("relu"),
            Dense(2),
            Activation("softmax"),
        ],
        loss="categorical_crossentropy",
        optimizer=Adadelta(),
        metric="accuracy",
        batch_size=64,
        max_epochs=25,
    )
    net.fit(X_train, y_train)
    preds = net.predict(X_test)
    print("classification accuracy", roc_auc_score(y_test[:, 0], preds[:, 0]))
def regression():
    """Fit a small linear-activation MLP on a synthetic regression task and print MSE."""
    # Generate a random regression problem
    X, y = make_regression(
        n_samples=5000,
        n_features=25,
        n_informative=25,
        n_targets=1,
        random_state=100,
        noise=0.05,
    )
    y *= 0.01  # scale targets down (presumably to keep the loss small — TODO confirm)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.1, random_state=1111
    )

    net = NeuralNet(
        layers=[
            Dense(64, Parameters(init="normal")),
            Activation("linear"),
            Dense(32, Parameters(init="normal")),
            Activation("linear"),
            Dense(1),
        ],
        loss="mse",
        optimizer=Adam(),
        metric="mse",
        batch_size=256,
        max_epochs=15,
    )
    net.fit(X_train, y_train)
    preds = net.predict(X_test)
    print("regression mse", mean_squared_error(y_test, preds.flatten()))
# Esempio n. 4 (Example 4 — scrape separator)
# 0
def clasifier(optimizer):
    """Train a standardized MLP with the supplied optimizer and return its ROC AUC.

    NOTE(review): the name is misspelled ("clasifier") but is kept unchanged
    because external callers may reference it.
    """
    X, y = make_classification(
        n_samples=1000,
        n_features=100,
        n_informative=75,
        random_state=1111,
        n_classes=2,
        class_sep=2.5,
    )
    y = one_hot(y)

    # Standardize features (zero mean, unit variance) before splitting.
    X -= np.mean(X, axis=0)
    X /= np.std(X, axis=0)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.15, random_state=1111
    )

    net = NeuralNet(
        layers=[
            Dense(128, Parameters(init="uniform")),
            Activation("relu"),
            Dropout(0.5),
            Dense(64, Parameters(init="normal")),
            Activation("relu"),
            Dense(2),
            Activation("softmax"),
        ],
        loss="categorical_crossentropy",
        optimizer=optimizer,
        metric="accuracy",
        batch_size=64,
        max_epochs=10,
    )
    net.fit(X_train, y_train)
    probs = net.predict(X_test)
    return roc_auc_score(y_test[:, 0], probs[:, 0])
# Esempio n. 5 (Example 5 — scrape separator)
# 0
def test_mlp():
    """Regression MLP smoke test: held-out MSE must stay below 1.0."""
    net = NeuralNet(
        layers=[
            Dense(16, Parameters(init="normal")),
            Activation("linear"),
            Dense(8, Parameters(init="normal")),
            Activation("linear"),
            Dense(1),
        ],
        loss="mse",
        optimizer=Adam(),
        metric="mse",
        batch_size=64,
        max_epochs=150,
    )
    net.fit(X_train, y_train)
    preds = net.predict(X_test)
    assert mean_squared_error(y_test, preds.flatten()) < 1.0
# Esempio n. 6 (Example 6 — scrape separator)
# 0
def mlp_model(n_actions, batch_size=64):
    """Build a single-hidden-layer MLP with `n_actions` linear outputs.

    Trains silently (verbose=False) for one epoch per `fit` call.
    """
    net_layers = [
        Dense(32),
        Activation("relu"),
        Dense(n_actions),
    ]
    return NeuralNet(
        layers=net_layers,
        loss="mse",
        optimizer=Adam(),
        metric="mse",
        batch_size=batch_size,
        max_epochs=1,
        verbose=False,
    )
# Esempio n. 7 (Example 7 — scrape separator)
# 0
def test_mlp():
    """End-to-end classification MLP test; ROC AUC on the test split must reach 0.95."""
    train_labels = one_hot(y_train)
    test_labels = one_hot(y_test)

    stack = [
        Dense(256, Parameters(init="uniform", regularizers={"W": L2(0.05)})),
        Activation("relu"),
        Dropout(0.5),
        Dense(128, Parameters(init="normal", constraints={"W": MaxNorm()})),
        Activation("relu"),
        Dense(2),
        Activation("softmax"),
    ]
    net = NeuralNet(
        layers=stack,
        loss="categorical_crossentropy",
        optimizer=Adadelta(),
        metric="accuracy",
        batch_size=64,
        max_epochs=25,
    )
    net.fit(X_train, train_labels)
    probs = net.predict(X_test)
    assert roc_auc_score(test_labels[:, 0], probs[:, 0]) >= 0.95
# Esempio n. 8 (Example 8 — scrape separator)
# 0
def classification():
    """Train an L2-regularized MLP on synthetic binary data and print its ROC AUC."""
    data, target = make_classification(
        n_samples=1000,
        n_features=100,
        n_informative=75,
        random_state=1111,
        n_classes=2,
        class_sep=2.5,
    )
    target = one_hot(target)
    X_train, X_test, y_train, y_test = train_test_split(
        data, target, test_size=0.15, random_state=1111
    )

    net = NeuralNet(
        layers=[
            Dense(256, Parameters(init="uniform", regularizers={"W": L2(0.05)})),
            Activation("relu"),
            Dropout(0.5),
            Dense(128, Parameters(init="normal", constraints={"W": MaxNorm()})),
            Activation("relu"),
            Dense(2),
            Activation("softmax"),
        ],
        loss="categorical_crossentropy",
        optimizer=Adadelta(),
        metric="accuracy",
        batch_size=64,
        max_epochs=25,
    )
    net.fit(X_train, y_train)
    preds = net.predict(X_test)
    print("classification accuracy", roc_auc_score(y_test[:, 0], preds[:, 0]))
# Esempio n. 9 (Example 9 — scrape separator)
# 0
def addition_nlp(ReccurentLayer):
    """Train a recurrent layer on the binary-addition toy task and print bitwise accuracy.

    Parameters
    ----------
    ReccurentLayer : an already-constructed recurrent layer instance (e.g. RNN/LSTM)
        placed at the bottom of the network. (Name kept as-is, typo and all, for
        compatibility with existing callers.)
    """
    X_train, X_test, y_train, y_test = addition_dataset(8, 5000)

    print(X_train.shape, X_test.shape)
    model = NeuralNet(
        layers=[
            ReccurentLayer,
            TimeDistributedDense(1),
            Activation('sigmoid'),
        ],
        loss='mse',
        optimizer=Adam(),
        metric='mse',
        batch_size=64,
        max_epochs=15,
    )
    model.fit(X_train, y_train)
    # Round sigmoid outputs to bits, then pack bit sequences into bytes so the
    # comparison below is per-number rather than per-bit.
    predictions = np.round(model.predict(X_test))
    predictions = np.packbits(predictions.astype(np.uint8))
    # BUG FIX: `np.int` was deprecated in NumPy 1.20 and removed in 1.24, so the
    # original `astype(np.int)` raises AttributeError on modern NumPy. Use
    # uint8, matching the conversion applied to `predictions` above (packbits'
    # native input dtype).
    y_test = np.packbits(y_test.astype(np.uint8))
    print(accuracy(y_test, predictions))
# Esempio n. 10 (Example 10 — scrape separator)
# 0
# Script fragment: character-level sequence model. `X`, `y`, `text`, and
# `maxlen` are defined elsewhere in the file (not visible in this chunk).
print(X.shape, y.shape)
# LSTM OR RNN
# rnn_layer = RNN(128, return_sequences=False)
# Only the final recurrent state is used (return_sequences=False).
rnn_layer = LSTM(
    128,
    return_sequences=False,
)

# Dense over X.shape[2] units + softmax — presumably one unit per vocabulary
# symbol (the last axis of X); TODO confirm against the dataset builder.
model = NeuralNet(
    layers=[
        rnn_layer,
        # Flatten(),
        # TimeStepSlicer(-1),
        Dense(X.shape[2]),
        Activation('softmax'),
    ],
    loss='categorical_crossentropy',
    optimizer=RMSprop(learning_rate=0.01),
    metric='accuracy',
    batch_size=64,
    max_epochs=1,
    shuffle=False,
)

# Train one epoch per outer iteration, then pick a random seed window of
# `maxlen` characters from `text` (used for sampling further down the file).
for _ in range(25):
    model.fit(X, y)
    start_index = random.randint(0, len(text) - maxlen - 1)

    generated = ''
    sentence = text[start_index:start_index + maxlen]
# Esempio n. 11 (Example 11 — scrape separator)
# 0
# Script fragment: CNN setup for one-hot image classification (10 classes).
# Normalization
# Scale pixel values into [0, 1].
X_train /= 255.
X_test /= 255.

y_train = one_hot(y_train.flatten())
y_test = one_hot(y_test.flatten())
print(X_train.shape, X_test.shape, y_train.shape, y_test.shape)

# Approx. 15-20 min. per epoch
# NOTE(review): this NeuralNet(...) call is truncated — its argument list never
# closes in this chunk. The remaining arguments were lost when the source was
# scraped; do not edit the call until the full original is recovered.
model = NeuralNet(
    layers=[
        Convolution(n_filters=32,
                    filter_shape=(3, 3),
                    padding=(1, 1),
                    stride=(1, 1)),
        Activation('relu'),
        Convolution(n_filters=32,
                    filter_shape=(3, 3),
                    padding=(1, 1),
                    stride=(1, 1)),
        Activation('relu'),
        MaxPooling(pool_shape=(2, 2), stride=(2, 2)),
        Dropout(0.5),
        Flatten(),
        Dense(128),
        Activation('relu'),
        Dropout(0.5),
        Dense(10),
        Activation('softmax'),
    ],
    loss='categorical_crossentropy',
# Esempio n. 12 (Example 12 — scrape separator)
# 0
# Script fragment: duplicate of the CNN setup above, with double-quote style.
# Normalize data
# Scale pixel values into [0, 1].
X_train /= 255.0
X_test /= 255.0

y_train = one_hot(y_train.flatten())
y_test = one_hot(y_test.flatten())
print(X_train.shape, X_test.shape, y_train.shape, y_test.shape)

# Approx. 15-20 min. per epoch
# NOTE(review): this NeuralNet(...) call is also truncated — the argument list
# never closes in this chunk (the scrape splices into an unrelated snippet
# right after `loss=`). Recover the full original before editing.
model = NeuralNet(
    layers=[
        Convolution(n_filters=32,
                    filter_shape=(3, 3),
                    padding=(1, 1),
                    stride=(1, 1)),
        Activation("relu"),
        Convolution(n_filters=32,
                    filter_shape=(3, 3),
                    padding=(1, 1),
                    stride=(1, 1)),
        Activation("relu"),
        MaxPooling(pool_shape=(2, 2), stride=(2, 2)),
        Dropout(0.5),
        Flatten(),
        Dense(128),
        Activation("relu"),
        Dropout(0.5),
        Dense(10),
        Activation("softmax"),
    ],
    loss="categorical_crossentropy",
# Script fragment: another character-level sequence-model setup. `X`, `y`,
# `items_count`, and `text` are defined elsewhere in the file.
maxlen = X.shape[1]
# Truncate the dataset to the first `items_count` samples.
X = X[0:items_count]
y = y[0:items_count]

print(X.shape, y.shape)
# LSTM OR RNN
# rnn_layer = RNN(128, return_sequences=False)
rnn_layer = LSTM(128, return_sequences=False)

# Dense over X.shape[2] units + softmax — presumably one unit per vocabulary
# symbol; TODO confirm against the dataset builder.
model = NeuralNet(
    layers=[
        rnn_layer,
        # Flatten(),
        # TimeStepSlicer(-1),
        Dense(X.shape[2]),
        Activation("softmax"),
    ],
    loss="categorical_crossentropy",
    optimizer=RMSprop(learning_rate=0.01),
    metric="accuracy",
    batch_size=64,
    max_epochs=1,
    shuffle=False,
)

# Train one epoch per outer iteration, then pick a random seed window of
# `maxlen` characters from `text` for sampling further down the file.
for _ in range(25):
    model.fit(X, y)
    start_index = random.randint(0, len(text) - maxlen - 1)

    generated = ""
    sentence = text[start_index : start_index + maxlen]