Example #1
0
def classification():
    """Train a regularized MLP on a synthetic binary problem and print
    the held-out ROC AUC.

    NOTE(review): relies on module-level imports (make_classification,
    train_test_split, NeuralNet, ...) not visible in this chunk.
    """
    # Well-separated two-class problem so the example converges quickly.
    features, labels = make_classification(
        n_samples=1000,
        n_features=100,
        n_informative=75,
        random_state=1111,
        n_classes=2,
        class_sep=2.5,
    )
    labels = one_hot(labels)  # softmax head emits one probability per class
    X_train, X_test, y_train, y_test = train_test_split(
        features, labels, test_size=0.15, random_state=1111
    )

    # L2 penalty on the first layer's weights, max-norm constraint on the
    # second layer's, dropout in between.
    model = NeuralNet(
        layers=[
            Dense(256, Parameters(init="uniform", regularizers={"W": L2(0.05)})),
            Activation("relu"),
            Dropout(0.5),
            Dense(128, Parameters(init="normal", constraints={"W": MaxNorm()})),
            Activation("relu"),
            Dense(2),
            Activation("softmax"),
        ],
        loss="categorical_crossentropy",
        optimizer=Adadelta(),
        metric="accuracy",
        batch_size=64,
        max_epochs=25,
    )
    model.fit(X_train, y_train)
    predictions = model.predict(X_test)
    # NOTE(review): label says "accuracy" but the value is ROC AUC.
    print("classification accuracy", roc_auc_score(y_test[:, 0], predictions[:, 0]))
def test_mlp_classification():
    """Smoke test: the regularized MLP should reach >= 0.95 ROC AUC on the
    module-level train/test split."""
    # One-hot targets for the two-unit softmax output.
    onehot_train = one_hot(y_train)
    onehot_test = one_hot(y_test)

    net = NeuralNet(
        layers=[
            Dense(256, Parameters(init="uniform", regularizers={"W": L2(0.05)})),
            Activation("relu"),
            Dropout(0.5),
            Dense(128, Parameters(init="normal", constraints={"W": MaxNorm()})),
            Activation("relu"),
            Dense(2),
            Activation("softmax"),
        ],
        loss="categorical_crossentropy",
        optimizer=Adadelta(),
        metric="accuracy",
        batch_size=64,
        max_epochs=25,
    )
    net.fit(X_train, onehot_train)
    preds = net.predict(X_test)
    # AUC computed on the first class's predicted probability.
    assert roc_auc_score(onehot_test[:, 0], preds[:, 0]) >= 0.95
def regression():
    """Fit a linear-activation MLP on a synthetic regression task and
    print the held-out mean squared error."""
    data, target = make_regression(
        n_samples=5000,
        n_features=25,
        n_informative=25,
        n_targets=1,
        random_state=100,
        noise=0.05,
    )
    target *= 0.01  # shrink target scale so the reported MSE stays small
    X_train, X_test, y_train, y_test = train_test_split(
        data, target, test_size=0.1, random_state=1111
    )

    # All-linear activations: the stack collapses to a linear model, which
    # is sufficient for make_regression's linear ground truth.
    net = NeuralNet(
        layers=[
            Dense(64, Parameters(init="normal")),
            Activation("linear"),
            Dense(32, Parameters(init="normal")),
            Activation("linear"),
            Dense(1),
        ],
        loss="mse",
        optimizer=Adam(),
        metric="mse",
        batch_size=256,
        max_epochs=15,
    )
    net.fit(X_train, y_train)
    preds = net.predict(X_test)
    print("regression mse", mean_squared_error(y_test, preds.flatten()))
def clasifier(optimizer):
    """Train a fixed MLP with the supplied optimizer and return held-out
    ROC AUC on a synthetic binary problem.

    Name typo ("clasifier") kept — callers reference it by this name.
    """
    data, target = make_classification(
        n_samples=1000,
        n_features=100,
        n_informative=75,
        random_state=1111,
        n_classes=2,
        class_sep=2.5,
    )
    target = one_hot(target)

    # Standardize features to zero mean / unit variance.
    data -= np.mean(data, axis=0)
    data /= np.std(data, axis=0)
    X_train, X_test, y_train, y_test = train_test_split(
        data, target, test_size=0.15, random_state=1111
    )

    net = NeuralNet(
        layers=[
            Dense(128, Parameters(init="uniform")),
            Activation("relu"),
            Dropout(0.5),
            Dense(64, Parameters(init="normal")),
            Activation("relu"),
            Dense(2),
            Activation("softmax"),
        ],
        loss="categorical_crossentropy",
        optimizer=optimizer,
        metric="accuracy",
        batch_size=64,
        max_epochs=10,
    )
    net.fit(X_train, y_train)
    preds = net.predict(X_test)
    return roc_auc_score(y_test[:, 0], preds[:, 0])
def clasifier(optimizer):
    """Return held-out ROC AUC of a standard MLP trained with `optimizer`.

    Intended for comparing optimizers on an identical synthetic problem.
    NOTE(review): duplicates an earlier `clasifier` definition in this
    file; at import time this one shadows it. Name typo kept — interface.
    """
    X, y = make_classification(n_samples=1000, n_features=100,
                               n_informative=75, random_state=1111,
                               n_classes=2, class_sep=2.5)
    y = one_hot(y)

    # Feature standardization; centering first leaves the std unchanged,
    # so the result equals (X - mean) / std of the raw data.
    X -= np.mean(X, axis=0)
    X /= np.std(X, axis=0)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.15, random_state=1111)

    hidden = [
        Dense(128, Parameters(init="uniform")),
        Activation("relu"),
        Dropout(0.5),
        Dense(64, Parameters(init="normal")),
        Activation("relu"),
    ]
    head = [Dense(2), Activation("softmax")]
    model = NeuralNet(
        layers=hidden + head,
        loss="categorical_crossentropy",
        optimizer=optimizer,
        metric="accuracy",
        batch_size=64,
        max_epochs=10,
    )
    model.fit(X_train, y_train)
    predictions = model.predict(X_test)
    return roc_auc_score(y_test[:, 0], predictions[:, 0])
def test_mlp():
    """Regression smoke test: a small linear MLP must reach MSE < 1.0 on
    the module-level train/test split."""
    layer_stack = [
        Dense(16, Parameters(init="normal")),
        Activation("linear"),
        Dense(8, Parameters(init="normal")),
        Activation("linear"),
        Dense(1),
    ]
    net = NeuralNet(
        layers=layer_stack,
        loss="mse",
        optimizer=Adam(),
        metric="mse",
        batch_size=64,
        max_epochs=150,
    )
    net.fit(X_train, y_train)
    preds = net.predict(X_test).flatten()
    assert mean_squared_error(y_test, preds) < 1.0
Example #7
0
def test_mlp():
    """Assert a tiny linear-activation MLP fits the module-level
    regression split to MSE below 1.0.

    NOTE(review): duplicate of an earlier `test_mlp` in this file.
    """
    regressor = NeuralNet(
        layers=[
            Dense(16, Parameters(init="normal")),
            Activation("linear"),
            Dense(8, Parameters(init="normal")),
            Activation("linear"),
            Dense(1),
        ],
        loss="mse",
        optimizer=Adam(),
        metric="mse",
        batch_size=64,
        max_epochs=150,
    )
    regressor.fit(X_train, y_train)
    yhat = regressor.predict(X_test)
    error = mean_squared_error(y_test, yhat.flatten())
    assert error < 1.0
Example #8
0
def addition_nlp(ReccurentLayer):
    """Train a recurrent model to add two binary numbers bit-by-bit and
    print byte-level accuracy on the held-out set.

    Parameters
    ----------
    ReccurentLayer : an already-constructed recurrent layer instance
        (parameter name — including the typo — kept for compatibility).
    """
    X_train, X_test, y_train, y_test = addition_dataset(8, 5000)

    print(X_train.shape, X_test.shape)
    model = NeuralNet(
        layers=[
            ReccurentLayer,
            TimeDistributedDense(1),  # one sigmoid output bit per time step
            Activation('sigmoid'),
        ],
        loss='mse',
        optimizer=Adam(),
        metric='mse',
        batch_size=64,
        max_epochs=15,
    )
    model.fit(X_train, y_train)
    # Threshold sigmoid outputs to 0/1 bits, then pack bit vectors into
    # bytes so prediction and target can be compared byte-wise.
    predictions = np.round(model.predict(X_test))
    predictions = np.packbits(predictions.astype(np.uint8))
    # BUG FIX: `np.int` was removed in NumPy 1.24 (deprecated since 1.20),
    # so `astype(np.int)` raises AttributeError on modern NumPy. Targets
    # are 0/1 bits, so uint8 — the dtype np.packbits expects — is an
    # exact, behavior-identical replacement.
    y_test = np.packbits(y_test.astype(np.uint8))
    print(accuracy(y_test, predictions))
Example #9
0
def test_mlp():
    """Classification smoke test: regularized MLP must reach >= 0.95 ROC
    AUC on the module-level split.

    NOTE(review): duplicates earlier `test_mlp` definitions in this file.
    """
    train_targets = one_hot(y_train)
    test_targets = one_hot(y_test)

    stack = [
        Dense(256, Parameters(init="uniform", regularizers={"W": L2(0.05)})),
        Activation("relu"),
        Dropout(0.5),
        Dense(128, Parameters(init="normal", constraints={"W": MaxNorm()})),
        Activation("relu"),
        Dense(2),
        Activation("softmax"),
    ]
    clf = NeuralNet(
        layers=stack,
        loss="categorical_crossentropy",
        optimizer=Adadelta(),
        metric="accuracy",
        batch_size=64,
        max_epochs=25,
    )
    clf.fit(X_train, train_targets)
    scores = clf.predict(X_test)
    assert roc_auc_score(test_targets[:, 0], scores[:, 0]) >= 0.95
def classification():
    """Train a regularized MLP on a synthetic two-class problem and print
    held-out ROC AUC.

    NOTE(review): duplicates an earlier `classification` definition in
    this file.
    """
    X, y = make_classification(n_samples=1000, n_features=100,
                               n_informative=75, random_state=1111,
                               n_classes=2, class_sep=2.5)
    y = one_hot(y)  # one-hot targets for the softmax head
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.15, random_state=1111)

    model = NeuralNet(
        layers=[
            # L2-regularized first layer, max-norm-constrained second.
            Dense(256, Parameters(init='uniform',
                                  regularizers={'W': L2(0.05)})),
            Activation('relu'),
            Dropout(0.5),
            Dense(128, Parameters(init='normal',
                                  constraints={'W': MaxNorm()})),
            Activation('relu'),
            Dense(2),
            Activation('softmax'),
        ],
        loss='categorical_crossentropy',
        optimizer=Adadelta(),
        metric='accuracy',
        batch_size=64,
        max_epochs=25,
    )
    model.fit(X_train, y_train)
    predictions = model.predict(X_test)
    # NOTE(review): label says "accuracy" but the value is ROC AUC.
    print('classification accuracy',
          roc_auc_score(y_test[:, 0], predictions[:, 0]))
def test_mlp():
    """Assert the regularized classification MLP reaches >= 0.95 ROC AUC
    on the module-level split.

    NOTE(review): yet another duplicate `test_mlp`; only the last
    definition in the file survives at import time.
    """
    oh_train, oh_test = one_hot(y_train), one_hot(y_test)

    mlp = NeuralNet(
        layers=[
            Dense(256, Parameters(init='uniform',
                                  regularizers={'W': L2(0.05)})),
            Activation('relu'),
            Dropout(0.5),
            Dense(128, Parameters(init='normal',
                                  constraints={'W': MaxNorm()})),
            Activation('relu'),
            Dense(2),
            Activation('softmax'),
        ],
        loss='categorical_crossentropy',
        optimizer=Adadelta(),
        metric='accuracy',
        batch_size=64,
        max_epochs=25,
    )
    mlp.fit(X_train, oh_train)
    probs = mlp.predict(X_test)
    assert roc_auc_score(oh_test[:, 0], probs[:, 0]) >= 0.95
Example #12
0
    optimizer=RMSprop(learning_rate=0.01),
    metric='accuracy',
    batch_size=64,
    max_epochs=1,
    shuffle=False,
)

# Character-level text generation: alternate one training pass with one
# sampling pass, 25 times. Relies on globals defined earlier in the
# original script: model, X, y, text, maxlen, chars, char_indices,
# indices_char, sample.
for _ in range(25):
    model.fit(X, y)
    # Pick a random seed window from the corpus.
    start_index = random.randint(0, len(text) - maxlen - 1)

    generated = ''
    sentence = text[start_index:start_index + maxlen]
    generated += sentence
    print('----- Generating with seed: "' + sentence + '"')
    sys.stdout.write(generated)
    for i in range(100):
        # One-hot encode the current window. NOTE(review): a full batch of
        # 64 is allocated but only row 0 is populated/used — presumably to
        # match the model's fixed batch size; confirm.
        x = np.zeros((64, maxlen, len(chars)))
        for t, char in enumerate(sentence):
            x[0, t, char_indices[char]] = 1.
        preds = model.predict(x)[0]
        # Temperature-based sampling (0.5 = fairly conservative).
        next_index = sample(preds, 0.5)
        next_char = indices_char[next_index]

        generated += next_char
        # Slide the window one character forward.
        sentence = sentence[1:] + next_char

        sys.stdout.write(next_char)
        sys.stdout.flush()
    print()
Example #13
0
    layers=[
        Convolution(n_filters=32,
                    filter_shape=(3, 3),
                    padding=(1, 1),
                    stride=(1, 1)),
        Activation('relu'),
        Convolution(n_filters=32,
                    filter_shape=(3, 3),
                    padding=(1, 1),
                    stride=(1, 1)),
        Activation('relu'),
        MaxPooling(pool_shape=(2, 2), stride=(2, 2)),
        Dropout(0.5),
        Flatten(),
        Dense(128),
        Activation('relu'),
        Dropout(0.5),
        Dense(10),
        Activation('softmax'),
    ],
    loss='categorical_crossentropy',
    optimizer=Adadelta(),
    metric='accuracy',
    batch_size=128,
    max_epochs=3,
)

# Train the CNN defined above (definition truncated in this chunk) and
# report held-out accuracy.
model.fit(X_train, y_train)
predictions = model.predict(X_test)
print(accuracy(y_test, predictions))
# One-hot encode test labels for the 10-way softmax head.
y_test = one_hot(y_test.flatten())
print(X_train.shape, X_test.shape, y_train.shape, y_test.shape)

# Approx. 15-20 min. per epoch
# Small VGG-style ConvNet: two 3x3 conv layers with same-padding, 2x2
# max-pooling, then a dense classifier with dropout.
model = NeuralNet(
    layers=[
        Convolution(n_filters=32, filter_shape=(3, 3), padding=(1, 1), stride=(1, 1)),
        Activation('relu'),
        Convolution(n_filters=32, filter_shape=(3, 3), padding=(1, 1), stride=(1, 1)),
        Activation('relu'),
        MaxPooling(pool_shape=(2, 2), stride=(2, 2)),
        Dropout(0.5),

        Flatten(),
        Dense(128),
        Activation('relu'),
        Dropout(0.5),
        Dense(10),  # 10 classes
        Activation('softmax'),
    ],
    loss='categorical_crossentropy',
    optimizer=Adadelta(),
    metric='accuracy',
    batch_size=128,
    max_epochs=3,
)

model.fit(X_train, y_train)
predictions = model.predict(X_test)
print(accuracy(y_test, predictions))
    metric='accuracy',
    batch_size=64,
    max_epochs=1,
    shuffle=False,

)

# Character-level text generation loop (duplicate of the earlier one):
# alternate one training pass with one sampling pass, 25 times. Relies on
# globals from the truncated script above: model, X, y, text, maxlen,
# chars, char_indices, indices_char, sample.
for _ in range(25):
    model.fit(X, y)
    # Random seed window from the corpus.
    start_index = random.randint(0, len(text) - maxlen - 1)

    generated = ''
    sentence = text[start_index: start_index + maxlen]
    generated += sentence
    print('----- Generating with seed: "' + sentence + '"')
    sys.stdout.write(generated)
    for i in range(100):
        # One-hot encode the window; NOTE(review): only row 0 of the
        # 64-row batch is filled — presumably padding to the model's
        # fixed batch size; confirm.
        x = np.zeros((64, maxlen, len(chars)))
        for t, char in enumerate(sentence):
            x[0, t, char_indices[char]] = 1.
        preds = model.predict(x)[0]
        next_index = sample(preds, 0.5)  # temperature 0.5 sampling
        next_char = indices_char[next_index]

        generated += next_char
        # Slide the window forward by one character.
        sentence = sentence[1:] + next_char

        sys.stdout.write(next_char)
        sys.stdout.flush()
    print()