Example #1
0
def test_l1_neural_network_regressor_with_proximal_bundle():
    """Fit an MAE (L1) network on scaled Boston housing data with the
    ProximalBundle optimizer and require an R^2 score of at least 0.83.

    NOTE(review): `load_boston` was removed from scikit-learn >= 1.2 —
    this test assumes an older sklearn; confirm the pinned version.
    """
    features, target = load_boston(return_X_y=True)
    scaled = StandardScaler().fit_transform(features)
    split = train_test_split(scaled, target, train_size=0.75,
                             random_state=123456)
    train_X, test_X, train_y, test_y = split

    layers = (FullyConnected(13, 13, relu),
              FullyConnected(13, 1, linear))
    model = NeuralNetworkRegressor(layers,
                                   loss=mean_absolute_error,
                                   optimizer=ProximalBundle,
                                   max_iter=150)
    model.fit(train_X, train_y)

    # Held-out R^2 must clear the regression baseline for this setup.
    assert model.score(test_X, test_y) >= 0.83
Example #2
0
def test_neural_network_regressor_with_stochastic_optimizer():
    """Fit a two-hidden-layer sigmoid network on scaled Boston housing
    data with Adam (lr=0.02) and require an R^2 score of at least 0.84.

    NOTE(review): `load_boston` was removed from scikit-learn >= 1.2 —
    this test assumes an older sklearn; confirm the pinned version.
    """
    features, target = load_boston(return_X_y=True)
    scaled = StandardScaler().fit_transform(features)
    split = train_test_split(scaled, target, train_size=0.75,
                             random_state=1)
    train_X, test_X, train_y, test_y = split

    layers = (FullyConnected(13, 13, sigmoid),
              FullyConnected(13, 13, sigmoid),
              FullyConnected(13, 1, linear))
    model = NeuralNetworkRegressor(layers,
                                   loss=mean_squared_error,
                                   optimizer=Adam,
                                   learning_rate=0.02)
    model.fit(train_X, train_y)

    # Held-out R^2 must clear the regression baseline for this setup.
    assert model.score(test_X, test_y) >= 0.84
Example #3
0
def test_l2_neural_network_regressor_with_stochastic_optimizer():
    """Fit an MSE network on scaled Boston housing data with SGD plus
    Nesterov momentum and require an R^2 score of at least 0.83.

    NOTE(review): `load_boston` was removed from scikit-learn >= 1.2 —
    this test assumes an older sklearn; confirm the pinned version.
    """
    features, target = load_boston(return_X_y=True)
    scaled = StandardScaler().fit_transform(features)
    split = train_test_split(scaled, target, train_size=0.75,
                             random_state=123456)
    train_X, test_X, train_y, test_y = split

    layers = (FullyConnected(13, 13, sigmoid),
              FullyConnected(13, 1, linear))
    model = NeuralNetworkRegressor(layers,
                                   loss=mean_squared_error,
                                   optimizer=StochasticGradientDescent,
                                   learning_rate=0.01,
                                   momentum_type='nesterov',
                                   momentum=0.9)
    model.fit(train_X, train_y)

    # Held-out R^2 must clear the regression baseline for this setup.
    assert model.score(test_X, test_y) >= 0.83
Example #4
0
def test_perceptron_regressor_with_line_search_optimizer():
    """A single linear unit without intercept trained with Newton's method
    is ordinary least-squares linear regression; its learned coefficients
    must match the loss's analytic optimum `x_star()`.

    NOTE(review): `load_boston` was removed from scikit-learn >= 1.2 —
    this test assumes an older sklearn; confirm the pinned version.
    """
    X, y = load_boston(return_X_y=True)

    layers = (FullyConnected(13, 1, linear, fit_intercept=False), )
    model = NeuralNetworkRegressor(layers,
                                   loss=mean_squared_error,
                                   optimizer=Newton)
    model.fit(X, y)

    # Newton on a quadratic loss converges to the closed-form solution.
    assert np.allclose(model.coefs_[0], model.loss.x_star())
Example #5
0
def test_perceptron_ridge_regressor_with_line_search_optimizer():
    """An L2-regularized linear unit without intercept trained with
    Newton's method is ridge regression; its coefficients must match the
    closed form (X^T X + lambda * I)^-1 X^T y.

    NOTE(review): `load_boston` was removed from scikit-learn >= 1.2 —
    this test assumes an older sklearn; confirm the pinned version.
    """
    X, y = load_boston(return_X_y=True)
    reg_strength = 0.1

    layers = (FullyConnected(13, 1, linear,
                             coef_reg=L2(reg_strength),
                             fit_intercept=False), )
    model = NeuralNetworkRegressor(layers,
                                   loss=mean_squared_error,
                                   optimizer=Newton).fit(X, y)

    # Closed-form ridge solution, computed exactly as inv(gram) @ X^T @ y
    # so the float result matches the original comparison bit-for-bit.
    gram = X.T.dot(X) + np.identity(model.loss.ndim) * reg_strength
    expected = np.linalg.inv(gram).dot(X.T).dot(y)

    assert np.allclose(model.coefs_[0].ravel(), expected)