def test_warm_start():
    X, y, _, _ = build_dataset()
    # Fit at a large alpha, then warm-start a refit at a smaller alpha with a
    # small iteration budget.
    clf = ElasticNet(alpha=1.0, max_iter=50, warm_start=True)
    clf.fit(X, y)
    clf.set_params(alpha=0.1)
    clf.fit(X, y)

    # A cold fit at the smaller alpha, given more iterations, should reach the
    # same solution.
    clf2 = ElasticNet(alpha=0.1, max_iter=500)
    clf2.fit(X, y)
    assert_array_almost_equal(clf2.coef_, clf.coef_)
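
# Context sketch (not part of the original test module): these snippets are
# lifted from scikit-learn's coordinate-descent test suite, so the imports and
# the `build_dataset` helper they rely on live in the surrounding file.  The
# setup below is an approximation of that context, enough to run the snippets
# standalone; `build_dataset` here is a stand-in, not a verbatim copy, and the
# `assert_greater`/`assert_equal` helpers used further down come from the
# legacy `sklearn.utils.testing` module (plain `assert` works in their place).
import numpy as np
from numpy.testing import assert_almost_equal, assert_array_almost_equal

from sklearn.linear_model import ElasticNet


def build_dataset(n_samples=50, n_features=200, n_informative_features=10,
                  n_targets=1):
    # Random linear problem with more features than samples and only a few
    # informative features, mirroring what the snippets expect from the
    # original helper.
    random_state = np.random.RandomState(0)
    w = random_state.randn(n_features, n_targets)
    w[n_informative_features:] = 0.0
    X = random_state.randn(n_samples, n_features)
    y = np.dot(X, w)
    X_test = random_state.randn(n_samples, n_features)
    y_test = np.dot(X_test, w)
    return X, y.squeeze(), X_test, y_test.squeeze()
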
def test_enet_toy():
    """
    Test ElasticNet for various parameters of alpha and l1_ratio.

    Actually, the parameters alpha = 0 should not be allowed. However,
    we test it as a border case.

    ElasticNet is tested with and without precomputed Gram matrix
    """

    X = np.array([[-1.], [0.], [1.]])
    Y = [-1, 0, 1]       # just a straight line
    T = [[2.], [3.], [4.]]  # test sample

    # this should be the same as lasso
    clf = ElasticNet(alpha=1e-8, l1_ratio=1.0)
    clf.fit(X, Y)
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [1])
    assert_array_almost_equal(pred, [2, 3, 4])
    assert_almost_equal(clf.dual_gap_, 0)

    clf = ElasticNet(alpha=0.5, l1_ratio=0.3, max_iter=100,
                     precompute=False)
    clf.fit(X, Y)
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [0.50819], decimal=3)
    assert_array_almost_equal(pred, [1.0163, 1.5245, 2.0327], decimal=3)
    assert_almost_equal(clf.dual_gap_, 0)

    clf.set_params(max_iter=100, precompute=True)
    clf.fit(X, Y)  # with Gram
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [0.50819], decimal=3)
    assert_array_almost_equal(pred, [1.0163, 1.5245, 2.0327], decimal=3)
    assert_almost_equal(clf.dual_gap_, 0)

    clf.set_params(max_iter=100, precompute=np.dot(X.T, X))
    clf.fit(X, Y)  # with Gram
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [0.50819], decimal=3)
    assert_array_almost_equal(pred, [1.0163, 1.5245, 2.0327], decimal=3)
    assert_almost_equal(clf.dual_gap_, 0)

    clf = ElasticNet(alpha=0.5, l1_ratio=0.5)
    clf.fit(X, Y)
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [0.45454], 3)
    assert_array_almost_equal(pred, [0.9090, 1.3636, 1.8181], 3)
    assert_almost_equal(clf.dual_gap_, 0)
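
# The expected coefficients above can be checked by hand.  For a single
# zero-mean feature, scikit-learn's elastic-net objective
#     (1 / (2 n)) * ||y - X w||^2 + alpha * l1_ratio * |w|
#         + 0.5 * alpha * (1 - l1_ratio) * w^2
# has the closed-form minimiser
#     w = soft_threshold(X.T y / n, alpha * l1_ratio)
#         / (X.T X / n + alpha * (1 - l1_ratio)).
# A small sketch (not part of the original tests) reproducing 0.50819 and
# 0.45454:
def enet_1d_solution(x, y, alpha, l1_ratio):
    # Closed-form solution for one zero-mean feature (no intercept involved).
    n = len(y)
    rho = np.dot(x, y) / n
    shrunk = np.sign(rho) * max(abs(rho) - alpha * l1_ratio, 0.0)
    return shrunk / (np.dot(x, x) / n + alpha * (1 - l1_ratio))


print(enet_1d_solution(np.array([-1.0, 0.0, 1.0]), np.array([-1.0, 0.0, 1.0]),
                       alpha=0.5, l1_ratio=0.3))   # ~0.50819
print(enet_1d_solution(np.array([-1.0, 0.0, 1.0]), np.array([-1.0, 0.0, 1.0]),
                       alpha=0.5, l1_ratio=0.5))   # ~0.45454
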
def test_enet_small():
    """Toy tests with generated X and Y"""

    # TODO: add \theta prior knowledge here and test the output

    X = np.array([[-1.], [0.], [1.]])
    Y = [-1, 0, 1]          # a straight line
    T = [[2.], [3.], [4.]]  # test sample

    # this should be the same as lasso
    clf = ElasticNet(alpha=1e-8, l1_ratio=1.0)
    clf.fit(X, Y)
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [1])
    assert_array_almost_equal(pred, [2, 3, 4])
    assert_almost_equal(clf.dual_gap_, 0)

    clf = ElasticNet(alpha=0.5, l1_ratio=0.3, max_iter=100,
                     precompute=False)
    clf.fit(X, Y)
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [0.50819], decimal=3)
    assert_array_almost_equal(pred, [1.0163, 1.5245, 2.0327], decimal=3)
    assert_almost_equal(clf.dual_gap_, 0)

    clf.set_params(max_iter=100, precompute=True)
    clf.fit(X, Y)  # with Gram
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [0.50819], decimal=3)
    assert_array_almost_equal(pred, [1.0163, 1.5245, 2.0327], decimal=3)
    assert_almost_equal(clf.dual_gap_, 0)

    clf.set_params(max_iter=100, precompute=np.dot(X.T, X))
    clf.fit(X, Y)  # with Gram
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [0.50819], decimal=3)
    assert_array_almost_equal(pred, [1.0163, 1.5245, 2.0327], decimal=3)
    assert_almost_equal(clf.dual_gap_, 0)

    clf = ElasticNet(alpha=0.5, l1_ratio=0.5)
    clf.fit(X, Y)
    pred = clf.predict(T)
    assert_array_almost_equal(clf.coef_, [0.45454], 3)
    assert_array_almost_equal(pred, [0.9090, 1.3636, 1.8181], 3)
    assert_almost_equal(clf.dual_gap_, 0)
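
# Sketch (not from the original tests) of the three ways `precompute` is used
# above: the solver can skip the Gram matrix, compute it itself, or reuse one
# supplied as X.T @ X.  `fit_intercept=False` keeps the plain Gram matrix
# valid here; in the toy tests above X already has zero mean, so the same
# Gram is the right one there as well.
rng = np.random.RandomState(0)
X_g = rng.randn(30, 5)
y_g = X_g @ rng.randn(5)

gram_coefs = [
    ElasticNet(alpha=0.1, fit_intercept=False, precompute=p,
               tol=1e-8, max_iter=10000).fit(X_g, y_g).coef_
    for p in (False, True, np.dot(X_g.T, X_g))
]
assert_array_almost_equal(gram_coefs[0], gram_coefs[1], decimal=4)
assert_array_almost_equal(gram_coefs[0], gram_coefs[2], decimal=4)
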
def test_warm_start():
    X, y, _, _ = build_dataset()
    # Test that explicit warm restart...
    clf = ElasticNet(alpha=1.0, max_iter=50)
    clf.fit(X, y)

    clf2 = ElasticNet(alpha=0.1, max_iter=50)
    clf2.fit(X, y, coef_init=clf.coef_.copy())

    # ... and implicit warm restart are equivalent.
    clf3 = ElasticNet(alpha=1.0, max_iter=50, warm_start=True)
    clf3.fit(X, y)

    assert_array_almost_equal(clf3.coef_, clf.coef_)

    clf3.set_params(alpha=0.1)
    clf3.fit(X, y)

    assert_array_almost_equal(clf3.coef_, clf2.coef_)
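
# Note: the `coef_init` argument to `fit` above comes from an older
# scikit-learn API; current releases expose only the `warm_start=True` path.
# Sketch (hypothetical data via `make_regression`, not part of the original
# tests) of the usual warm-start pattern: sweep a decreasing grid of alphas
# and let each fit start from the previous `coef_`.
from sklearn.datasets import make_regression

X_path, y_path = make_regression(n_samples=100, n_features=200, random_state=0)

path_model = ElasticNet(warm_start=True, max_iter=10000)
path_coefs = {}
for path_alpha in np.logspace(0, -3, 10):
    path_model.set_params(alpha=path_alpha)
    path_model.fit(X_path, y_path)            # starts from the previous coef_
    path_coefs[path_alpha] = path_model.coef_.copy()
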
def test_warm_start_convergence():
    X, y, _, _ = build_dataset()
    model = ElasticNet(alpha=1e-3, tol=1e-3).fit(X, y)
    n_iter_reference = model.n_iter_

    # This dataset is not trivial enough for the model to converge in one pass.
    assert_greater(n_iter_reference, 2)

    # Check that n_iter_ is invariant to multiple calls to fit
    # when warm_start=False, all else being equal.
    model.fit(X, y)
    n_iter_cold_start = model.n_iter_
    assert_equal(n_iter_cold_start, n_iter_reference)

    # Fit the same model again, using a warm start: the optimizer just performs
    # a single pass before checking that it has already converged
    model.set_params(warm_start=True)
    model.fit(X, y)
    n_iter_warm_start = model.n_iter_
    assert_equal(n_iter_warm_start, 1)
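
# Self-contained sketch (hypothetical `make_regression` data, not part of the
# original tests) of the behaviour the assertions above capture: a cold fit
# needs several passes, while a warm-started refit of an already-converged
# model typically stops after a single pass.
from sklearn.datasets import make_regression

X_ws, y_ws = make_regression(n_samples=100, n_features=200, random_state=0)

warm_model = ElasticNet(alpha=0.5, max_iter=10000).fit(X_ws, y_ws)
print(warm_model.n_iter_)                 # several passes on the cold start

warm_model.set_params(warm_start=True).fit(X_ws, y_ws)
print(warm_model.n_iter_)                 # typically 1: already at the optimum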