Exemplo n.º 1
0
def test_ridge():
    """Check sklearn's Ridge against the R ``penalized`` package.

    sklearn's Ridge reproduces the R ``penalized`` package results, but
    differs from glmnet, whose regularisation of beta_0 appears to work
    differently from both ``penalized`` and sklearn.

    glmnet and ElasticNet give results that differ from each other and
    from Ridge/penalized.
    """
    R_code_penalized = """
    library(penalized)

    x = matrix(c(0,1,2,3,0,1,2,4), ncol=2)

    coefficients(
        penalized(c(0,1,2,4), x, lambda1=0,lambda2=1, standardize=F))
    0.04615385  0.40000000  0.63076923

    """
    model = linear_model.Ridge(alpha=1)
    fitted = model.fit(X, Y)

    # Coefficients and intercept must match the R reference output above.
    alae(fitted.coef_, np.array([0.4, 0.63076923]), places=6)
    alae([fitted.intercept_], [0.04615384615384599], places=10)
Exemplo n.º 2
0
def test_ridge():
    """Verify Ridge regression agrees with the R ``penalized`` package.

    The values asserted below come from running the R snippet stored in
    ``R_code_penalized``. Note that glmnet disagrees with both sklearn and
    ``penalized`` — its regularisation of the intercept (beta_0) seems to
    differ — and ElasticNet disagrees with Ridge as well.
    """
    R_code_penalized = """
    library(penalized)

    x = matrix(c(0,1,2,3,0,1,2,4), ncol=2)

    coefficients(
        penalized(c(0,1,2,4), x, lambda1=0,lambda2=1, standardize=F))
    0.04615385  0.40000000  0.63076923

    """
    estimator = linear_model.Ridge(alpha=1)
    estimator.fit(X, Y)

    # Slopes first, then the intercept, each against the R reference values.
    alae(estimator.coef_, np.array([0.4, 0.63076923]), places=6)
    alae([estimator.intercept_], [0.04615384615384599], places=10)
Exemplo n.º 3
0
def test_lasso():
    """Check that Lasso, ElasticNet (l1_ratio=1) and glmnet (alpha=1) agree.

    scikit-learn Lasso objective::

        (1 / (2 * n_samples)) * ||y - Xw||^2_2 + alpha * ||w||_1

    glmnet penalty::

        lambda * [ (1-alpha)/2||beta||_2^2 + alpha||beta||_1. ]

    With glmnet's alpha set to 1 the ridge term vanishes, leaving a pure
    l1 penalty, so the three should coincide.

    The R ``penalized`` function yields different results here
    (intercept 0.2, coefficients [0, 0.8857143]).
    """

    R_code_glmnet = """
 
    fit = glmnet(x, c(0,1,2,4), alpha=1, lambda=1, standardize=F)
    c(fit$a0, fit$beta)
    $s0
    [1] 0.8
    V1 = ( ., V2 0.5428571)
    """

    model = linear_model.Lasso(alpha=1)
    fitted = model.fit(X, Y)

    # Lasso must reproduce the glmnet reference output above.
    alae(fitted.coef_, np.array([0., 0.54285714]), places=6)
    alae([fitted.intercept_], [0.8], places=10)

    # Same fit expressed through ElasticNet with a pure-l1 penalty.
    elastic = linear_model.ElasticNet(l1_ratio=1, alpha=1)
    elastic.fit(X, Y)

    alae([0.8], [elastic.intercept_], places=6)
    alae(elastic.coef_, np.array([0., 0.54285714]), places=6)
Exemplo n.º 4
0
def test_lasso():
    """Lasso, ElasticNet with l1_ratio=1, and glmnet with alpha=1 coincide.

    The scikit-learn Lasso minimises::

        (1 / (2 * n_samples)) * ||y - Xw||^2_2 + alpha * ||w||_1

    while glmnet penalises::

        lambda * [ (1-alpha)/2||beta||_2^2 + alpha||beta||_1. ]

    Setting glmnet's alpha to 1 removes the ridge term, so all three
    formulations reduce to the same l1-penalised problem.

    By contrast, R's ``penalized`` function produces intercept 0.2 with
    coefficients [0, 0.8857143], which does not match.
    """

    R_code_glmnet = """
 
    fit = glmnet(x, c(0,1,2,4), alpha=1, lambda=1, standardize=F)
    c(fit$a0, fit$beta)
    $s0
    [1] 0.8
    V1 = ( ., V2 0.5428571)
    """

    expected_coef = np.array([0., 0.54285714])

    lasso_fit = linear_model.Lasso(alpha=1).fit(X, Y)

    # Lasso against the glmnet reference values.
    alae(lasso_fit.coef_, expected_coef, places=6)
    alae([lasso_fit.intercept_], [0.8], places=10)

    # ElasticNet restricted to a pure-l1 penalty must agree with Lasso.
    enet = linear_model.ElasticNet(l1_ratio=1, alpha=1)
    enet.fit(X, Y)

    alae([0.8], [enet.intercept_], places=6)
    alae(enet.coef_, expected_coef, places=6)