shape=shape,
                                                r2=.75,
                                                random_seed=1)
# Flatten the 3D images into an (n_samples x n_features) design matrix,
# then split train/test at a fixed cut-off of 100 samples.
n_train = 100
X = X3d.reshape((n_samples, np.prod(shape)))
Xtr, ytr = X[:n_train, :], y[:n_train]
Xte, yte = X[n_train:, :], y[n_train:]
alpha = 1.  # global penalty

###############################################################################
# Estimators

# Fit RidgeRegression.
# NOTE(review): the original fitted the model twice (a bare rr.fit(...)
# immediately followed by rr.fit(...).predict(...)); a single fit is
# sufficient and halves the work.
rr = estimators.RidgeRegression(l=alpha)
yte_pred_rr = rr.fit(Xtr, ytr).predict(Xte)

# Fit GraphNet
# NOTE(review): the original weights were (.33, .33, 33) -- the bare "33"
# made the GraphNet penalty 100x the l1/l2 penalties, while the parallel TV
# example below uses (.33, .33, .33). This looks like a dropped decimal
# point; restored to .33 so the three penalties share alpha equally.
l1, l2, gn = alpha * np.array((.33, .33, .33))  # l1, l2, gn penalties
A = sparse.vstack(nesterov_tv.linear_operator_from_shape(shape))
enetgn = estimators.LinearRegressionL1L2GraphNet(l1, l2, gn, A)
yte_pred_enetgn = enetgn.fit(Xtr, ytr).predict(Xte)

# Fit LinearRegressionL1L2TV
# Equal weighting so the l1, l2 and TV penalties split the global alpha.
l1, l2, tv = alpha * np.array((.33, .33, .33))  # l1, l2, tv penalties
# Linear operator encoding the total-variation structure of the image grid.
Atv = nesterov_tv.linear_operator_from_shape(shape)
enettv = estimators.LinearRegressionL1L2TV(l1,
                                           l2,
                                           tv,
# Exemplo n.º 2  (scraper artifact: separator between unrelated examples)
# 0
alpha = 1.  # global penalty shared by every model below

###############################################################################
## Models
###############################################################################
# Ordered mapping model-name -> estimator; sklearn variants are only
# registered when sklearn is importable (has_sklearn).
MODELS = collections.OrderedDict()

## l2 + grad_descnt
if has_sklearn:
    MODELS["l2__sklearn"] = \
        sklearn.linear_model.Ridge(alpha=alpha,
                                   fit_intercept=False)

# Parsimony: minimize f(beta, X, y) = - loglik + alpha/2 * ||beta||^2_2
# NOTE(review): the original comment said ||beta||_1, which is the lasso
# penalty and does not match the RidgeRegression (l2) estimator below.
MODELS["l2__grad_descnt"] = \
    estimators.RidgeRegression(l=alpha, mean=False)

if has_sklearn:
    MODELS["l2_inter__sklearn"] = \
        sklearn.linear_model.Ridge(alpha=alpha,
                                   fit_intercept=True)

# penalty_start=1 leaves the first column unpenalized -- presumably an
# explicit intercept column in the design matrix; TODO confirm with caller.
MODELS["l2_inter__grad_descnt"] = \
    estimators.RidgeRegression(l=alpha, mean=False,
                               penalty_start=1)

if has_sklearn:
    # sklearn's Lasso averages the squared loss over samples, hence the
    # alpha / n_train rescaling to match the unaveraged parsimony objective.
    MODELS["l1__sklearn"] = \
        sklearn.linear_model.Lasso(alpha=alpha / n_train,
                                   fit_intercept=False)
MODELS["l1__fista"] = \
# Exemplo n.º 3  (scraper artifact: separator between unrelated examples)
# 0
# Group-lasso linear operator built from the predefined groups/weights.
Agl = gl.linear_operator_from_groups(p, groups=groups, weights=weights)
algorithm = algorithms.proximal.CONESTA(eps=consts.TOLERANCE, max_iter=1200)
# penalty_start=10 leaves the first 10 columns (covariate block) unpenalized.
enet_gl = estimators.LinearRegressionL1L2GL(0.2,
                                            0.2,
                                            0.2,
                                            Agl,
                                            algorithm=algorithm,
                                            penalty_start=10)
yte_pred_enetgl_res = enet_gl.fit(Xtr_res, ytr_res).predict(Xte_res)
# NOTE(review): converted the Python 2 ``print`` statements below to print()
# calls -- they are syntax errors under Python 3 and inconsistent with the
# print() call used in mapper() later in this file.
print(" r carré vaut", r2_score(yte_res, yte_pred_enetgl_res))

#r carré vaut 0.147265167498

#test without group lasso penalty when using the matrix design as predictors

ridge_es = estimators.RidgeRegression(0.05, penalty_start=10)

yte_pred_ridge_res = ridge_es.fit(Xtr_res, ytr_res).predict(Xte_res)
print(" r carré vaut", r2_score(yte_res, yte_pred_ridge_res))

#r carré vaut 0.140534187938

#test without group lasso penalty when using the residualized matrix

ridge_es = estimators.RidgeRegression(0.05, penalty_start=10)

yte_pred_ridge = ridge_es.fit(Xtr, ytr).predict(Xte)
print(" r carré vaut", r2_score(yte, yte_pred_ridge))

#r carré vaut -0.520837542958
def mapper(key, output_collector):
    """Fit one model configuration on a resampled train fold and predict
    the test fold.

    Parameters
    ----------
    key : sequence
        (algo, alpha, l1l2ratio, tvratio), e.g.
        ``'enettv_0.01_0.1_0.2'.split("_")``; the three numeric entries are
        parsed with ``float()``.
    output_collector : object or None
        When truthy, results are handed to ``output_collector.collect``;
        otherwise the result dict is returned.

    Returns
    -------
    dict
        ``{"y_pred": ..., "y_true": ..., "beta": ...}`` -- only when
        ``output_collector`` is falsy; otherwise ``None``.

    Raises
    ------
    ValueError
        If ``key[0]`` is not one of the handled algorithm names.
    """
    import mapreduce as GLOBAL
    # Train/test folds prepared by the map-reduce framework for this split.
    Xtr = GLOBAL.DATA_RESAMPLED["X"][0]
    Xte = GLOBAL.DATA_RESAMPLED["X"][1]
    ytr = GLOBAL.DATA_RESAMPLED["y"][0]
    yte = GLOBAL.DATA_RESAMPLED["y"][1]

    # key = 'enettv_0.01_0.1_0.2'.split("_")
    algo, alpha, l1l2ratio, tvratio = key[0], float(key[1]), float(
        key[2]), float(key[3])

    # Split the global penalty alpha between the TV, l1 and l2 terms.
    # NOTE(review): the remaining mass is computed from (1 - tv), not
    # (1 - tvratio); the two only coincide when alpha == 1. Confirm this is
    # intended before running grids with alpha != 1.
    tv = alpha * tvratio
    l1 = alpha * (1 - tv) * l1l2ratio
    l2 = alpha * (1 - tv) * (1 - l1l2ratio)

    print(key, algo, alpha, l1, l2, tv)

    # Standardize every column except the first (presumably an intercept /
    # covariate column left unpenalized -- TODO confirm against the caller's
    # design matrix). Scaler is fit on train only, then applied to both.
    scaler = preprocessing.StandardScaler().fit(Xtr[:, 1:])
    Xtr[:, 1:] = scaler.transform(Xtr[:, 1:])
    Xte[:, 1:] = scaler.transform(Xte[:, 1:])

    if algo == 'enettv':
        # Elastic net + total variation, solved with CONESTA.
        conesta = algorithms.proximal.CONESTA(max_iter=10000)
        mod = estimators.LinearRegressionL1L2TV(l1,
                                                l2,
                                                tv,
                                                GLOBAL.Atv,
                                                algorithm=conesta,
                                                penalty_start=penalty_start)
        mod.fit(Xtr, ytr.ravel())
        beta = mod.beta

    elif algo == 'enetgn':
        # Elastic net + GraphNet; the third penalty weight reuses the
        # tv-ratio slot of the key.
        fista = algorithms.proximal.FISTA(max_iter=5000)
        mod = estimators.LinearRegressionL1L2GraphNet(
            l1,
            l2,
            tv,
            GLOBAL.Agn,
            algorithm=fista,
            penalty_start=penalty_start)
        mod.fit(Xtr, ytr.ravel())
        beta = mod.beta

    elif algo == 'enet':
        # Plain elastic net; l1l2ratio is passed directly as the penalty.
        fista = algorithms.proximal.FISTA(max_iter=5000)
        mod = estimators.ElasticNet(l1l2ratio,
                                    algorithm=fista,
                                    penalty_start=penalty_start)
        mod.fit(Xtr, ytr.ravel())
        beta = mod.beta

    elif algo == 'Ridge':
        # Parsimony ridge with its default solver.
        mod = estimators.RidgeRegression(l1l2ratio,
                                         penalty_start=penalty_start)
        mod.fit(Xtr, ytr.ravel())
        beta = mod.beta

    elif algo == 'RidgeAGD':
        # Same ridge penalty, explicitly solved by gradient descent.
        mod = estimators.RidgeRegression(
            l1l2ratio,
            algorithm=gradient.GradientDescent(max_iter=1000),
            penalty_start=penalty_start)
        mod.fit(Xtr, ytr.ravel())
        beta = mod.beta

    elif algo == 'linearSklearn':
        mod = linear_model.LinearRegression(fit_intercept=False)
        mod.fit(Xtr, ytr.ravel())
        # Reshape sklearn's 1-d coef_ into a column vector to match the
        # parsimony estimators' beta layout.
        beta = mod.coef_
        beta = beta.reshape(beta.shape[0], 1)

    elif algo == 'SkRidge':
        mod = linear_model.Ridge(alpha=l1l2ratio, fit_intercept=False)
        mod.fit(Xtr, ytr.ravel())
        beta = mod.coef_
        beta = beta.reshape(beta.shape[0], 1)

    elif algo == 'SkRidgeInt':
        mod = linear_model.Ridge(alpha=l1l2ratio, fit_intercept=True)
        mod.fit(Xtr, ytr.ravel())
        beta = mod.coef_
        beta = beta.reshape(beta.shape[0], 1)
    else:
        # ValueError is a subclass of Exception, so callers catching the
        # original Exception still work; message spacing also fixed
        # ('Algo%s' -> 'Algo %s').
        raise ValueError('Algo %s not handled' % algo)

    y_pred = mod.predict(Xte)
    ret = dict(y_pred=y_pred, y_true=yte, beta=beta)
    if output_collector:
        output_collector.collect(key, ret)
    else:
        return ret