Code Example #1
def test_cross_val_criterion(model_name, criterion):
    # check the dtype and shape returned by the criterion
    algo = Forward()
    monitor_get_val = Monitor()
    monitor_get_val_grad = Monitor()

    model = models[model_name]
    for log_alpha in dict_list_log_alphas[model_name]:
        criterion.get_val(model,
                          X,
                          y,
                          log_alpha,
                          tol=tol,
                          monitor=monitor_get_val)
        criterion.get_val_grad(model,
                               X,
                               y,
                               log_alpha,
                               algo.get_beta_jac_v,
                               tol=tol,
                               monitor=monitor_get_val_grad)

    obj_val = np.array(monitor_get_val.objs)
    obj_val_grad = np.array(monitor_get_val_grad.objs)

    np.testing.assert_allclose(obj_val, obj_val_grad)
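The test above receives model_name and criterion as arguments, which in pytest is normally wired up with pytest.mark.parametrize. A minimal, self-contained sketch of that pattern; the parameter values below are placeholders, not the project's actual fixtures:

import pytest

# Placeholder parameter grids: in the real test module these would hold
# sparse-ho model names and criterion instances.
MODEL_NAMES = ["lasso", "wlasso"]
CRITERIA = ["held_out_mse", "cross_val"]


@pytest.mark.parametrize("criterion", CRITERIA)
@pytest.mark.parametrize("model_name", MODEL_NAMES)
def test_cross_val_criterion(model_name, criterion):
    # pytest runs this body once per (model_name, criterion) combination
    assert model_name in MODEL_NAMES
    assert criterion in CRITERIA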
Code Example #2
def test_grad_search(model, crit):
    """check that the paths are the same in the line search"""
    n_outer = 2

    criterion = HeldOutLogistic(X_val, y_val, model)
    monitor1 = Monitor()
    algo = Forward()
    grad_search(algo, criterion, log_alpha, monitor1, n_outer=n_outer,
                tol=tol)

    criterion = HeldOutLogistic(X_val, y_val, model)
    monitor2 = Monitor()
    algo = Implicit()
    grad_search(algo, criterion, log_alpha, monitor2, n_outer=n_outer,
                tol=tol)

    criterion = HeldOutLogistic(X_val, y_val, model)
    monitor3 = Monitor()
    algo = ImplicitForward(tol_jac=tol, n_iter_jac=5000)
    grad_search(algo, criterion, log_alpha, monitor3, n_outer=n_outer,
                tol=tol)

    assert np.allclose(
        np.array(monitor1.log_alphas), np.array(monitor3.log_alphas))
    assert np.allclose(
        np.array(monitor1.grads), np.array(monitor3.grads), atol=1e-4)
    assert np.allclose(
        np.array(monitor1.objs), np.array(monitor3.objs))
    assert not np.allclose(
        np.array(monitor1.times), np.array(monitor3.times))
Code Example #3
def test_grid_search():
    monitor = Monitor()
    grid_searchCV(X_train,
                  y_train,
                  log_alphas,
                  X_test,
                  y_test,
                  X_test,
                  y_test,
                  tol,
                  monitor,
                  sk=False)
    monitor_sparse = Monitor()
    grid_searchCV(X_train_s,
                  y_train,
                  log_alphas,
                  X_test_s,
                  y_test,
                  X_test_s,
                  y_test,
                  tol,
                  monitor_sparse,
                  sk=False)

    assert np.allclose(monitor.objs, monitor_sparse.objs)
Code Example #4
def test_grad_search(Optimizer, model, crit):
    """check that the paths are the same in the line search"""
    n_outer = 2

    criterion = HeldOutMSE(idx_train, idx_val)
    monitor1 = Monitor()
    algo = Forward()
    optimizer = Optimizer(n_outer=n_outer, tol=1e-16)
    grad_search(algo, criterion, model, optimizer, X, y, alpha0, monitor1)

    criterion = HeldOutMSE(idx_train, idx_val)
    monitor2 = Monitor()
    algo = Implicit()
    optimizer = Optimizer(n_outer=n_outer, tol=1e-16)
    grad_search(algo, criterion, model, optimizer, X, y, alpha0, monitor2)

    criterion = HeldOutMSE(idx_train, idx_val)
    monitor3 = Monitor()
    algo = ImplicitForward(tol_jac=1e-8, n_iter_jac=5000)
    optimizer = Optimizer(n_outer=n_outer, tol=1e-16)
    grad_search(algo, criterion, model, optimizer, X, y, alpha0, monitor3)

    np.testing.assert_allclose(np.array(monitor1.alphas),
                               np.array(monitor3.alphas))
    np.testing.assert_allclose(np.array(monitor1.grads),
                               np.array(monitor3.grads),
                               rtol=1e-5)
    np.testing.assert_allclose(np.array(monitor1.objs),
                               np.array(monitor3.objs))
    assert not np.allclose(np.array(monitor1.times), np.array(monitor3.times))
Code Example #5
def test_grad_search(model):
    # criterion = SURE(
    #     X_train, y_train, model, sigma=sigma_star, X_test=X_test,
    #     y_test=y_test)
    n_outer = 3
    criterion = HeldOutSmoothedHinge(X_val,
                                     y_val,
                                     model,
                                     X_test=None,
                                     y_test=None)
    monitor1 = Monitor()
    algo = Forward()
    grad_search(algo,
                criterion,
                np.log(1e-3),
                monitor1,
                n_outer=n_outer,
                tol=1e-13)

    # criterion = SURE(
    #     X_train, y_train, model, sigma=sigma_star, X_test=X_test,
    #     y_test=y_test)
    criterion = HeldOutSmoothedHinge(X_val,
                                     y_val,
                                     model,
                                     X_test=None,
                                     y_test=None)
    monitor2 = Monitor()
    algo = Implicit()
    grad_search(algo,
                criterion,
                np.log(1e-3),
                monitor2,
                n_outer=n_outer,
                tol=1e-13)

    # criterion = SURE(
    #     X_train, y_train, model, sigma=sigma_star, X_test=X_test,
    #     y_test=y_test)
    criterion = HeldOutSmoothedHinge(X_val,
                                     y_val,
                                     model,
                                     X_test=None,
                                     y_test=None)
    monitor3 = Monitor()
    algo = ImplicitForward(tol_jac=1e-6, n_iter_jac=100)
    grad_search(algo,
                criterion,
                np.log(1e-3),
                monitor3,
                n_outer=n_outer,
                tol=1e-13)

    assert np.allclose(np.array(monitor1.log_alphas),
                       np.array(monitor3.log_alphas))
    assert np.allclose(np.array(monitor1.grads), np.array(monitor3.grads))
    assert np.allclose(np.array(monitor1.objs), np.array(monitor3.objs))
    # assert np.allclose(
    #     np.array(monitor1.objs_test), np.array(monitor3.objs_test))
    assert not np.allclose(np.array(monitor1.times), np.array(monitor3.times))
Code Example #6
def test_our_vs_sklearn():
    monitor_grid = Monitor()
    monitor_grid_sk = Monitor()
    for i in range(n_alphas):
        # one-versus-rest (ovr) logistic regression from scikit-learn
        p_alpha = p_alphas[:, i]
        lr = LogisticRegression(solver='saga',
                                multi_class='ovr',
                                penalty='l1',
                                max_iter=max_iter,
                                random_state=42,
                                fit_intercept=False,
                                warm_start=True,
                                C=1 /
                                (alpha_max * p_alpha[0] * len(idx_train)),
                                tol=tol)
        lr.fit(X[idx_train, :], y[idx_train])
        y_pred_val = lr.predict(X[idx_val, :])
        accuracy_val = sklearn.metrics.accuracy_score(y[idx_val], y_pred_val)
        print("accuracy validation (scikit) %f " % accuracy_val)

        monitor_grid_sk(None, None, acc_val=accuracy_val)
        log_alpha_i = np.log(alpha_max * p_alpha)
        # our one-versus-rest
        val, grad = logit_multiclass.get_val_grad(model, X, y, log_alpha_i,
                                                  None, monitor_grid, tol)
        print("accuracy validation (our) %f " % monitor_grid.acc_vals[-1])

    np.testing.assert_allclose(np.array(monitor_grid.acc_vals),
                               np.array(monitor_grid_sk.acc_vals))
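The scikit-learn call above encodes the penalty through C = 1 / (alpha_max * p_alpha[0] * len(idx_train)), i.e. C = 1 / (alpha * n_train). A tiny self-contained helper making that conversion explicit; the function names are purely illustrative:

import numpy as np

# Conversion between the per-sample L1 penalty alpha used in these snippets
# and scikit-learn's inverse regularization strength C.
def alpha_to_C(alpha, n_train):
    return 1.0 / (alpha * n_train)


def C_to_alpha(C, n_train):
    return 1.0 / (C * n_train)


n_train = 100
alpha = 0.05
C = alpha_to_C(alpha, n_train)
assert np.isclose(C_to_alpha(C, n_train), alpha)
print("alpha=%.3f  ->  C=%.3f" % (alpha, C))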
Code Example #7
def test_grad_search(model, crit):
    """check that the paths are the same in the line search"""
    if crit == 'cv':
        n_outer = 2
        criterion = HeldOutMSE(idx_train, idx_val)
    else:
        n_outer = 2
        criterion = SmoothedSURE(sigma_star)
    # TODO MM@QBE if else scheme surprising

    criterion = HeldOutMSE(idx_train, idx_val)
    monitor1 = Monitor()
    algo = Forward()
    grad_search(algo,
                criterion,
                model,
                X,
                y,
                log_alpha,
                monitor1,
                n_outer=n_outer,
                tol=1e-16)

    criterion = HeldOutMSE(idx_train, idx_val)
    monitor2 = Monitor()
    algo = Implicit()
    grad_search(algo,
                criterion,
                model,
                X,
                y,
                log_alpha,
                monitor2,
                n_outer=n_outer,
                tol=1e-16)

    criterion = HeldOutMSE(idx_train, idx_val)
    monitor3 = Monitor()
    algo = ImplicitForward(tol_jac=1e-8, n_iter_jac=5000)
    grad_search(algo,
                criterion,
                model,
                X,
                y,
                log_alpha,
                monitor3,
                n_outer=n_outer,
                tol=1e-16)

    np.testing.assert_allclose(np.array(monitor1.log_alphas),
                               np.array(monitor3.log_alphas))
    np.testing.assert_allclose(np.array(monitor1.grads),
                               np.array(monitor3.grads),
                               atol=1e-8)
    np.testing.assert_allclose(np.array(monitor1.objs),
                               np.array(monitor3.objs))
    assert not np.allclose(np.array(monitor1.times), np.array(monitor3.times))
Code Example #8
def test_grad_search(model, crit):
    """check that the paths are the same in the line search"""
    if crit == 'cv':
        n_outer = 2
        criterion = HeldOutMSE(X_val,
                               y_val,
                               model,
                               X_test=X_test,
                               y_test=y_test)
    else:
        n_outer = 2
        criterion = SURE(X_train,
                         y_train,
                         model,
                         sigma=sigma_star,
                         X_test=X_test,
                         y_test=y_test)

    criterion = HeldOutMSE(X_val, y_val, model, X_test=X_test, y_test=y_test)
    monitor1 = Monitor()
    algo = Forward()
    grad_search(algo,
                criterion,
                log_alpha,
                monitor1,
                n_outer=n_outer,
                tol=1e-16)

    criterion = HeldOutMSE(X_val, y_val, model, X_test=X_test, y_test=y_test)
    monitor2 = Monitor()
    algo = Implicit()
    grad_search(algo,
                criterion,
                log_alpha,
                monitor2,
                n_outer=n_outer,
                tol=1e-16)

    criterion = HeldOutMSE(X_val, y_val, model, X_test=X_test, y_test=y_test)
    monitor3 = Monitor()
    algo = ImplicitForward(tol_jac=1e-8, n_iter_jac=5000)
    grad_search(algo,
                criterion,
                log_alpha,
                monitor3,
                n_outer=n_outer,
                tol=1e-16)

    assert np.allclose(np.array(monitor1.log_alphas),
                       np.array(monitor3.log_alphas))
    assert np.allclose(np.array(monitor1.grads), np.array(monitor3.grads))
    assert np.allclose(np.array(monitor1.objs), np.array(monitor3.objs))
    assert np.allclose(np.array(monitor1.objs_test),
                       np.array(monitor3.objs_test))
    assert not np.allclose(np.array(monitor1.times), np.array(monitor3.times))
Code Example #9
File: test_elastic.py  Project: svaiter/sparse-ho
def test_grad_search():

    n_outer = 3
    criterion = HeldOutMSE(X_val, y_val, model, X_test=None, y_test=None)
    monitor1 = Monitor()
    algo = Forward()
    grad_search(algo,
                criterion,
                np.array([log_alpha1, log_alpha2]),
                monitor1,
                n_outer=n_outer,
                tol=1e-16)

    criterion = HeldOutMSE(X_val, y_val, model, X_test=None, y_test=None)
    monitor2 = Monitor()
    algo = Implicit()
    grad_search(algo,
                criterion,
                np.array([log_alpha1, log_alpha2]),
                monitor2,
                n_outer=n_outer,
                tol=1e-16)

    criterion = HeldOutMSE(X_val, y_val, model, X_test=None, y_test=None)
    monitor3 = Monitor()
    algo = ImplicitForward(tol_jac=1e-3, n_iter_jac=1000)
    grad_search(algo,
                criterion,
                np.array([log_alpha1, log_alpha2]),
                monitor3,
                n_outer=n_outer,
                tol=1e-16)

    assert np.allclose(np.array(monitor1.log_alphas),
                       np.array(monitor3.log_alphas))
    assert np.allclose(np.array(monitor1.grads), np.array(monitor3.grads))
    assert np.allclose(np.array(monitor1.objs), np.array(monitor3.objs))
    assert not np.allclose(np.array(monitor1.times), np.array(monitor3.times))

    assert np.allclose(np.array(monitor1.log_alphas),
                       np.array(monitor2.log_alphas),
                       atol=1e-2)
    assert np.allclose(np.array(monitor1.grads),
                       np.array(monitor2.grads),
                       atol=1e-2)
    assert np.allclose(np.array(monitor1.objs),
                       np.array(monitor2.objs),
                       atol=1e-2)
    assert not np.allclose(np.array(monitor1.times), np.array(monitor2.times))
Code Example #10
def test_grad_search(model):
    n_outer = 3
    criterion = HeldOutSmoothedHinge(idx_train, idx_val)
    monitor1 = Monitor()
    algo = Forward()
    grad_search(algo,
                criterion,
                model,
                X,
                y,
                np.log(1e-3),
                monitor1,
                n_outer=n_outer,
                tol=1e-13)

    criterion = HeldOutSmoothedHinge(idx_train, idx_val)
    monitor2 = Monitor()
    algo = Implicit()
    grad_search(algo,
                criterion,
                model,
                X,
                y,
                np.log(1e-3),
                monitor2,
                n_outer=n_outer,
                tol=1e-13)

    criterion = HeldOutSmoothedHinge(idx_train, idx_val)
    monitor3 = Monitor()
    algo = ImplicitForward(tol_jac=1e-6, n_iter_jac=100)
    grad_search(algo,
                criterion,
                model,
                X,
                y,
                np.log(1e-3),
                monitor3,
                n_outer=n_outer,
                tol=1e-13)

    assert np.allclose(np.array(monitor1.log_alphas),
                       np.array(monitor3.log_alphas))
    assert np.allclose(np.array(monitor1.grads), np.array(monitor3.grads))
    assert np.allclose(np.array(monitor1.objs), np.array(monitor3.objs))
    # assert np.allclose(
    #     np.array(monitor1.objs_test), np.array(monitor3.objs_test))
    assert not np.allclose(np.array(monitor1.times), np.array(monitor3.times))
Code Example #11
def test_cross_val_criterion():
    alpha_min = alpha_max / 10
    log_alpha_max = np.log(alpha_max)
    log_alpha_min = np.log(alpha_min)
    max_iter = 10000
    n_alphas = 10
    kf = KFold(n_splits=5, shuffle=True, random_state=56)

    estimator = sklearn.linear_model.Lasso(fit_intercept=False,
                                           max_iter=1000,
                                           warm_start=True)
    monitor_grid = Monitor()
    criterion = CrossVal(X, y, Lasso, cv=kf, estimator=estimator)
    algo = Forward()
    grid_search(algo,
                criterion,
                log_alpha_min,
                log_alpha_max,
                monitor_grid,
                max_evals=n_alphas,
                tol=tol)

    reg = LassoCV(cv=kf,
                  verbose=True,
                  tol=tol,
                  fit_intercept=False,
                  alphas=np.geomspace(alpha_max, alpha_min, num=n_alphas),
                  max_iter=max_iter).fit(X, y)
    reg.score(X, y)
    objs_grid_sk = reg.mse_path_.mean(axis=1)
    # these two sets of objective values should coincide
    assert np.allclose(objs_grid_sk, monitor_grid.objs)
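The reference values come from scikit-learn's LassoCV, whose mse_path_ attribute holds one MSE per (alpha, fold) pair. A self-contained sketch of just that scikit-learn side on synthetic data; the sparse-ho half of the comparison is omitted:

import numpy as np
from sklearn.datasets import make_regression
from sklearn.linear_model import LassoCV
from sklearn.model_selection import KFold

X, y = make_regression(n_samples=100, n_features=30, noise=1.0, random_state=0)
alpha_max = np.max(np.abs(X.T @ y)) / len(y)
alphas = np.geomspace(alpha_max, alpha_max / 10, num=10)
kf = KFold(n_splits=5, shuffle=True, random_state=56)

reg = LassoCV(cv=kf, alphas=alphas, fit_intercept=False).fit(X, y)
# mse_path_ has shape (n_alphas, n_folds); averaging over folds gives the
# per-alpha cross-validation objective the sparse-ho grid is checked against.
objs_grid_sk = reg.mse_path_.mean(axis=1)
print(objs_grid_sk.shape)  # (10,)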
Code Example #12
File: bayesian.py  Project: agramfort/sparse-ho
def hyperopt_lasso(
        X_train, y_train, log_alpha, X_val, y_val, X_test, y_test, tol,
        maxit=1000, max_evals=30, method="bayesian", criterion="cv", sigma=1.0,
        beta_star=None):
    n_samples, n_features = X_train.shape
    alpha_max = np.abs((X_train.T @ y_train)).max() / n_samples

    space = hp.uniform(
        'log_alpha', np.log(alpha_max / 1000), np.log(alpha_max))

    monitor = Monitor()
    warm_start = WarmStart()

    if criterion == "cv":
        def objective(log_alpha):
            value = get_val_grad(
                X_train, y_train, log_alpha, X_val, y_val, X_test, y_test, tol,
                monitor, warm_start, method="hyperopt", maxit=1000,
                model="lasso", beta_star=beta_star)
            return value
    elif criterion == "sure":
        def objective(log_alpha):
            value = get_val_grad(
                X_train, y_train, log_alpha, X_val, y_val, X_test, y_test, tol,
                monitor, warm_start, method="hyperopt", maxit=1000,
                model="lasso", criterion="sure", sigma=sigma,
                beta_star=beta_star)
            return value

    if method == "bayesian":
        best = fmin(objective, space, algo=tpe.suggest, max_evals=max_evals)
    elif method == "random":
        best = fmin(objective, space, algo=rand.suggest, max_evals=max_evals)
    return monitor
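hyperopt_lasso delegates the search itself to hyperopt's fmin over a 1-D hp.uniform space. A minimal, self-contained example of that pattern with a toy objective (no sparse-ho involved), just to show what fmin returns:

import numpy as np
from hyperopt import fmin, hp, tpe

# 1-D search space over log_alpha, as in hyperopt_lasso above.
space = hp.uniform('log_alpha', np.log(1e-4), np.log(1.0))


def objective(log_alpha):
    # toy objective with its minimum at log_alpha = log(1e-2)
    return (log_alpha - np.log(1e-2)) ** 2


best = fmin(objective, space, algo=tpe.suggest, max_evals=30)
print(best)  # e.g. {'log_alpha': -4.6...}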
Code Example #13
File: test_grid_search.py  Project: QB3/sparse-ho-qbe
def test_grid_search():
    max_evals = 5

    monitor_grid = Monitor()
    model = Lasso(estimator=estimator)
    criterion = HeldOutMSE(idx_train, idx_train)
    algo = Forward()
    log_alpha_opt_grid, _ = grid_search(
        algo, criterion, model, X, y, log_alpha_min, log_alpha_max,
        monitor_grid, max_evals=max_evals,
        tol=1e-5, samp="grid")

    monitor_random = Monitor()
    criterion = HeldOutMSE(idx_train, idx_val)
    algo = Forward()
    log_alpha_opt_random, _ = grid_search(
        algo, criterion, model, X, y, log_alpha_min, log_alpha_max,
        monitor_random,
        max_evals=max_evals, tol=1e-5, samp="random")

    assert(monitor_random.log_alphas[
        np.argmin(monitor_random.objs)] == log_alpha_opt_random)
    assert(monitor_grid.log_alphas[
        np.argmin(monitor_grid.objs)] == log_alpha_opt_grid)

    monitor_grid = Monitor()
    model = Lasso(estimator=estimator)

    criterion = SmoothedSURE(sigma=sigma_star)
    algo = Forward()
    log_alpha_opt_grid, _ = grid_search(
        algo, criterion, model, X, y, log_alpha_min, log_alpha_max,
        monitor_grid, max_evals=max_evals,
        tol=1e-5, samp="grid")

    monitor_random = Monitor()
    criterion = SmoothedSURE(sigma=sigma_star)
    algo = Forward()
    log_alpha_opt_random, _ = grid_search(
        algo, criterion, model, X, y, log_alpha_min, log_alpha_max,
        monitor_random,
        max_evals=max_evals, tol=1e-5, samp="random")

    assert(monitor_random.log_alphas[
        np.argmin(monitor_random.objs)] == log_alpha_opt_random)
    assert(monitor_grid.log_alphas[
        np.argmin(monitor_grid.objs)] == log_alpha_opt_grid)
Code Example #14
File: test_utils.py  Project: QB3/sparse-ho
def test_monitor():
    model = Lasso(estimator=estimator)
    criterion = HeldOutMSE(idx_train, idx_val)
    algo = ImplicitForward()
    monitor = Monitor(callback=callback)
    optimizer = LineSearch(n_outer=10, tol=tol)
    grad_search(algo, criterion, model, optimizer, X, y, alpha0, monitor)

    np.testing.assert_allclose(np.array(monitor.objs), np.array(objs))
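This test assumes a module-level objs list filled by callback; the callback signature is presumably the one shown in Code Example #28. A hedged sketch of that setup (treating the first positional argument as the objective value is an assumption based on that snippet):

# Assumed module-level pieces for the test above (not shown in the snippet).
objs = []


def callback(val, grad, mask, dense, alpha):
    # record the outer objective value at each outer iteration
    objs.append(val)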
Code Example #15
def test_grad_search_custom(model, model_custom, crit):
    """check that the paths are the same in the line search"""
    n_outer = 5

    criterion = HeldOutLogistic(idx_val, idx_val)
    monitor = Monitor()
    algo = ImplicitForward(tol_jac=tol, n_iter_jac=5000)
    grad_search(algo,
                criterion,
                model,
                X,
                y,
                log_alpha,
                monitor,
                n_outer=n_outer,
                tol=tol)

    criterion = HeldOutLogistic(idx_val, idx_val)
    monitor_custom = Monitor()
    algo = ImplicitForward(tol_jac=tol, n_iter_jac=5000)
    grad_search(algo,
                criterion,
                model_custom,
                X,
                y,
                log_alpha,
                monitor_custom,
                n_outer=n_outer,
                tol=tol)

    np.testing.assert_allclose(np.array(monitor.log_alphas),
                               np.array(monitor_custom.log_alphas),
                               atol=1e-3)
    np.testing.assert_allclose(np.array(monitor.grads),
                               np.array(monitor_custom.grads),
                               atol=1e-4)
    np.testing.assert_allclose(np.array(monitor.objs),
                               np.array(monitor_custom.objs),
                               atol=1e-5)
    assert not np.allclose(np.array(monitor.times),
                           np.array(monitor_custom.times))
Code Example #16
def parallel_function(
        dataset_name, method, tol=1e-8, n_outer=15):

    # load data
    X, y = fetch_libsvm(dataset_name)
    # subsample the samples and the features
    n_samples, n_features = dict_subsampling[dataset_name]
    t_max = dict_t_max[dataset_name]
    # t_max = 3600

    X, y = clean_dataset(X, y, n_samples, n_features)
    alpha_max, n_classes = get_alpha_max(X, y)
    log_alpha_max = np.log(alpha_max)  # the alpha_max value may need adjusting

    algo = ImplicitForward(None, n_iter_jac=2000)
    estimator = LogisticRegression(
        C=1, fit_intercept=False, warm_start=True, max_iter=30, verbose=False)

    model = SparseLogreg(estimator=estimator)
    idx_train, idx_val, idx_test = get_splits(X, y)

    logit_multiclass = LogisticMulticlass(
        idx_train, idx_val, algo, idx_test=idx_test)

    monitor = Monitor()
    if method == "implicit_forward":
        log_alpha0 = np.ones(n_classes) * np.log(0.1 * alpha_max)
        optimizer = LineSearch(n_outer=100)
        grad_search(
            algo, logit_multiclass, model, optimizer, X, y, log_alpha0,
            monitor)
    elif method.startswith(('random', 'bayesian')):
        max_evals = dict_max_eval[dataset_name]
        log_alpha_min = np.log(alpha_max) - 7
        hyperopt_wrapper(
            algo, logit_multiclass, model, X, y, log_alpha_min, log_alpha_max,
            monitor, max_evals=max_evals, tol=tol, t_max=t_max, method=method,
            size_space=n_classes)
    elif method == 'grid_search':
        n_alphas = 20
        p_alphas = np.geomspace(1, 0.001, n_alphas)
        p_alphas = np.tile(p_alphas, (n_classes, 1))
        for i in range(n_alphas):
            log_alpha_i = np.log(alpha_max * p_alphas[:, i])
            logit_multiclass.get_val(
                model, X, y, log_alpha_i, None, monitor, tol)

    monitor.times = np.array(monitor.times).copy()
    monitor.objs = np.array(monitor.objs).copy()
    monitor.acc_vals = np.array(monitor.acc_vals).copy()
    monitor.acc_tests = np.array(monitor.acc_tests).copy()
    monitor.log_alphas = np.array(monitor.log_alphas).copy()
    return (
        dataset_name, method, tol, n_outer, monitor.times, monitor.objs,
        monitor.acc_vals, monitor.acc_tests, monitor.log_alphas, log_alpha_max,
        n_samples, n_features, n_classes)
Code Example #17
def test_cross_val_criterion(model_name, XX):
    model = models[model_name]
    alpha_min = alpha_max / 10
    max_iter = 10000
    n_alphas = 10
    kf = KFold(n_splits=5, shuffle=True, random_state=56)

    monitor_grid = Monitor()
    if model_name.startswith("lasso"):
        sub_crit = HeldOutMSE(None, None)
    else:
        sub_crit = HeldOutLogistic(None, None)
    criterion = CrossVal(sub_crit, cv=kf)
    grid_search(criterion,
                model,
                XX,
                y,
                alpha_min,
                alpha_max,
                monitor_grid,
                max_evals=n_alphas,
                tol=tol)

    if model_name.startswith("lasso"):
        reg = linear_model.LassoCV(cv=kf,
                                   verbose=True,
                                   tol=tol,
                                   fit_intercept=False,
                                   alphas=np.geomspace(alpha_max,
                                                       alpha_min,
                                                       num=n_alphas),
                                   max_iter=max_iter).fit(X, y)
    else:
        reg = linear_model.LogisticRegressionCV(
            cv=kf,
            verbose=True,
            tol=tol,
            fit_intercept=False,
            Cs=len(idx_train) /
            np.geomspace(alpha_max, alpha_min, num=n_alphas),
            max_iter=max_iter,
            penalty='l1',
            solver='liblinear').fit(X, y)
    reg.score(XX, y)
    if model_name.startswith("lasso"):
        objs_grid_sk = reg.mse_path_.mean(axis=1)
    else:
        objs_grid_sk = reg.scores_[1.0].mean(axis=1)
    # these two sets of objective values should coincide
    np.testing.assert_allclose(objs_grid_sk, monitor_grid.objs)
Code Example #18
def test_grad_search(model, crit):
    """check that the paths are the same in the line search"""
    if crit == 'MSE':
        n_outer = 2
        criterion = HeldOutMSE(idx_train, idx_val)
    else:
        n_outer = 2
        criterion = FiniteDiffMonteCarloSure(sigma_star)
    # TODO MM@QBE if else scheme surprising

    criterion = HeldOutMSE(idx_train, idx_val)
    monitor1 = Monitor()
    algo = Forward()
    optimizer = LineSearch(n_outer=n_outer, tol=1e-16)
    grad_search(algo, criterion, model, optimizer, X, y, alpha0, monitor1)

    criterion = HeldOutMSE(idx_train, idx_val)
    monitor2 = Monitor()
    algo = Implicit()
    optimizer = LineSearch(n_outer=n_outer, tol=1e-16)
    grad_search(algo, criterion, model, optimizer, X, y, alpha0, monitor2)

    criterion = HeldOutMSE(idx_train, idx_val)
    monitor3 = Monitor()
    algo = ImplicitForward(tol_jac=1e-8, n_iter_jac=5000)
    optimizer = LineSearch(n_outer=n_outer, tol=1e-16)
    grad_search(algo, criterion, model, optimizer, X, y, alpha0, monitor3)

    np.testing.assert_allclose(np.array(monitor1.alphas),
                               np.array(monitor3.alphas))
    np.testing.assert_allclose(np.array(monitor1.grads),
                               np.array(monitor3.grads),
                               rtol=1e-5)
    np.testing.assert_allclose(np.array(monitor1.objs),
                               np.array(monitor3.objs))
    assert not np.allclose(np.array(monitor1.times), np.array(monitor3.times))
Code Example #19
def test_monitor():
    model = Lasso(estimator=estimator)
    criterion = HeldOutMSE(idx_train, idx_val)
    algo = ImplicitForward()
    monitor = Monitor(callback=callback)

    grad_search(algo,
                criterion,
                model,
                X,
                y,
                np.log(alpha_max / 10),
                monitor,
                n_outer=10,
                tol=tol)

    np.testing.assert_allclose(np.array(monitor.objs), np.array(objs))
Code Example #20
model = SparseLogreg(estimator=estimator)
logit_multiclass = LogisticMulticlass(
    idx_train, idx_val, algo, idx_test=idx_test)


alpha_max, n_classes = alpha_max_multiclass(X, y)
tol = 1e-5


n_alphas = 10
p_alphas = np.geomspace(1, 0.001, n_alphas)
p_alphas = np.tile(p_alphas, (n_classes, 1))

print("###################### GRID SEARCH ###################")
monitor_grid = Monitor()
for i in range(n_alphas):
    log_alpha_i = np.log(alpha_max * p_alphas[:, i])
    logit_multiclass.get_val(
        model, X, y, log_alpha_i, None, monitor_grid, tol)

# halt the script here: the grad-search section below is not executed
raise SystemExit("stopping after the grid search")
print("###################### GRAD SEARCH LS ###################")
n_outer = 100
model = SparseLogreg(estimator=estimator)
logit_multiclass = LogisticMulticlass(idx_train, idx_val, idx_test, algo)

monitor = Monitor()
log_alpha0 = np.ones(n_classes) * np.log(0.1 * alpha_max)

idx_min = np.argmin(np.array(monitor_grid.objs))
Code Example #21
n_samples = len(y)

sss1 = StratifiedShuffleSplit(n_splits=2, test_size=0.3333, random_state=0)
idx_train, idx_val = sss1.split(X, y)
idx_train = idx_train[0]
idx_val = idx_val[0]

dict_res = {}

for maxit in maxits:
    for method in methods:
        print("Dataset %s, maxit %i" % (method, maxit))
        for i in range(2):
            alpha_max = np.max(np.abs(X.T.dot(y))) / n_samples
            log_alpha = np.log(alpha_max * p_alpha_max)
            monitor = Monitor()
            if method == "celer":
                clf = Lasso_celer(alpha=np.exp(log_alpha),
                                  fit_intercept=False,
                                  tol=1e-12,
                                  max_iter=maxit)
                model = Lasso(estimator=clf, max_iter=maxit)
                criterion = HeldOutMSE(idx_train, idx_val)
                algo = ImplicitForward(tol_jac=1e-32,
                                       n_iter_jac=maxit,
                                       use_stop_crit=False)
                algo.max_iter = maxit
                val, grad = criterion.get_val_grad(model,
                                                   X,
                                                   y,
                                                   log_alpha,
Code Example #22
log_alphas = np.log(alphas)

##############################################################################
# Grid-search
# -----------

print('scikit started')
t0 = time.time()

estimator = LogisticRegression(penalty='l1',
                               fit_intercept=False,
                               max_iter=max_iter)
model = SparseLogreg(max_iter=max_iter, estimator=estimator)
criterion = HeldOutLogistic(idx_train, idx_val)
algo_grid = Forward()
monitor_grid = Monitor()
grid_search(algo_grid,
            criterion,
            model,
            X,
            y,
            log_alpha_min,
            log_alpha_max,
            monitor_grid,
            log_alphas=log_alphas,
            tol=tol)
objs = np.array(monitor_grid.objs)

t_sk = time.time() - t0

print('scikit finished')
Code Example #23
def parallel_function(dataset_name,
                      method,
                      tol=1e-5,
                      n_outer=50,
                      tolerance_decrease='constant'):

    # load data
    X, y = fetch_libsvm(dataset_name)
    y -= np.mean(y)
    # compute alpha_max
    alpha_max = np.abs(X.T @ y).max() / len(y)

    if model_name == "logreg":
        alpha_max /= 2
    alpha_min = alpha_max * dict_palphamin[dataset_name]

    if model_name == "enet":
        estimator = linear_model.ElasticNet(fit_intercept=False,
                                            max_iter=10_000,
                                            warm_start=True,
                                            tol=tol)
        model = ElasticNet(estimator=estimator)
    elif model_name == "logreg":
        model = SparseLogreg(estimator=estimator)

    # TODO improve this
    try:
        n_outer = dict_n_outers[dataset_name, method]
    except Exception:
        n_outer = 20

    size_loop = 2
    for _ in range(size_loop):
        if model_name == "lasso" or model_name == "enet":
            sub_criterion = HeldOutMSE(None, None)
        elif model_name == "logreg":
            sub_criterion = HeldOutLogistic(None, None)
        kf = KFold(n_splits=5, shuffle=True, random_state=42)
        criterion = CrossVal(sub_criterion, cv=kf)

        algo = ImplicitForward(tol_jac=1e-3)
        monitor = Monitor()
        t_max = dict_t_max[dataset_name]
        if method == 'grid_search':
            num1D = dict_point_grid_search[dataset_name]
            alpha1D = np.geomspace(alpha_max, alpha_min, num=num1D)
            alphas = [np.array(i) for i in product(alpha1D, alpha1D)]
            grid_search(algo,
                        criterion,
                        model,
                        X,
                        y,
                        alpha_min,
                        alpha_max,
                        monitor,
                        max_evals=100,
                        tol=tol,
                        alphas=alphas)
        elif method == 'random' or method == 'bayesian':
            hyperopt_wrapper(algo,
                             criterion,
                             model,
                             X,
                             y,
                             alpha_min,
                             alpha_max,
                             monitor,
                             max_evals=30,
                             tol=tol,
                             method=method,
                             size_space=2,
                             t_max=t_max)
        elif method.startswith("implicit_forward"):
            # do gradient descent to find the optimal lambda
            alpha0 = np.array([alpha_max / 100, alpha_max / 100])
            n_outer = 30
            if method == 'implicit_forward':
                optimizer = GradientDescent(n_outer=n_outer,
                                            p_grad_norm=1,
                                            verbose=True,
                                            tol=tol,
                                            t_max=t_max)
            else:
                optimizer = GradientDescent(n_outer=n_outer,
                                            p_grad_norm=1,
                                            verbose=True,
                                            tol=tol,
                                            t_max=t_max,
                                            tol_decrease="geom")
            grad_search(algo, criterion, model, optimizer, X, y, alpha0,
                        monitor)
        else:
            raise NotImplementedError

    monitor.times = np.array(monitor.times)
    monitor.objs = np.array(monitor.objs)
    monitor.objs_test = 0  # TODO
    monitor.alphas = np.array(monitor.alphas)
    return (dataset_name, method, tol, n_outer, tolerance_decrease,
            monitor.times, monitor.objs, monitor.objs_test, monitor.alphas,
            alpha_max, model_name)
Code Example #24
def parallel_function(dataset_name,
                      div_alpha,
                      method,
                      ind_rep,
                      random_state=10):
    maxit = dict_maxits[(dataset_name, div_alpha)][ind_rep]
    print("Dataset %s, algo %s, maxit %i" % (dataset_name, method, maxit))
    X, y = fetch_libsvm(dataset_name)
    n_samples = len(y)

    kf = KFold(n_splits=5, random_state=random_state, shuffle=True)

    for i in range(2):
        alpha_max = np.max(np.abs(X.T.dot(y))) / n_samples
        log_alpha = np.log(alpha_max / div_alpha)
        monitor = Monitor()
        if method == "celer":
            clf = Lasso_celer(
                alpha=np.exp(log_alpha),
                fit_intercept=False,
                # TODO maybe change this tol
                tol=1e-8,
                max_iter=maxit)
            model = Lasso(estimator=clf, max_iter=maxit)
            criterion = HeldOutMSE(None, None)
            cross_val = CrossVal(cv=kf, criterion=criterion)
            algo = ImplicitForward(tol_jac=1e-8,
                                   n_iter_jac=maxit,
                                   use_stop_crit=False)
            algo.max_iter = maxit
            val, grad = cross_val.get_val_grad(model,
                                               X,
                                               y,
                                               log_alpha,
                                               algo.get_beta_jac_v,
                                               tol=tol,
                                               monitor=monitor,
                                               max_iter=maxit)
        elif method == "ground_truth":
            for file in os.listdir("results/"):
                if file.startswith("hypergradient_%s_%i_%s" %
                                   (dataset_name, div_alpha, method)):
                    return
                else:
                    clf = Lasso_celer(alpha=np.exp(log_alpha),
                                      fit_intercept=False,
                                      warm_start=True,
                                      tol=1e-13,
                                      max_iter=10000)
                    criterion = HeldOutMSE(None, None)
                    cross_val = CrossVal(cv=kf, criterion=criterion)
                    algo = Implicit(criterion)
                    model = Lasso(estimator=clf, max_iter=10000)
                    val, grad = cross_val.get_val_grad(model,
                                                       X,
                                                       y,
                                                       log_alpha,
                                                       algo.get_beta_jac_v,
                                                       tol=1e-13,
                                                       monitor=monitor)
        else:
            model = Lasso(max_iter=maxit)
            criterion = HeldOutMSE(None, None)
            cross_val = CrossVal(cv=kf, criterion=criterion)
            if method == "forward":
                algo = Forward(use_stop_crit=False)
            elif method == "implicit_forward":
                algo = ImplicitForward(use_stop_crit=False,
                                       tol_jac=1e-8,
                                       n_iter_jac=maxit,
                                       max_iter=1000)
            elif method == "implicit":
                algo = Implicit(use_stop_crit=False, max_iter=1000)
            elif method == "backward":
                algo = Backward()
            else:
                raise NotImplementedError
            algo.max_iter = maxit
            algo.use_stop_crit = False
            val, grad = cross_val.get_val_grad(model,
                                               X,
                                               y,
                                               log_alpha,
                                               algo.get_beta_jac_v,
                                               tol=tol,
                                               monitor=monitor,
                                               max_iter=maxit)

    results = (dataset_name, div_alpha, method, maxit, val, grad,
               monitor.times[0])
    df = pandas.DataFrame(results).transpose()
    df.columns = [
        'dataset', 'div_alpha', 'method', 'maxit', 'val', 'grad', 'time'
    ]
    str_results = "results/hypergradient_%s_%i_%s_%i.pkl" % (
        dataset_name, div_alpha, method, maxit)
    df.to_pickle(str_results)
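The result-saving idiom above, pandas.DataFrame(results).transpose() on a plain tuple, builds a one-row DataFrame whose columns are then named. A minimal standalone illustration with dummy values:

import pandas

# A heterogeneous result tuple, mirroring the one in the snippet (dummy values).
results = ("news20", 100, "forward", 500, 0.42, -0.01, 1.3)
df = pandas.DataFrame(results).transpose()
df.columns = [
    'dataset', 'div_alpha', 'method', 'maxit', 'val', 'grad', 'time'
]
print(df)  # a single row with the seven named columns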
Code Example #25
File: plot_wlasso.py  Project: svaiter/sparse-ho
# Measure mse on test
mse_cv = mean_squared_error(y_test, model_cv.predict(X_test))
print("Vanilla LassoCV: Mean-squared error on test data %f" % mse_cv)
##############################################################################

##############################################################################
# Weighted Lasso with sparse-ho.
# We use the vanilla LassoCV alpha as a starting point
alpha0 = np.log(model_cv.alpha_) * np.ones(X_train.shape[1])
# Weighted Lasso with sparse-ho: one hyperparameter per feature
estimator = Lasso(fit_intercept=False, max_iter=10, warm_start=True)
model = WeightedLasso(X_train, y_train, estimator=estimator)
criterion = HeldOutMSE(X_val, y_val, model, X_test=X_test, y_test=y_test)
algo = ImplicitForward()
monitor = Monitor()
grad_search(algo, criterion, alpha0, monitor, n_outer=20, tol=1e-6)
##############################################################################

##############################################################################
# MSE on validation set
mse_sho_val = mean_squared_error(y_val, estimator.predict(X_val))

# MSE on test set, i.e. unseen data
mse_sho_test = mean_squared_error(y_test, estimator.predict(X_test))

print("Sparse-ho: Mean-squared error on validation data %f" % mse_sho_val)
print("Sparse-ho: Mean-squared error on test (unseen) data %f" % mse_sho_test)

labels = ['WeightedLasso val', 'WeightedLasso test', 'Lasso CV']
Code Example #26
def parallel_function(dataset_name, method):
    X, y = fetch_libsvm(dataset_name)
    if dataset_name == "real-sim":
        X = X[:, :2000]
    X = csr_matrix(X)  # very important for SVM
    my_bool = norm(X, axis=1) != 0
    X = X[my_bool, :]
    y = y[my_bool]
    logC = dict_logC[dataset_name]
    for max_iter in dict_max_iter[dataset_name]:
        print("Dataset %s, max iter %i" % (method, max_iter))
        for i in range(2):  # TODO change this
            sss1 = StratifiedShuffleSplit(n_splits=2,
                                          test_size=0.3333,
                                          random_state=0)
            idx_train, idx_val = sss1.split(X, y)
            idx_train = idx_train[0]
            idx_val = idx_val[0]

            monitor = Monitor()
            criterion = HeldOutSmoothedHinge(idx_train, idx_val)
            model = SVM(estimator=None, max_iter=10_000)

            if method == "ground_truth":
                for file in os.listdir("results_svm/"):
                    if file.startswith("hypergradient_svm_%s_%s" %
                                       (dataset_name, method)):
                        return
                clf = LinearSVC(C=np.exp(logC),
                                tol=1e-32,
                                max_iter=10_000,
                                loss='hinge',
                                permute=False)
                algo = Implicit(criterion)
                model.estimator = clf
                val, grad = criterion.get_val_grad(model,
                                                   X,
                                                   y,
                                                   logC,
                                                   algo.compute_beta_grad,
                                                   tol=1e-14,
                                                   monitor=monitor)
            else:
                if method == "sota":
                    clf = LinearSVC(C=np.exp(logC),
                                    loss='hinge',
                                    max_iter=max_iter,
                                    tol=1e-32,
                                    permute=False)
                    model.estimator = clf
                    algo = ImplicitForward(tol_jac=1e-32,
                                           n_iter_jac=max_iter,
                                           use_stop_crit=False)
                elif method == "forward":
                    algo = Forward(use_stop_crit=False)
                elif method == "implicit_forward":
                    algo = ImplicitForward(tol_jac=1e-8,
                                           n_iter_jac=max_iter,
                                           use_stop_crit=False)
                else:
                    raise NotImplementedError
                algo.max_iter = max_iter
                algo.use_stop_crit = False
                val, grad = criterion.get_val_grad(model,
                                                   X,
                                                   y,
                                                   logC,
                                                   algo.compute_beta_grad,
                                                   tol=tol,
                                                   monitor=monitor,
                                                   max_iter=max_iter)

        results = (dataset_name, method, max_iter, val, grad, monitor.times[0])
        df = pandas.DataFrame(results).transpose()
        df.columns = ['dataset', 'method', 'maxit', 'val', 'grad', 'time']
        str_results = "results_svm/hypergradient_svm_%s_%s_%i.pkl" % (
            dataset_name, method, max_iter)
        df.to_pickle(str_results)
Code Example #27
##############################################################################
# Grid-search with scikit-learn
# -----------------------------

estimator = linear_model.Lasso(fit_intercept=False,
                               max_iter=1000,
                               warm_start=True)

print('scikit-learn started')

t0 = time.time()
model = Lasso(estimator=estimator)
criterion = HeldOutMSE(idx_train, idx_val)
algo = Forward()
monitor_grid_sk = Monitor()
grid_search(algo,
            criterion,
            model,
            X,
            y,
            None,
            None,
            monitor_grid_sk,
            log_alphas=log_alphas,
            tol=tol)
objs = np.array(monitor_grid_sk.objs)
t_sk = time.time() - t0

print('scikit-learn finished')
Code Example #28
File: plot_use_callback.py  Project: Klopfe/sparse-ho

def callback(val, grad, mask, dense, alpha):
    # The custom quantity is added at each outer iteration:
    # here the prediction MSE on test data
    objs_test.append(mean_squared_error(X_test[:, mask] @ dense, y_test))


##############################################################################
# Grad-search with sparse-ho and callback
# ---------------------------------------
model = Lasso(estimator=estimator)
criterion = HeldOutMSE(idx_train, idx_val)
algo = ImplicitForward()
# use Monitor(callback) with your custom callback
monitor = Monitor(callback=callback)
optimizer = LineSearch(n_outer=30)

grad_search(algo, criterion, model, optimizer, X, y, alpha0, monitor)

##############################################################################
# Plot results
# ------------
plt.figure(figsize=(5, 3))
plt.plot(monitor.times, objs_test)
plt.tick_params(width=5)
plt.xlabel("Time (s)")
plt.ylabel(r"$\|y^{\rm{test}} - X^{\rm{test}} \hat \beta^{(\lambda)} \|^2$")
plt.tight_layout()
plt.show(block=False)
Code Example #29
def parallel_function(name_model, div_alpha):
    index_col = np.arange(10)
    alpha_max = (np.abs(X[np.ix_(idx_train, index_col)].T
                        @ y[idx_train])).max() / len(idx_train)
    if name_model == "lasso":
        log_alpha = np.log(alpha_max / div_alpha)
    elif name_model == "enet":
        alpha0 = alpha_max / div_alpha
        alpha1 = (1 - l1_ratio) * alpha0 / l1_ratio
        log_alpha = np.log(np.array([alpha0, alpha1]))

    criterion = HeldOutMSE(idx_train, idx_val)
    algo = Forward()
    monitor = Monitor()
    val, grad = criterion.get_val_grad(dict_models[name_model],
                                       X[:, index_col],
                                       y,
                                       log_alpha,
                                       algo.compute_beta_grad,
                                       tol=tol,
                                       monitor=monitor)

    criterion = HeldOutMSE(idx_train, idx_val)
    algo = Backward()
    monitor = Monitor()
    val, grad = criterion.get_val_grad(dict_models[name_model],
                                       X[:, index_col],
                                       y,
                                       log_alpha,
                                       algo.compute_beta_grad,
                                       tol=tol,
                                       monitor=monitor)

    val_cvxpy, grad_cvxpy = dict_cvxpy[name_model](X[:, index_col], y,
                                                   np.exp(log_alpha),
                                                   idx_train, idx_val)

    list_times_fwd = []
    list_times_bwd = []
    list_times_cvxpy = []
    for n_col in dict_ncols[div_alpha]:
        temp_fwd = []
        temp_bwd = []
        temp_cvxpy = []
        for i in range(repeat):

            rng = np.random.RandomState(i)
            index_col = rng.choice(n_features, n_col, replace=False)
            alpha_max = (np.abs(X[np.ix_(idx_train, index_col)].T
                                @ y[idx_train])).max() / len(idx_train)
            if name_model == "lasso":
                log_alpha = np.log(alpha_max / div_alpha)
            elif name_model == "enet":
                alpha0 = alpha_max / div_alpha
                alpha1 = (1 - l1_ratio) * alpha0 / l1_ratio
                log_alpha = np.log(np.array([alpha0, alpha1]))

            criterion = HeldOutMSE(idx_train, idx_val)
            algo = Forward()
            monitor = Monitor()
            val, grad = criterion.get_val_grad(dict_models[name_model],
                                               X[:, index_col],
                                               y,
                                               log_alpha,
                                               algo.compute_beta_grad,
                                               tol=tol,
                                               monitor=monitor)
            temp_fwd.append(monitor.times)

            criterion = HeldOutMSE(idx_train, idx_val)
            algo = Backward()
            monitor = Monitor()
            val, grad = criterion.get_val_grad(dict_models[name_model],
                                               X[:, index_col],
                                               y,
                                               log_alpha,
                                               algo.compute_beta_grad,
                                               tol=tol,
                                               monitor=monitor)
            temp_bwd.append(monitor.times)

            t0 = time.time()
            val_cvxpy, grad_cvxpy = dict_cvxpy[name_model](X[:, index_col], y,
                                                           np.exp(log_alpha),
                                                           idx_train, idx_val)
            temp_cvxpy.append(time.time() - t0)

            print(np.abs(grad - grad_cvxpy * np.exp(log_alpha)))
        list_times_fwd.append(np.mean(np.array(temp_fwd)))
        list_times_bwd.append(np.mean(np.array(temp_bwd)))
        list_times_cvxpy.append(np.mean(np.array(temp_cvxpy)))

    np.save("results/times_%s_forward_%s" % (name_model, div_alpha),
            list_times_fwd)
    np.save("results/times_%s_backward_%s" % (name_model, div_alpha),
            list_times_bwd)
    np.save("results/times_%s_cvxpy_%s" % (name_model, div_alpha),
            list_times_cvxpy)
    np.save("results/nfeatures_%s_%s" % (name_model, div_alpha),
            dict_ncols[div_alpha])
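The printed check np.abs(grad - grad_cvxpy * np.exp(log_alpha)) reflects a change of variables: the sparse-ho gradient is taken with respect to log(alpha), the cvxpy reference with respect to alpha, and the chain rule gives d f / d log(alpha) = alpha * d f / d alpha. A tiny numerical check of that identity on a toy function:

import numpy as np


def f(alpha):
    # toy smooth function standing in for the validation loss
    return (alpha - 0.3) ** 2


alpha = 0.1
eps = 1e-6
# finite-difference gradients with respect to alpha and to log(alpha)
grad_alpha = (f(alpha + eps) - f(alpha - eps)) / (2 * eps)
grad_log_alpha = (f(np.exp(np.log(alpha) + eps))
                  - f(np.exp(np.log(alpha) - eps))) / (2 * eps)
print(np.isclose(grad_log_alpha, alpha * grad_alpha))  # True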
Code Example #30
File: plot_enet_pred.py  Project: QB3/sparse-ho
                                    max_iter=1000,
                                    warm_start=True,
                                    tol=tol)

dict_monitor = {}

all_algo_name = ['grid_search']
# , 'implicit_forward', "implicit_forward_approx", 'bayesian']
# , 'random_search']
# all_algo_name = ['random_search']

for algo_name in all_algo_name:
    model = ElasticNet(estimator=estimator)
    sub_criterion = HeldOutMSE(None, None)
    alpha0 = np.array([alpha_max / 10, alpha_max / 10])
    monitor = Monitor()
    kf = KFold(n_splits=5, shuffle=True, random_state=42)
    criterion = CrossVal(sub_criterion, cv=kf)
    algo = ImplicitForward(tol_jac=1e-3)
    # optimizer = LineSearch(n_outer=10, tol=tol)
    if algo_name.startswith('implicit_forward'):
        if algo_name == "implicit_forward_approx":
            optimizer = GradientDescent(n_outer=30,
                                        p_grad_norm=1.,
                                        verbose=True,
                                        tol=tol,
                                        tol_decrease="geom")
        else:
            optimizer = GradientDescent(n_outer=30,
                                        p_grad_norm=1.,
                                        verbose=True,