def test_n_nonzero_coefs():
    assert_true(
        count_nonzero(orthogonal_mp(X, y[:, 0], n_nonzero_coefs=5)) <= 5)
    assert_true(
        count_nonzero(orthogonal_mp(X, y[:, 0], n_nonzero_coefs=5,
                                    precompute=True)) <= 5)

def test_estimator():
    omp = OrthogonalMatchingPursuit(n_nonzero_coefs=n_nonzero_coefs)
    omp.fit(X, y[:, 0])
    assert_equal(omp.coef_.shape, (n_features, ))
    assert_equal(omp.intercept_.shape, ())
    assert_true(count_nonzero(omp.coef_) <= n_nonzero_coefs)

    omp.fit(X, y)
    assert_equal(omp.coef_.shape, (n_targets, n_features))
    assert_equal(omp.intercept_.shape, (n_targets, ))
    assert_true(count_nonzero(omp.coef_) <= n_targets * n_nonzero_coefs)

    omp.set_params(fit_intercept=False, normalize=False)

    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        omp.fit(X, y[:, 0], Gram=G, Xy=Xy[:, 0])
        assert_equal(omp.coef_.shape, (n_features, ))
        assert_equal(omp.intercept_, 0)
        assert_true(count_nonzero(omp.coef_) <= n_nonzero_coefs)
        assert_true(len(w) == 2)

    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        omp.fit(X, y, Gram=G, Xy=Xy)
        assert_equal(omp.coef_.shape, (n_targets, n_features))
        assert_equal(omp.intercept_, 0)
        assert_true(count_nonzero(omp.coef_) <= n_targets * n_nonzero_coefs)
        assert_true(len(w) == 2)
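
# The snippets above rely on module-level fixtures (X, y, G, Xy,
# n_nonzero_coefs, n_features, n_targets) and on scikit-learn's assert_*
# test helpers, none of which are shown here.  A minimal, hypothetical setup
# giving those names compatible shapes could look like this sketch; the sizes
# are illustrative, not the ones the original test module uses.
import numpy as np
from sklearn.linear_model import OrthogonalMatchingPursuit, orthogonal_mp

n_samples, n_features, n_targets, n_nonzero_coefs = 100, 30, 3, 5
rng = np.random.RandomState(0)
X = rng.randn(n_samples, n_features)
y = X @ rng.randn(n_features, n_targets)

# Precomputed quantities used by the Gram-matrix code paths.
G = X.T @ X
Xy = X.T @ y

count_nonzero = np.count_nonzero  # assumption: the snippets use numpy's helper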
Example #3
def test_estimator_shapes():
    omp = OrthogonalMatchingPursuit(n_nonzero_coefs=n_nonzero_coefs)
    omp.fit(X, y[:, 0])
    assert_equal(omp.coef_.shape, (n_features,))
    assert_equal(omp.intercept_.shape, ())
    assert_true(count_nonzero(omp.coef_) <= n_nonzero_coefs)

    omp.fit(X, y)
    assert_equal(omp.coef_.shape, (n_targets, n_features))
    assert_equal(omp.intercept_.shape, (n_targets,))
    assert_true(count_nonzero(omp.coef_) <= n_targets * n_nonzero_coefs)

    omp.fit(X, y[:, 0], Gram=G, Xy=Xy[:, 0])
    assert_equal(omp.coef_.shape, (n_features,))
    assert_equal(omp.intercept_.shape, ())
    assert_true(count_nonzero(omp.coef_) <= n_nonzero_coefs)

    omp.fit(X, y, Gram=G, Xy=Xy)
    assert_equal(omp.coef_.shape, (n_targets, n_features))
    assert_equal(omp.intercept_.shape, (n_targets,))
    assert_true(count_nonzero(omp.coef_) <= n_targets * n_nonzero_coefs)
Example #5
def test_estimator():
    omp = OrthogonalMatchingPursuit(n_nonzero_coefs=n_nonzero_coefs)
    omp.fit(X, y[:, 0])
    assert_equal(omp.coef_.shape, (n_features,))
    assert_equal(omp.intercept_.shape, ())
    assert_true(count_nonzero(omp.coef_) <= n_nonzero_coefs)

    omp.fit(X, y)
    assert_equal(omp.coef_.shape, (n_targets, n_features))
    assert_equal(omp.intercept_.shape, (n_targets,))
    assert_true(count_nonzero(omp.coef_) <= n_targets * n_nonzero_coefs)

    omp.set_params(fit_intercept=False, normalize=False)

    assert_warns(DeprecationWarning, omp.fit, X, y[:, 0], Gram=G, Xy=Xy[:, 0])
    assert_equal(omp.coef_.shape, (n_features,))
    assert_equal(omp.intercept_, 0)
    assert_true(count_nonzero(omp.coef_) <= n_nonzero_coefs)

    assert_warns(DeprecationWarning, omp.fit, X, y, Gram=G, Xy=Xy)
    assert_equal(omp.coef_.shape, (n_targets, n_features))
    assert_equal(omp.intercept_, 0)
    assert_true(count_nonzero(omp.coef_) <= n_targets * n_nonzero_coefs)
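
# The assert_warns calls above use scikit-learn's own testing helper.  A
# roughly equivalent check with plain pytest (a sketch, not what the original
# test does) would be:
import pytest

with pytest.warns(DeprecationWarning):
    omp.fit(X, y[:, 0], Gram=G, Xy=Xy[:, 0])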
Example #6
def test_n_nonzero_coefs():
    assert_true(count_nonzero(orthogonal_mp(X, y[:, 0],
                              n_nonzero_coefs=5)) <= 5)
    assert_true(count_nonzero(orthogonal_mp(X, y[:, 0], n_nonzero_coefs=5,
                                            precompute=True)) <= 5)
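
# For reference, orthogonal_mp returns the coefficient array directly, so the
# sparsity check above is simply a count of its non-zero entries (quick
# sketch, assuming the X, y fixtures outlined earlier):
coef = orthogonal_mp(X, y[:, 0], n_nonzero_coefs=5)
print(coef.shape)              # (n_features,)
print(np.count_nonzero(coef))  # at most 5 non-zero coefficients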
###############################################################################
# main code

start_time = time.time()

# benchmark bulk/atomic prediction speed for various regressors
configuration = {
    'n_train': int(1e3),
    'n_test': int(1e2),
    'n_features': int(1e2),
    'estimators': [
        {'name': 'Linear Model',
         'instance': SGDRegressor(penalty='elasticnet', alpha=0.01,
                                  l1_ratio=0.25, fit_intercept=True),
         'complexity_label': 'non-zero coefficients',
         'complexity_computer': lambda clf: count_nonzero(clf.coef_)},
        {'name': 'RandomForest',
         'instance': RandomForestRegressor(),
         'complexity_label': 'estimators',
         'complexity_computer': lambda clf: clf.n_estimators},
        {'name': 'SVR',
         'instance': SVR(kernel='rbf'),
         'complexity_label': 'support vectors',
         'complexity_computer': lambda clf: len(clf.support_vectors_)},
    ]
}
benchmark(configuration)

# benchmark n_features influence on prediction speed
percentile = 90
percentiles = n_feature_influence({'ridge': Ridge()},
Example #8
configuration = {
    'n_train': int(1e3),
    'n_test': int(1e2),
    'n_features': int(1e2),
    'estimators': [
        {'name': 'Linear Model',
         'instance': SGDRegressor(penalty='elasticnet', alpha=0.01,
                                  l1_ratio=0.25, fit_intercept=True),
         'complexity_label': 'non-zero coefficients',
         'complexity_computer': lambda clf: count_nonzero(clf.coef_)},
        {'name': 'RandomForest',
         'instance': RandomForestRegressor(),
         'complexity_label': 'estimators',
         'complexity_computer': lambda clf: clf.n_estimators},
        {'name': 'SVR',
         'instance': SVR(kernel='rbf'),
         'complexity_label': 'support vectors',
         'complexity_computer': lambda clf: len(clf.support_vectors_)},
    ]
}
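
# The benchmark and n_feature_influence helpers that consume this
# configuration are defined earlier in the source script and are not shown
# here.  The sketch below is a rough, purely illustrative idea of the kind of
# loop such a benchmark might run; rough_benchmark and its body are
# assumptions, not the script's actual code.
import time

import numpy as np


def rough_benchmark(conf):
    # Fit each configured estimator on random data and time one-at-a-time
    # ("atomic") predictions over the test set.
    rng = np.random.RandomState(0)
    X_train = rng.randn(conf['n_train'], conf['n_features'])
    y_train = rng.randn(conf['n_train'])
    X_test = rng.randn(conf['n_test'], conf['n_features'])
    for spec in conf['estimators']:
        est = spec['instance'].fit(X_train, y_train)
        start = time.time()
        for row in X_test:
            est.predict(row.reshape(1, -1))
        elapsed = time.time() - start
        print('%s: %d %s, %.4fs for %d atomic predictions'
              % (spec['name'], spec['complexity_computer'](est),
                 spec['complexity_label'], elapsed, conf['n_test']))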
Example #9
def sparsity_ratio(X):
    # Fraction of non-zero entries in X (n_samples, n_features: module globals).
    return count_nonzero(X) / float(n_samples * n_features)

def _count_nonzero_coefficients(estimator):
    # Densify the estimator's (sparse) coef_ before counting non-zero weights.
    a = estimator.coef_.todense()
    return count_nonzero(a)
Example #12
def sparsity_ratio(X):
    return count_nonzero(X) / float(n_samples * n_features)
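
# Small usage sketch for sparsity_ratio; n_samples, n_features and
# count_nonzero are assumed module globals here, mirroring the script these
# snippets come from.
import numpy as np
from scipy import sparse

count_nonzero = np.count_nonzero
n_samples, n_features = 300, 100
X_sparse = sparse.random(n_samples, n_features, density=0.1,
                         random_state=0).toarray()
print(sparsity_ratio(X_sparse))  # close to 0.1 for 10% density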