Example #1
def check_l1_min_c(X, y, loss, fit_intercept=True, intercept_scaling=None):
    min_c = l1_min_c(X, y, loss, fit_intercept, intercept_scaling)

    clf = {
        "log": LogisticRegression(penalty="l1"),
        "squared_hinge": LinearSVC(loss="squared_hinge", penalty="l1", dual=False),
    }[loss]

    clf.fit_intercept = fit_intercept
    clf.intercept_scaling = intercept_scaling

    clf.C = min_c
    clf.fit(X, y)
    assert_true((np.asarray(clf.coef_) == 0).all())
    assert_true((np.asarray(clf.intercept_) == 0).all())

    clf.C = min_c * 1.01
    clf.fit(X, y)
    assert_true((np.asarray(clf.coef_) != 0).any() or (np.asarray(clf.intercept_) != 0).any())
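A minimal, self-contained driver for the variant above, as a sketch: iris stands in for the original test fixtures, the assert_true shim replaces the long-removed sklearn.utils.testing helper, and an older scikit-learn is assumed since the helper passes l1_min_c its arguments positionally.

import numpy as np
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from sklearn.svm import LinearSVC, l1_min_c


def assert_true(expr):
    # shim for the removed sklearn.utils.testing.assert_true
    assert expr


X, y = load_iris(return_X_y=True)

# None is not a valid intercept_scaling for the liblinear-backed estimators,
# so pass a real value instead of the helper's default.
check_l1_min_c(X, y, loss="squared_hinge", intercept_scaling=1.0)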
Example #2
def check_l1_min_c(X, y, loss, fit_intercept=True, intercept_scaling=None):
    min_c = l1_min_c(X, y, loss, fit_intercept, intercept_scaling)

    clf = {
        'log':  LogisticRegression(penalty='l1'),
        'l2':  LinearSVC(loss='l2', penalty='l1', dual=False),
    }[loss]

    clf.fit_intercept = fit_intercept
    clf.intercept_scaling = intercept_scaling

    clf.C = min_c
    clf.fit(X, y)
    assert_true((np.asarray(clf.coef_) == 0).all())
    assert_true((np.asarray(clf.intercept_) == 0).all())

    clf.C = min_c * 1.01
    clf.fit(X, y)
    assert_true((np.asarray(clf.coef_) != 0).any() or
                (np.asarray(clf.intercept_) != 0).any())
Example #3
def check_l1_min_c(X, y, loss, fit_intercept=True, intercept_scaling=None):
    min_c = l1_min_c(X, y, loss, fit_intercept, intercept_scaling)

    clf = {
        'log': LogisticRegression(penalty='l1'),
        'l2': LinearSVC(loss='l2', penalty='l1', dual=False),
    }[loss]

    clf.fit_intercept = fit_intercept
    clf.intercept_scaling = intercept_scaling

    clf.C = min_c
    clf.fit(X, y)
    assert_true((np.asarray(clf.coef_) == 0).all())
    assert_true((np.asarray(clf.intercept_) == 0).all())

    clf.C = min_c * 1.01
    clf.fit(X, y)
    assert_true((np.asarray(clf.coef_) != 0).any()
                or (np.asarray(clf.intercept_) != 0).any())
Example #4
def check_l1_min_c(X, y, loss, fit_intercept=True, intercept_scaling=None):
    min_c = l1_min_c(X, y, loss, fit_intercept, intercept_scaling)

    clf = {
        'log': LogisticRegression(penalty='l1', solver='liblinear',
                                  multi_class='ovr'),
        'squared_hinge': LinearSVC(loss='squared_hinge',
                                   penalty='l1', dual=False),
    }[loss]

    clf.fit_intercept = fit_intercept
    clf.intercept_scaling = intercept_scaling

    clf.C = min_c
    clf.fit(X, y)
    assert (np.asarray(clf.coef_) == 0).all()
    assert (np.asarray(clf.intercept_) == 0).all()

    clf.C = min_c * 1.01
    clf.fit(X, y)
    assert ((np.asarray(clf.coef_) != 0).any() or
            (np.asarray(clf.intercept_) != 0).any())
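Example #4 is the later form of the same check: it pins solver='liblinear' and multi_class='ovr' because LogisticRegression's default solver changed to 'lbfgs' in scikit-learn 0.22, and 'lbfgs' rejects an L1 penalty. A one-line illustration of the constraint:

from sklearn.linear_model import LogisticRegression

# Only the 'liblinear' and 'saga' solvers accept penalty='l1';
# the default 'lbfgs' raises a ValueError when fit() is called.
clf = LogisticRegression(penalty="l1", solver="liblinear")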
Example #5
def check_l1_min_c(X, y, loss, fit_intercept=True, intercept_scaling=None):
    min_c = l1_min_c(X, y, loss, fit_intercept, intercept_scaling)

    clf = {
        ('log', False): LogisticRegression(penalty='l1'),
        ('log', True):  SparseLogRegression(penalty='l1'),
        ('l2', False):  LinearSVC(loss='l2', penalty='l1', dual=False),
        ('l2', True):   SparseSVC(loss='l2', penalty='l1', dual=False),
    }[loss, sp.issparse(X)]

    clf.fit_intercept = fit_intercept
    clf.intercept_scaling = intercept_scaling

    clf.C = min_c
    clf.fit(X, y)
    assert (np.asanyarray(clf.coef_) == 0).all()
    assert (np.asanyarray(clf.intercept_) == 0).all()

    clf.C = min_c * 1.01
    clf.fit(X, y)
    assert (np.asanyarray(clf.coef_) != 0).any() or \
           (np.asanyarray(clf.intercept_) != 0).any()
Example #6
def check_l1_min_c(X, y, loss, fit_intercept=True, intercept_scaling=None):
    min_c = l1_min_c(X, y, loss, fit_intercept, intercept_scaling)

    clf = {
        ('log', False): LogisticRegression(penalty='l1'),
        ('log', True): SparseLogRegression(penalty='l1'),
        ('l2', False): LinearSVC(loss='l2', penalty='l1', dual=False),
        ('l2', True): SparseSVC(loss='l2', penalty='l1', dual=False),
    }[loss, sp.issparse(X)]

    clf.fit_intercept = fit_intercept
    clf.intercept_scaling = intercept_scaling

    clf.C = min_c
    clf.fit(X, y)
    assert (np.asarray(clf.coef_) == 0).all()
    assert (np.asarray(clf.intercept_) == 0).all()

    clf.C = min_c * 1.01
    clf.fit(X, y)
    assert (np.asarray(clf.coef_) != 0).any() or \
           (np.asarray(clf.intercept_) != 0).any()
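Examples #5 and #6 date from releases that shipped separate sparse-input estimator classes; those were later merged into the regular estimators, and l1_min_c itself accepts sparse matrices directly. A small sketch of the current behavior, with iris as illustrative data:

import numpy as np
import scipy.sparse as sp
from sklearn.datasets import load_iris
from sklearn.svm import l1_min_c

X, y = load_iris(return_X_y=True)
X_sparse = sp.csr_matrix(X)

# The bound is a property of the data, so dense and sparse input agree.
assert np.isclose(l1_min_c(X, y, loss="squared_hinge"),
                  l1_min_c(X_sparse, y, loss="squared_hinge"))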
Example #7
def check_l1_min_c(X, y, loss, fit_intercept=True, intercept_scaling=None):
    min_c = l1_min_c(X, y, loss, fit_intercept, intercept_scaling)

    clf = {
        'log':
        LogisticRegression(penalty='l1', solver='liblinear',
                           multi_class='ovr'),
        'squared_hinge':
        LinearSVC(loss='squared_hinge', penalty='l1', dual=False),
    }[loss]

    clf.fit_intercept = fit_intercept
    clf.intercept_scaling = intercept_scaling

    clf.C = min_c
    clf.fit(X, y)
    assert (np.asarray(clf.coef_) == 0).all()
    assert (np.asarray(clf.intercept_) == 0).all()

    clf.C = min_c * 1.01
    clf.fit(X, y)
    assert ((np.asarray(clf.coef_) != 0).any()
            or (np.asarray(clf.intercept_) != 0).any())
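All seven variants test the same contract: at C = l1_min_c(X, y) the L1-penalized model is exactly empty, and any C above the bound admits a non-trivial model. A condensed sketch of that contract against the current keyword-only l1_min_c API, again with iris as illustrative data:

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from sklearn.svm import l1_min_c

X, y = load_iris(return_X_y=True)
min_c = l1_min_c(X, y, loss="log")

# At the bound, every coefficient and the intercept are zero.
clf = LogisticRegression(penalty="l1", solver="liblinear", C=min_c).fit(X, y)
assert (clf.coef_ == 0).all() and (clf.intercept_ == 0).all()

# Just above it, something enters the model.
clf.set_params(C=min_c * 1.01).fit(X, y)
assert (clf.coef_ != 0).any() or (clf.intercept_ != 0).any()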
Example #8
@raises(ValueError)  # nose-era decorator; the unsupported 'l1' loss must raise
def test_unsupported_loss():
    l1_min_c(dense_X, Y1, 'l1')
Example #9
@raises(ValueError)  # an all-zero X makes the bound infinite, so l1_min_c raises
def test_ill_posed_min_c():
    X = [[0, 0], [0, 0]]
    y = [0, 1]
    l1_min_c(X, y)
Example #10
def test_l2_deprecation():
    # loss='l2' was renamed to 'squared_hinge'; the old name still works
    # but emits a DeprecationWarning and must give the same bound.
    clean_warning_registry()
    with warnings.catch_warnings(record=True) as w:
        assert_equal(l1_min_c(dense_X, Y1, "l2"),
                     l1_min_c(dense_X, Y1, "squared_hinge"))
        assert_equal(w[0].category, DeprecationWarning)
Example #11
def _check_param_grid(estimator, X, y, param_grid=None):
    """Check param_grid and return sensible default if param_grid is None.

    Parameters
    -----------
    estimator: str, optional
        The estimator to choose among: 'svc', 'svc_l2', 'svc_l1', 'logistic',
        'logistic_l1', 'logistic_l2', 'ridge', 'ridge_classifier',
        'ridge_regressor', and 'svr'. Note that the 'svc' and 'svc_l2';
        'logistic' and 'logistic_l2'; 'ridge' and 'ridge_regressor'
        correspond to the same estimator. Default 'svc'.

    X: list of Niimg-like objects
        See http://nilearn.github.io/manipulating_images/input_output.html
        Data on which model is to be fitted. If this is a list,
        the affine is considered the same for all.

    y: array or list of shape (n_samples)
        The dependent variable (age, sex, IQ, yes/no, etc.).
        Target variable to predict. Must have exactly as many elements as
        3D images in niimg.

    param_grid: dict of str to sequence, or sequence of such. Default None
        The parameter grid to explore, as a dictionary mapping estimator
        parameters to sequences of allowed values.

        An empty dict signifies default parameters.

        A sequence of dicts signifies a sequence of grids to search, and is
        useful to avoid exploring parameter combinations that make no sense
        or have no effect. See scikit-learn documentation for more information.

    Returns
    -------
    param_grid: dict of str to sequence, or sequence of such. Sensible default
    dict has size 1.

    """
    if param_grid is None:
        param_grid = {}
        # pick the loss name understood by l1_min_c, which only accepts
        # 'log' and 'squared_hinge'
        if isinstance(estimator, LogisticRegression):
            loss = 'log'
        elif isinstance(estimator, (LinearSVC, _BaseRidgeCV, SVR)):
            loss = 'squared_hinge'
        else:
            raise ValueError(
                "Invalid estimator. The supported estimators are: {}".format(
                    list(SUPPORTED_ESTIMATORS.keys())))
        # define sensible default for different types of estimators
        if hasattr(estimator, 'penalty') and (estimator.penalty == 'l1'):
            min_c = l1_min_c(X, y, loss=loss)
        else:
            min_c = 0.5

        if not isinstance(estimator, _BaseRidgeCV):
            param_grid['C'] = np.array([2, 20, 200]) * min_c
        else:
            param_grid = {}

    return param_grid
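A minimal usage sketch for the helper above, assuming _check_param_grid and its dependencies (l1_min_c, LogisticRegression, _BaseRidgeCV, SUPPORTED_ESTIMATORS) are in scope as in the nilearn source it comes from; the random data is an illustrative stand-in for masked image data.

import numpy as np
from sklearn.linear_model import LogisticRegression

rng = np.random.RandomState(0)
X = rng.randn(20, 5)
y = rng.randint(0, 2, size=20)

# For an L1-penalized estimator the grid is anchored at l1_min_c(X, y),
# so every candidate C yields a non-empty model.
grid = _check_param_grid(LogisticRegression(penalty='l1'), X, y)
print(grid)   # {'C': array([...])} with candidates at 2x, 20x and 200x min_c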
Example #12
def test_unsupported_loss():
    with pytest.raises(ValueError):
        l1_min_c(dense_X, Y1, 'l1')
Example #13
def test_ill_posed_min_c():
    X = [[0, 0], [0, 0]]
    y = [0, 1]
    with pytest.raises(ValueError):
        l1_min_c(X, y)