Example #1
# Shared imports assumed by the test excerpts below (X, T, X_sp, X2_sp, T2,
# iris and check_svm_model_equal are module-level fixtures of the original
# test files and are not reproduced here):
#   import numpy as np
#   import pytest
#   from numpy.testing import assert_array_equal, assert_array_almost_equal
#   from scipy import sparse
#   from sklearn import svm
#   from sklearn.datasets import make_blobs
#   from sklearn.utils import check_random_state
def test_oneclass_decision_function():
    # Test OneClassSVM decision function
    rnd = check_random_state(2)

    # Generate train data
    X = 0.3 * rnd.randn(100, 2)
    X_train = np.r_[X + 2, X - 2]

    # Generate some regular novel observations
    X = 0.3 * rnd.randn(20, 2)
    X_test = np.r_[X + 2, X - 2]
    # Generate some abnormal novel observations
    X_outliers = rnd.uniform(low=-4, high=4, size=(20, 2))

    # fit the model
    clf = svm.OneClassSVM(nu=0.1, kernel="rbf", gamma=0.1)
    clf.fit(X_train)

    # predict on the regular and abnormal observations
    y_pred_test = clf.predict(X_test)
    assert np.mean(y_pred_test == 1) > .9
    y_pred_outliers = clf.predict(X_outliers)
    assert np.mean(y_pred_outliers == -1) > .9
    dec_func_test = clf.decision_function(X_test)
    assert_array_equal((dec_func_test > 0).ravel(), y_pred_test == 1)
    dec_func_outliers = clf.decision_function(X_outliers)
    assert_array_equal((dec_func_outliers > 0).ravel(), y_pred_outliers == 1)
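For reference, a minimal standalone sketch of the property Example #1 asserts, using only the public OneClassSVM API (the data below is illustrative; the nu/gamma settings mirror the test): predict() returns +1 exactly where decision_function() is positive.

import numpy as np
from sklearn import svm

rng = np.random.RandomState(0)
X_train = 0.3 * rng.randn(100, 2) + 2                   # one tight cluster
X_new = np.vstack([0.3 * rng.randn(10, 2) + 2,          # regular observations
                   rng.uniform(-4, 4, size=(10, 2))])   # potential outliers

clf = svm.OneClassSVM(nu=0.1, kernel="rbf", gamma=0.1).fit(X_train)

# Predictions of +1 coincide with a strictly positive decision function.
assert np.array_equal(clf.predict(X_new) == 1,
                      clf.decision_function(X_new).ravel() > 0)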
Example #2
def test_sparse_oneclasssvm(datasets_index, kernel):
    # Check that sparse OneClassSVM gives the same result as dense OneClassSVM.
    # datasets_index and kernel are pytest parameters; X_sp, X2_sp, T and T2
    # are module-level fixtures of the sparse test module.
    # many-class dataset:
    X_blobs, _ = make_blobs(n_samples=100, centers=10, random_state=0)
    X_blobs = sparse.csr_matrix(X_blobs)
    datasets = [[X_sp, None, T], [X2_sp, None, T2],
                [X_blobs[:80], None, X_blobs[80:]],
                [iris.data, None, iris.data]]
    dataset = datasets[datasets_index]
    clf = svm.OneClassSVM(gamma=1, kernel=kernel)
    sp_clf = svm.OneClassSVM(gamma=1, kernel=kernel)
    check_svm_model_equal(clf, sp_clf, *dataset)
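check_svm_model_equal is a helper defined in the sparse SVM test module and not shown here. As a hedged sketch of the kind of dense-vs-sparse comparison it performs (toy blobs data as above, public attributes only, default tolerances):

from numpy.testing import assert_array_almost_equal
from scipy import sparse
from sklearn import svm
from sklearn.datasets import make_blobs

X_dense, _ = make_blobs(n_samples=100, centers=10, random_state=0)
X_csr = sparse.csr_matrix(X_dense)

# Fit the same configuration on a dense array and on its CSR counterpart
# (first 80 rows, as in the blobs entry of the datasets list above).
dense_clf = svm.OneClassSVM(gamma=1, kernel="rbf").fit(X_dense[:80])
sparse_clf = svm.OneClassSVM(gamma=1, kernel="rbf").fit(X_csr[:80])

# Attributes fitted on sparse input come back as sparse matrices.
assert_array_almost_equal(dense_clf.support_vectors_,
                          sparse_clf.support_vectors_.toarray())
assert_array_almost_equal(dense_clf.dual_coef_,
                          sparse_clf.dual_coef_.toarray())
assert_array_almost_equal(dense_clf.intercept_, sparse_clf.intercept_)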
Example #3
def test_oneclass():
    # Test OneClassSVM
    clf = svm.OneClassSVM()
    clf.fit(X)
    pred = clf.predict(T)

    assert_array_equal(pred, [1, -1, -1])
    assert pred.dtype == np.dtype('intp')
    assert_array_almost_equal(clf.intercept_, [-1.218], decimal=3)
    assert_array_almost_equal(clf.dual_coef_, [[0.750, 0.750, 0.750, 0.750]],
                              decimal=3)
    with pytest.raises(AttributeError):
        clf.coef_  # coef_ is only defined for linear kernels
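For context on the AttributeError assertion above, a hedged sketch with toy two-cluster data (public API only): coef_ is derived from dual_coef_ and support_vectors_ and exists only for kernel="linear"; with the default RBF kernel the attribute access raises.

import numpy as np
from sklearn import svm

X_train = np.array([[-2., -1.], [-1., -1.], [-1., -2.],
                    [1., 1.], [1., 2.], [2., 1.]])

linear_clf = svm.OneClassSVM(kernel="linear").fit(X_train)
print(linear_clf.coef_)                    # primal weights of the hyperplane

rbf_clf = svm.OneClassSVM().fit(X_train)   # default kernel="rbf"
try:
    _ = rbf_clf.coef_                      # raises AttributeError
except AttributeError as exc:
    print(exc)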
Example #4
def test_immutable_coef_property():
    # Check that primal coef_ modifications are not silently ignored
    svms = [
        svm.SVC(kernel='linear').fit(iris.data, iris.target),
        svm.NuSVC(kernel='linear').fit(iris.data, iris.target),
        svm.SVR(kernel='linear').fit(iris.data, iris.target),
        svm.NuSVR(kernel='linear').fit(iris.data, iris.target),
        svm.OneClassSVM(kernel='linear').fit(iris.data),
    ]
    for clf in svms:
        with pytest.raises(AttributeError):
            clf.__setattr__('coef_', np.arange(3))
        with pytest.raises((RuntimeError, ValueError)):
            clf.coef_.__setitem__((0, 0), 0)
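A hedged standalone variant of the same immutability check on one estimator (iris via the public loader, plain try/except instead of pytest): coef_ is a computed property, so rebinding it raises AttributeError, and modifying the returned array in place is rejected as well.

import numpy as np
from sklearn import svm
from sklearn.datasets import load_iris

iris = load_iris()
clf = svm.OneClassSVM(kernel="linear").fit(iris.data)

try:
    clf.coef_ = np.arange(3)               # rebinding the property
except AttributeError:
    print("coef_ cannot be reassigned")

try:
    clf.coef_[0, 0] = 0.0                  # in-place modification
except (RuntimeError, ValueError):
    print("coef_ cannot be modified in place")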
Example #5
def test_n_support_oneclass_svr():
    # Make sure n_support_ is correct for OneClassSVM and SVR (it used to be
    # left uninitialized).
    # Non-regression test for issue #14774.
    X = np.array([[0], [0.44], [0.45], [0.46], [1]])
    clf = svm.OneClassSVM()
    assert not hasattr(clf, 'n_support_')
    clf.fit(X)
    assert clf.n_support_ == clf.support_vectors_.shape[0]
    assert clf.n_support_.size == 1
    assert clf.n_support_ == 3

    y = np.arange(X.shape[0])
    reg = svm.SVR().fit(X, y)
    assert reg.n_support_ == reg.support_vectors_.shape[0]
    assert reg.n_support_.size == 1
    assert reg.n_support_ == 4
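For context, a hedged sketch (toy data; scikit-learn versions that include the fix for issue #14774) contrasting the shapes: SVC exposes one support-vector count per class, while OneClassSVM and SVR expose a single count.

import numpy as np
from sklearn import svm

X = np.array([[0.0], [0.44], [0.45], [0.46], [1.0]])
y_class = np.array([0, 0, 1, 1, 1])
y_reg = np.arange(X.shape[0], dtype=float)

print(svm.SVC().fit(X, y_class).n_support_)   # one entry per class
print(svm.OneClassSVM().fit(X).n_support_)    # single entry
print(svm.SVR().fit(X, y_reg).n_support_)     # single entry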
Example #6
def test_oneclass_score_samples():
    X_train = [[1, 1], [1, 2], [2, 1]]
    clf = svm.OneClassSVM(gamma=1).fit(X_train)
    assert_array_equal(clf.score_samples([[2., 2.]]),
                       clf.decision_function([[2., 2.]]) + clf.offset_)
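Finally, a hedged sketch of the identity this test checks, with the same toy training set and illustrative query points: decision_function(X) equals score_samples(X) minus offset_, so the learned boundary sits where decision_function crosses zero.

import numpy as np
from sklearn import svm

X_train = [[1, 1], [1, 2], [2, 1]]
clf = svm.OneClassSVM(gamma=1).fit(X_train)

X_query = np.array([[2.0, 2.0], [10.0, 10.0]])
scores = clf.score_samples(X_query)           # raw sample scores
shifted = clf.decision_function(X_query)      # raw scores minus offset_
assert np.allclose(shifted, scores - clf.offset_)
print(clf.predict(X_query))                   # +1 = inlier, -1 = outlier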