Example #1
def test_compact_random_feature_random_fourier(down):
    for gamma in [0.1, 1, 10]:
        # approximate kernel mapping
        transform_up = RandomFourier(n_components=100,
                                     gamma=gamma,
                                     random_state=0)
        transform_down = down(n_components=50, random_state=0)
        X_trans_naive = transform_down.fit_transform(
            transform_up.fit_transform(X))

        transform_up = RandomFourier(n_components=100,
                                     gamma=gamma,
                                     random_state=0)
        transform_down = down(n_components=50, random_state=0)
        transformer = CompactRandomFeature(transformer_up=transform_up,
                                           transformer_down=transform_down)
        X_trans = transformer.fit_transform(X)
        assert_allclose(X_trans_naive, X_trans)
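
For reference, what this test asserts is that CompactRandomFeature(transformer_up=..., transformer_down=...) produces the same features as applying the up-projection and the down-projection back to back. Below is a minimal NumPy/scikit-learn sketch of that up-then-down composition, using sklearn's RBFSampler as the up map and a plain Gaussian random matrix as the down map; this is an illustration of the idea, not pyrfm's implementation, and the shapes and gamma value are assumptions.

import numpy as np
from sklearn.kernel_approximation import RBFSampler

rng = np.random.RandomState(0)
X_demo = rng.random_sample((30, 4))

# up-projection to 100 random features, then a random down-projection to 50
up = RBFSampler(n_components=100, gamma=1.0, random_state=0)
Z_up = up.fit_transform(X_demo)                 # shape (30, 100)
R = rng.normal(size=(100, 50)) / np.sqrt(50)    # compresses 100 features to 50
Z_compact = Z_up @ R                            # compact random features, shape (30, 50)
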
Example #2
def test_regressor_rf(normalize, loss):
    rng = np.random.RandomState(0)
    # approximate kernel mapping
    transformer = RandomFourier(n_components=100, random_state=0, gamma=10)
    X_trans = transformer.fit_transform(X)
    y, coef = generate_target(X_trans, rng, -0.1, 0.1)
    y_train = y[:n_train]
    y_test = y[n_train:]
    _test_regressor(transformer, X_train, y_train, X_test, y_test, X_trans,
                    normalize=normalize, loss=loss)
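
Examples #2 through #4 depend on module-level fixtures that are outside this excerpt: X, Y, X_train, X_test, n_train, generate_target, plus the _test_regressor and _test_classifier helpers. The sketch below is a rough guess at what the data-related pieces might look like; the shapes, value ranges, and the body of generate_target are assumptions, not pyrfm's actual test setup.

import numpy as np

data_rng = np.random.RandomState(42)
n_train, n_test, n_features = 500, 100, 10

X = data_rng.random_sample((n_train + n_test, n_features)) * 2 - 1  # features in [-1, 1]
Y = data_rng.random_sample((n_test, n_features)) * 2 - 1            # second sample, used in Example #5
X_train, X_test = X[:n_train], X[n_train:]


def generate_target(X_trans, rng, low, high):
    # Guess at the helper: sample coefficients in [low, high] and build a
    # linear target on the transformed features.
    coef = rng.uniform(low, high, size=X_trans.shape[1])
    return np.dot(X_trans, coef), coef
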
Example #3
def test_sgd_regressor_rf_use_offset(loss):
    rng = np.random.RandomState(0)
    transform = RandomFourier(n_components=100,
                              random_state=0,
                              gamma=10,
                              use_offset=True)
    X_trans = transform.fit_transform(X)
    y, coef = generate_target(X_trans, rng, -0.1, 0.1)
    y_train = y[:n_train]
    y_test = y[n_train:]
    _test_regressor(transform, y_train, y_test, X_trans, loss=loss)
Example #4
def test_sgd_classifier_rf(loss):
    rng = np.random.RandomState(0)
    transform = RandomFourier(n_components=100, random_state=0, gamma=10)
    X_trans = transform.fit_transform(X)
    y, coef = generate_target(X_trans, rng, -0.1, 0.1)
    y_train = y[:n_train]
    y_test = y[n_train:]
    _test_classifier(transform,
                     np.sign(y_train),
                     np.sign(y_test),
                     X_trans,
                     max_iter=500,
                     loss=loss)
Example #5
def test_random_fourier(gamma, n_components, use_offset):
    for gamma, n_components in zip([10, 100], [2048, 4096]):
        # compute exact kernel
        kernel = rbf_kernel(X, Y, gamma)
        # approximate kernel mapping
        rf_transform = RandomFourier(n_components=n_components,
                                     gamma=gamma,
                                     use_offset=True,
                                     random_state=0)
        X_trans = rf_transform.fit_transform(X)
        Y_trans = rf_transform.transform(Y)
        kernel_approx = np.dot(X_trans, Y_trans.T)

        error = kernel - kernel_approx
        assert np.abs(np.mean(error)) < 0.01  # approximation is close to unbiased
        error = np.abs(error)  # take absolute errors before the max/mean checks
        assert np.max(error) < 0.1  # nothing too far off
        assert np.mean(error) < 0.05  # mean absolute error is fairly small
        # for sparse matrix
        X_trans_sp = rf_transform.transform(csr_matrix(X))
        assert_allclose_dense_sparse(X_trans, X_trans_sp)
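
The assertions above exercise the basic random Fourier feature guarantee: with z(x) = sqrt(2/D) * cos(W x + b), W drawn from N(0, 2*gamma*I) and b from U[0, 2*pi] (the cos(W x + b) form presumably corresponds to use_offset=True), the inner product z(x) . z(y) is an unbiased Monte Carlo estimate of the RBF kernel exp(-gamma * ||x - y||^2). The following is a self-contained NumPy check of that claim, independent of pyrfm; the data and sizes here are made up.

import numpy as np
from sklearn.metrics.pairwise import rbf_kernel

rng = np.random.RandomState(0)
gamma, D, d = 10.0, 4096, 5
A = rng.random_sample((60, d))
B = rng.random_sample((40, d))

# W ~ N(0, 2*gamma*I), b ~ U[0, 2*pi]; z(x) = sqrt(2/D) * cos(x W + b)
W = rng.normal(scale=np.sqrt(2 * gamma), size=(d, D))
b = rng.uniform(0, 2 * np.pi, size=D)
Z_A = np.sqrt(2.0 / D) * np.cos(A @ W + b)
Z_B = np.sqrt(2.0 / D) * np.cos(B @ W + b)

approx = Z_A @ Z_B.T
exact = rbf_kernel(A, B, gamma=gamma)
print(np.abs(approx - exact).max())   # shrinks roughly like 1/sqrt(D)
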
Example #6
    assert score_lkrf_weak >= score_lkrf

    # remove bases
    n_nz = np.sum(lkrf.importance_weights_ != 0)
    print(n_nz)

    if lkrf.remove_bases():
        X_trans_removed = lkrf.transform(X)
        assert_almost_equal(X_trans_removed.shape[1], n_nz)
        indices = np.nonzero(lkrf.importance_weights_)[0]
        assert_almost_equal(X_trans_removed, X_trans[:, indices])
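
The final assertions above check a simple invariant: after remove_bases(), transform(X) should keep exactly the columns whose learned importance weight is nonzero. Here is a NumPy-only illustration of that column-selection check; the weights and the feature matrix are made up.

import numpy as np

rng = np.random.RandomState(0)
X_trans_full = rng.random_sample((5, 8))          # stand-in for the full random-feature map
importance_weights = np.array([0.3, 0.0, 1.2, 0.0, 0.0, 0.7, 0.0, 0.1])

indices = np.nonzero(importance_weights)[0]       # bases that survive pruning
X_trans_removed = X_trans_full[:, indices]        # what transform(X) should return afterwards
assert X_trans_removed.shape[1] == np.sum(importance_weights != 0)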
 

params = [
    RBFSampler(n_components=128, random_state=0),
    RandomFourier(n_components=128, random_state=0),
    RandomFourier(n_components=128, random_state=0, use_offset=True),
    OrthogonalRandomFeature(n_components=128, random_state=0),
    OrthogonalRandomFeature(n_components=128, random_state=0,
                            use_offset=True),
    RandomMaclaurin(random_state=0),
    RandomKernel(random_state=0)
]


@pytest.mark.parametrize("transformer", params)
def test_lkrf_chi2(transformer, rho=1):
    _test_learning_kernel_with_random_feature('chi2', transformer, rho)


def test_lkrf_chi2_origin():