# Parametrization reconstructed: 'lars' and 'omp' are the coding methods
# that do not support the positive constraint.
@pytest.mark.parametrize("algo", ['lars', 'omp'])
def test_sparse_encode_unavailable_positivity(algo):
    n_components = 12
    rng = np.random.RandomState(0)
    V = rng.randn(n_components, n_features)  # random init
    V /= np.sum(V**2, axis=1)[:, np.newaxis]
    err_msg = "Positive constraint not supported for '{}' coding method."
    err_msg = err_msg.format(algo)
    with pytest.raises(ValueError, match=err_msg):
        sparse_encode(X, V, algorithm=algo, positive=True)
def test_sparse_encode_input():
    n_components = 100
    rng = np.random.RandomState(0)
    V = rng.randn(n_components, n_features)  # random init
    V /= np.sum(V**2, axis=1)[:, np.newaxis]
    Xf = check_array(X, order='F')
    for algo in ('lasso_lars', 'lasso_cd', 'lars', 'omp', 'threshold'):
        a = sparse_encode(X, V, algorithm=algo)
        b = sparse_encode(Xf, V, algorithm=algo)
        assert_array_almost_equal(a, b)
def test_sparse_encode_error():
    n_components = 12
    rng = np.random.RandomState(0)
    V = rng.randn(n_components, n_features)  # random init
    V /= np.sum(V**2, axis=1)[:, np.newaxis]
    code = sparse_encode(X, V, alpha=0.001)
    assert not np.all(code == 0)
    assert np.sqrt(np.sum((np.dot(code, V) - X)**2)) < 0.1
def test_sparse_encode_shapes():
    n_components = 12
    rng = np.random.RandomState(0)
    V = rng.randn(n_components, n_features)  # random init
    V /= np.sum(V**2, axis=1)[:, np.newaxis]
    for algo in ('lasso_lars', 'lasso_cd', 'lars', 'omp', 'threshold'):
        code = sparse_encode(X, V, algorithm=algo)
        assert code.shape == (n_samples, n_components)
# Parametrization reconstructed: these coding methods support the positive
# constraint.
@pytest.mark.parametrize("algo", ['lasso_lars', 'lasso_cd', 'threshold'])
@pytest.mark.parametrize("positive", [False, True])
def test_sparse_encode_positivity(algo, positive):
    n_components = 12
    rng = np.random.RandomState(0)
    V = rng.randn(n_components, n_features)  # random init
    V /= np.sum(V**2, axis=1)[:, np.newaxis]
    code = sparse_encode(X, V, algorithm=algo, positive=positive)
    if positive:
        assert (code >= 0).all()
    else:
        assert (code < 0).any()
def test_sparse_encode_shapes_omp():
    rng = np.random.RandomState(0)
    algorithms = ['omp', 'lasso_lars', 'lasso_cd', 'lars', 'threshold']
    for n_components, n_samples in itertools.product([1, 5], [1, 9]):
        X_ = rng.randn(n_samples, n_features)
        dictionary = rng.randn(n_components, n_features)
        for algorithm, n_jobs in itertools.product(algorithms, [1, 3]):
            code = sparse_encode(X_, dictionary, algorithm=algorithm,
                                 n_jobs=n_jobs)
            assert code.shape == (n_samples, n_components)
def test_dict_learning_online_partial_fit():
    n_components = 12
    rng = np.random.RandomState(0)
    V = rng.randn(n_components, n_features)  # random init
    V /= np.sum(V**2, axis=1)[:, np.newaxis]
    dict1 = MiniBatchDictionaryLearning(n_components, n_iter=10 * len(X),
                                        batch_size=1, alpha=1, shuffle=False,
                                        dict_init=V,
                                        random_state=0).fit(X)
    dict2 = MiniBatchDictionaryLearning(n_components, alpha=1, n_iter=1,
                                        dict_init=V, random_state=0)
    for i in range(10):
        for sample in X:
            dict2.partial_fit(sample[np.newaxis, :])

    assert not np.all(sparse_encode(X, dict1.components_, alpha=1) == 0)
    assert_array_almost_equal(dict1.components_, dict2.components_,
                              decimal=2)
def test_unknown_method():
    n_components = 12
    rng = np.random.RandomState(0)
    V = rng.randn(n_components, n_features)  # random init
    with pytest.raises(ValueError):
        sparse_encode(X, V, algorithm="<unknown>")
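
# Note: the tests above rely on imports and module-level fixtures (X,
# n_samples, n_features) defined earlier in the test file. A minimal sketch
# of that assumed setup is given below for reference; the exact shapes and
# random seed are illustrative assumptions, not the definitive values used
# by the project.
#
# import itertools
#
# import numpy as np
# import pytest
# from numpy.testing import assert_array_almost_equal
#
# from sklearn.decomposition import MiniBatchDictionaryLearning, sparse_encode
# from sklearn.utils import check_array
#
# rng_global = np.random.RandomState(0)
# n_samples, n_features = 10, 8
# X = rng_global.randn(n_samples, n_features)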