예제 #1
0
def test_lda_preplexity():
    """Batch-trained LDA: preplexity must not increase with more iterations.

    Fits two identically-seeded models for 1 and 10 iterations and checks
    that the longer-trained model's preplexity is no worse (i.e. not higher).
    """
    n_topics, alpha, eta, X = _build_sparse_mtx()

    # Same seed for both models so the only difference is training length.
    model_short = OnlineLDA(n_topics=n_topics, alpha=alpha, eta=eta,
                            random_state=0)
    model_long = OnlineLDA(n_topics=n_topics, alpha=alpha, eta=eta,
                           random_state=0)

    doc_topic_short = model_short.fit_transform(X, max_iters=1)
    prep_short = model_short.preplexity(X, doc_topic_short, sub_sampling=False)

    doc_topic_long = model_long.fit_transform(X, max_iters=10)
    prep_long = model_long.preplexity(X, doc_topic_long, sub_sampling=False)

    # More training iterations should yield equal or lower preplexity.
    assert_greater_equal(prep_short, prep_long)
예제 #2
0
def test_lda_preplexity():
    """Batch-trained LDA: preplexity must not increase with more iterations.

    Trains identically-seeded models for increasing iteration counts and
    asserts the preplexity after 1 iteration is >= that after 10 iterations.
    """
    n_topics, alpha, eta, X = _build_sparse_mtx()

    preplexities = []
    for n_iters in (1, 10):
        # Fresh, identically-seeded model per iteration count so the only
        # difference between runs is training length.
        lda = OnlineLDA(n_topics=n_topics, alpha=alpha, eta=eta,
                        random_state=0)
        doc_distr = lda.fit_transform(X, max_iters=n_iters)
        preplexities.append(lda.preplexity(X, doc_distr, sub_sampling=False))

    # Longer training should produce equal or lower preplexity.
    assert_greater_equal(preplexities[0], preplexities[1])