Example #1
    # tail of the corpus-level ELBO: sum the per-document local ELBO over every labeled document
    return sum(topiclib.partial_slda_local_elbo(v.labeled[d], v.y[d], v.alphaL, v.beta[-v.Kl:], v.gammaL[d], v.phiL[d], v.eta, v.sigma_squared) for d in xrange(len(v.labeled)))

# bind the TLC-specific E step, M step, ELBO, and progress printer into the generic variational EM driver
run_tlc = partial(graphlib.run_variational_em,
                  e_step_func=tlc_e_step,
                  m_step_func=tlc_m_step,
                  global_elbo_func=tlc_global_elbo,
                  print_func=tlc_print_func)


            
if __name__=='__main__':
    # choose which synthetic dataset to load (the later assignment wins)
    #dirname = 'synthtlc'
    dirname = 'synthbig'

    # use my tlc synthetically generated dataset
    documents = topiclib.read_sparse(dirname + '/documents.dat')
    comments = topiclib.read_sparse(dirname + '/comments.dat')
    labeled_documents = topiclib.read_sparse(dirname + '/labeled.dat')
    background = topiclib.read_sparse(dirname + '/background.dat')

    y = np.loadtxt(dirname + '/yL.npy')
    real_data = (documents, comments, labeled_documents, background, y)

    var = TLCVars(real_data, Ku=29, Ks=5, Kb=24)

    try:
        output = run_tlc(var)
    except Exception as e:
        print(e)
        import pdb; pdb.post_mortem()
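
The `run_tlc` definition above only binds model-specific callbacks into `graphlib.run_variational_em`. For orientation, here is a minimal sketch of the coordinate-ascent loop such a driver typically runs; the function name, callback signatures, stopping rule, and return value below are assumptions for illustration, not `graphlib`'s actual API.

def run_variational_em_sketch(v, e_step_func, m_step_func, global_elbo_func,
                              print_func=None, max_iter=100, tol=1e-4):
    """Generic coordinate-ascent variational EM loop (illustrative only)."""
    elbo = float('-inf')
    for _ in xrange(max_iter):
        e_step_func(v)                   # update per-document variational parameters
        m_step_func(v)                   # update corpus-level model parameters
        new_elbo = global_elbo_func(v)   # evidence lower bound after both updates
        if print_func is not None:
            print_func(v)                # progress report (signature is an assumption)
        if elbo != float('-inf') and abs(new_elbo - elbo) < tol * abs(elbo):
            break                        # relative ELBO improvement below tol: converged
        elbo = new_elbo
    return v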
Example #2
                # fragment: tail of a hand-built test-data tuple -- the
                # per-document response values y for six synthetic documents
                [
                 1.7,
                 2.0,
                 1.2,
                 4.8,
                 5.0,
                 4.2,
                ])

    
    #var = SupervisedLDAVars(test_data, K=3)
    #var = SupervisedLDAVars(noisy_test_data, K=3)



    # use my big generated dataset
    n = 9994    # not used below; the slices load only the first 100 labeled documents

    labeled_documents = topiclib.read_sparse('data/synthbigtlc/labeled.dat')[:100]
    y = np.loadtxt('data/synthbigtlc/yL.npy')[:100]
    real_data = (labeled_documents, y)

    var = PartialSupervisedLDAVars(real_data, Ks=5, Kb=20)

    try:
        output = run_partial_slda(var)
    except Exception as e:
        print(e)
        import pdb; pdb.post_mortem()
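
This example regresses a real-valued response `y` on each labeled document's topics. As a reference for what `eta` and `sigma_squared` in these snippets parameterize, the sketch below shows the standard sLDA response model (Blei & McAuliffe): each response is modeled as Normal(eta . zbar, sigma_squared), where zbar is the document's mean topic assignment. The helper name and the use of `phi` as a stand-in for zbar are illustrative assumptions, not part of topiclib.

import numpy as np

def predicted_response(phi_d, eta):
    """phi_d: (N_d, K) array of per-word variational topic probabilities for one document."""
    zbar = phi_d.mean(axis=0)        # empirical topic proportions of the document
    return float(np.dot(eta, zbar))  # point estimate of the response y_d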

Example #3
                 # fragment: last document of a hand-built corpus, as (word_id, count) pairs
                 [(5, 2), (6, 1), (8, 1), (9, 1)],
                ],
                # per-document response values y for the six synthetic documents
                [
                 1.7,
                 2.0,
                 1.2,
                 4.8,
                 5.0,
                 4.2,
                ])

    
    #var = SupervisedLDAVars(test_data, K=3)
    #var = SupervisedLDAVars(noisy_test_data, K=3)



    # use my big generated dataset
    labeled_documents = topiclib.read_sparse('synthtlc/labeled.dat')
    y = np.loadtxt('synthtlc/yL.npy')
    real_data = (labeled_documents, y)

    var = SupervisedLDAVars(real_data, K=13)

    try:
        output = run_slda(var)
    except Exception as e:
        print(e)
        import pdb; pdb.post_mortem()
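
The literal fragment at the top of this example suggests the sparse format that `topiclib.read_sparse` loads: one document per entry, each a list of (word_id, count) pairs, paired elementwise with a response vector `y`. Below is a tiny hand-built dataset in that same format; treating this as exactly what `read_sparse` returns is an assumption inferred from these snippets, not a documented guarantee of topiclib.

import numpy as np

toy_labeled_documents = [
    [(0, 3), (2, 1), (5, 2)],   # document 0: word 0 occurs 3 times, word 2 once, word 5 twice
    [(1, 1), (4, 4), (6, 2)],   # document 1
]
toy_y = np.array([1.7, 2.0])    # one real-valued response per document

toy_data = (toy_labeled_documents, toy_y)
#var = SupervisedLDAVars(toy_data, K=3)   # same call pattern as the examples above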