def runBatchEP2(workQueue, lockingPost):
    """Worker loop: drain EP2-LDA batch jobs from *workQueue* until the "none" sentinel.

    Each job is a tuple (W, K, docs, alpha, maxiters, thr, useNewton). For every
    job the current shared posterior is snapshotted, an EP2 model is trained on
    the batch, and only the resulting delta (trained lambda minus the model's
    prior eta) is applied back to the shared, lock-protected posterior.

    NOTE(review): a sibling one-shot function with the same name exists in this
    file; if both live in one module the later definition shadows this one.
    """
    processed = 0
    for job in iter(workQueue.get, "none"):
        (W, K, docs, alpha, maxiters, thr, useNewton) = job
        # Snapshot the shared parameter so training operates on a private copy.
        snapshot = copy.deepcopy(lockingPost.value())
        model = ep2_lda.EP2_LDA(W, K, docs, alpha, snapshot, useNewton)
        trained = model.train(maxiters, thr)
        # Apply only the correction relative to the model's prior eta, so
        # concurrent workers' contributions compose additively.
        lockingPost.increment(trained - model._eta)
        workQueue.task_done()
        processed += 1
        print(f"\tdone {processed}")
def runBatchEP2(W, K, docs, alpha, lam, maxiters, threshold, useNewton):
    """Train EP2-LDA on a single batch and return the updated lambda.

    Args:
        W: vocabulary size.
        K: number of topics.
        docs: the batch of documents to fit.
        alpha: Dirichlet prior on topic proportions.
        lam: current lambda (topic-word) parameter used to initialize the model.
        maxiters: maximum training iterations.
        threshold: convergence threshold for training.
        useNewton: whether the EP2 model uses Newton updates.

    Returns:
        The trained lambda produced by EP2_LDA.train.
    """
    model = ep2_lda.EP2_LDA(W, K, docs, alpha, lam, useNewton)
    return model.train(maxiters, threshold)
def update_lambda(self, docs):
    """Refit this object's lambda on *docs* via EP2 and return (alpha, lambda).

    Builds a fresh EP2_LDA model seeded with the current self._lambda, trains
    it with the instance's iteration/threshold settings, stores the trained
    lambda back on the instance, and returns the (alpha, lambda) pair.
    """
    model = ep2_lda.EP2_LDA(
        self._W,
        self._K,
        docs,
        self._alpha,
        self._lambda,
        self._useNewton,
    )
    # Persist the trained parameter so subsequent updates continue from it.
    self._lambda = model.train(self._maxiters, self._threshold)
    return (self._alpha, self._lambda)