def update_global_params_VB(self, SS, mergeCompA=None, mergeCompB=None,
                            **kwargs):
    ''' Update global parameters (rho, omega, theta) given suff stats SS.
    '''
    self.K = SS.K
    if not hasattr(self, 'rho') or self.rho.size != SS.K:
        # Big change from previous model is being proposed.
        # We'll init rho from scratch, and need more iters to improve.
        nGlobalIters = self.nGlobalItersBigChange
    else:
        # Small change required. Current rho is a good initialization.
        nGlobalIters = self.nGlobalIters

    # Special update case for merges:
    # fast, heuristic update for the new rho given its original value.
    if mergeCompA is not None:
        beta = OptimizerRhoOmega.rho2beta_active(self.rho)
        beta[mergeCompA] += beta[mergeCompB]
        beta = np.delete(beta, mergeCompB, axis=0)
        self.rho = OptimizerRhoOmega.beta2rho(beta, SS.K)
        omega = self.omega
        omega[mergeCompA] += omega[mergeCompB]
        self.omega = np.delete(omega, mergeCompB, axis=0)
        # TODO: think about smarter init for rho/omega?

    # Update theta with recently updated info from suff stats
    self.transTheta, self.startTheta = self._calcTheta(SS)

    for giter in range(nGlobalIters):
        # Update rho, omega through numerical optimization
        self.rho, self.omega = self.find_optimum_rhoOmega(**kwargs)
        # Update theta again to reflect the new rho, omega
        self.transTheta, self.startTheta = self._calcTheta(SS)
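For intuition, the merge shortcut above operates in stick-breaking weight space: rho is mapped to the active weights beta, the mass of the two merging components is pooled, and the pooled vector is mapped back to rho. The sketch below is a minimal, self-contained illustration of that round trip; the local `rho2beta_active` / `beta2rho` helpers are assumed re-implementations of the standard stick-breaking transform, not `OptimizerRhoOmega`'s own code.

```python
import numpy as np

def rho2beta_active(rho):
    # Stick-breaking weights: beta_k = rho_k * prod_{j<k} (1 - rho_j)
    leftover = np.hstack([1.0, np.cumprod(1.0 - rho[:-1])])
    return rho * leftover

def beta2rho(beta, K):
    # Inverse map: rho_k = beta_k / (1 - sum_{j<k} beta_j)
    remaining = 1.0 - np.hstack([0.0, np.cumsum(beta[:-1])])
    return beta[:K] / remaining[:K]

# Merge components A=1 and B=2 by pooling their beta mass,
# then convert back to a valid rho of size K-1.
rho = np.asarray([0.5, 0.4, 0.3, 0.2])
beta = rho2beta_active(rho)
beta[1] += beta[2]
beta = np.delete(beta, 2)
rho_new = beta2rho(beta, beta.size)
print(rho_new)  # every entry remains in (0, 1)
```

This heuristic only provides a starting point; the subsequent `find_optimum_rhoOmega` iterations refine rho and omega numerically.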
def _convert_beta2rhoomega(self, beta, nDoc=10):
    ''' Find vectors rho, omega that are probable given beta.

    Returns
    -------
    rho : 1D array, size K
    omega : 1D array, size K
    '''
    assert abs(np.sum(beta) - 1.0) < 0.001
    rho = OptimizerRhoOmega.beta2rho(beta, self.K)
    omega = (nDoc + self.gamma) * np.ones(rho.size)
    return rho, omega
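The choice of omega deserves a note: in this model family, rho and omega typically parameterize variational factors of the form q(u_k) = Beta(rho_k * omega_k, (1 - rho_k) * omega_k), so omega acts as a concentration (pseudo-count). The snippet below is a small sketch of that reading; the values of `gamma`, `nDoc`, and `rho` are illustrative assumptions, not values from the source.

```python
import numpy as np

# Illustrative values only: gamma (top-level concentration), nDoc, rho.
gamma = 10.0
nDoc = 100
rho = np.asarray([0.5, 0.4, 0.3])

omega = (nDoc + gamma) * np.ones(rho.size)
# Under q(u_k) = Beta(rho_k * omega_k, (1 - rho_k) * omega_k), each factor
# has mean rho_k and effective sample size omega_k = nDoc + gamma,
# i.e. rho is treated as if estimated from nDoc documents on top of the
# prior's pseudo-count gamma.
```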