def calc_local_params(self, Data, LP, nnzPerRowLP=0, **kwargs):
    ''' Compute local parameters for each data item and component.

    Parameters
    -------
    Data : bnpy.data.DataObj subclass
    LP : dict
        Local parameters as key-value string/array pairs
        * E_log_soft_ev : 2D array, N x K
            E_log_soft_ev[n,k] = log p(data obs n | comp k)
    nnzPerRowLP : int, optional
        If strictly between 0 and K, produce sparse assignments
        with this many nonzero responsibilities per row instead of
        a dense N x K resp array.

    Returns
    -------
    LP : dict
        Local parameters, with updated fields
        * resp : 2D array, size N x K array (dense case)
            Posterior responsibility each comp has for each item
            resp[n, k] = p(z[n] = k | x[n])
        * spR, nnzPerRow : (sparse case) sparse responsibilities
        * evidence : scalar (EM dense case only)
    '''
    lpr = LP['E_log_soft_ev']
    K = lpr.shape[1]

    def _store_sparse_resp():
        # Shared SPARSE-assignment path (identical for EM and VB):
        # keep only the top nnzPerRowLP entries in each row.
        LP['nnzPerRow'] = nnzPerRowLP
        LP['spR'] = sparsifyLogResp(lpr, nnzPerRow=nnzPerRowLP)
        assert np.all(np.isfinite(LP['spR'].data))

    # Guard with truthiness first so None / 0 fall through to dense.
    useSparse = nnzPerRowLP and 0 < nnzPerRowLP < K

    if self.inferType.count('EM') > 0:
        # Point-estimate path (EM): add log of current weight estimates.
        # Tiny constant avoids log(0) for empty components.
        lpr += np.log(self.w + 1e-100)
        if useSparse:
            _store_sparse_resp()
        else:
            # Normalize each row in log space, then exponentiate
            # in place so no new N x K allocation occurs.
            lprPerItem = logsumexp(lpr, axis=1)
            lpr -= lprPerItem[:, np.newaxis]
            np.exp(lpr, out=lpr)
            LP['resp'] = lpr
            LP['evidence'] = lprPerItem.sum()
    else:
        # Full Bayesian path (VB or GS): add expected log weights.
        lpr += self.Elogw
        if useSparse:
            _store_sparse_resp()
        else:
            # DENSE: exp + row-normalize, computed in a numerically
            # safe way and in-place so no new allocations occur.
            NumericUtil.inplaceExpAndNormalizeRows(lpr)
            LP['resp'] = lpr
            assert np.allclose(lpr.sum(axis=1), 1)
    return LP
def calcLocalParams(Data, LP, Elogbeta=None, nnzPerRowLP=None, **kwargs):
    ''' Compute local parameters for each data item.

    Parameters
    -------
    Data : bnpy.data.DataObj subclass
    LP : dict
        Local parameters as key-value string/array pairs
        * E_log_soft_ev : 2D array, N x K
            E_log_soft_ev[n,k] = log p(data obs n | comp k)
    Elogbeta : 1D array, size K
        Expected log appearance probabilities, added to each row.
    nnzPerRowLP : int, optional
        If strictly between 0 and K, produce sparse assignments.

    Returns
    -------
    LP : dict
        Local parameters, with updated fields
        * resp : 2D array, size N x K array
            Posterior responsibility each comp has for each item
            resp[n, k] = p(z[n] = k | x[n])
    '''
    logResp = LP['E_log_soft_ev']
    logResp += Elogbeta
    K = logResp.shape[1]
    wantSparse = nnzPerRowLP and (0 < nnzPerRowLP < K)
    if wantSparse:
        # SPARSE assignments: keep top nnzPerRowLP entries per row.
        LP['spR'] = sparsifyLogResp(logResp, nnzPerRow=nnzPerRowLP)
        assert np.all(np.isfinite(LP['spR'].data))
        LP['nnzPerRow'] = nnzPerRowLP
    else:
        # DENSE assignments: exponentiate and row-normalize in place
        # (numerically stable; subtracts the row max internally).
        NumericUtil.inplaceExpAndNormalizeRows(logResp)
        LP['resp'] = logResp
    return LP
def calcInitSparseResp(LP, alphaEbeta, nnzPerRowLP=0, **kwargs):
    ''' Compute initial sparse responsibilities.

    Parameters
    -------
    LP : dict
        Must contain 'ElogphiT', a 2D array of expected log likelihoods.
    alphaEbeta : 1D array
        Prior weights; log is added to every row as a bias.

    Returns
    -------
    init_spR : sparse responsibilities with nnzPerRowLP entries per row.
    '''
    assert 'ElogphiT' in LP
    # Score each (item, comp) pair; fresh array so LP is left untouched.
    scoreMat = LP['ElogphiT'] + np.log(alphaEbeta)[np.newaxis, :]
    # Keep only the top-L entries of each row.
    return sparsifyLogResp(scoreMat, nnzPerRowLP)