Example #1
 def calc_local_params(self, Data, LP, **kwargs):
     """ Calculate local parameters for each data item and each component.    
     This is part of the E-step.
     
     Args
     -------
     Data : bnpy data object with Data.nObs observations
     LP : local param dict with fields
           E_log_soft_ev : Data.nObs x K array
               E_log_soft_ev[n,k] = log p(data obs n | comp k)
     
     Returns
     -------
     LP : local param dict with fields
           resp : Data.nObs x K array whose rows sum to one
           resp[n,k] = posterior responsibility that comp. k has for data n                
 """
     lpr = LP["E_log_soft_ev"]
     if self.inferType.count("VB") > 0:
         lpr += self.Elogw
         # Calculate exp in numerically stable manner (first subtract the max)
         #  perform this in-place so no new allocations occur
         lpr -= np.max(lpr, axis=1)[:, np.newaxis]
         np.exp(lpr, out=lpr)
         # Normalize, so rows sum to one
         lpr /= lpr.sum(axis=1)[:, np.newaxis]
     elif self.inferType == "EM":
         # Point estimates: add the log of the current mixture weights
         lpr += np.log(self.w)
         # Normalize each row via logsumexp; the per-item log marginals sum to the evidence
         lprPerItem = logsumexp(lpr, axis=1)
         np.exp(lpr - lprPerItem[:, np.newaxis], out=lpr)
         LP["evidence"] = lprPerItem.sum()
     LP["resp"] = lpr
     assert np.allclose(lpr.sum(axis=1), 1)
     return LP
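
For comparison, here is a minimal self-contained sketch of the same exp-and-normalize arithmetic on toy inputs. The arrays below and the scipy.special.logsumexp import are assumptions for illustration only; the method above relies on whatever logsumexp its own module imports.

    import numpy as np
    from scipy.special import logsumexp

    # Hypothetical log soft evidence for 4 items and 3 components
    log_soft_ev = np.log(np.array([[0.2, 0.5, 0.3],
                                   [0.1, 0.1, 0.8],
                                   [0.6, 0.2, 0.2],
                                   [0.3, 0.3, 0.4]]))
    log_w = np.log(np.array([0.5, 0.3, 0.2]))  # log mixture weights

    # EM-style normalization: subtract the per-row logsumexp, then exponentiate
    lpr = log_soft_ev + log_w
    lprPerItem = logsumexp(lpr, axis=1)
    resp = np.exp(lpr - lprPerItem[:, np.newaxis])
    evidence = lprPerItem.sum()
    assert np.allclose(resp.sum(axis=1), 1.0)

    # VB-style "subtract the max" trick yields the same responsibilities
    shifted = lpr - lpr.max(axis=1)[:, np.newaxis]
    resp2 = np.exp(shifted)
    resp2 /= resp2.sum(axis=1)[:, np.newaxis]
    assert np.allclose(resp, resp2)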
Example #2
 def calc_local_params(self, Data, LP, **kwargs):
   ''' Calculate local parameters for each data item and each component.    
       This is part of the E-step.
       
       Args
       -------
       Data : bnpy data object with Data.nObs observations
       LP : local param dict with fields
             E_log_soft_ev : Data.nObs x K array
                 E_log_soft_ev[n,k] = log p(data obs n | comp k)
       
       Returns
       -------
       LP : local param dict with fields
             resp : Data.nObs x K array whose rows sum to one
             resp[n,k] = posterior responsibility that comp. k has for data n                
   '''
   lpr = LP['E_log_soft_ev']
   if self.inferType.count('VB') > 0:
     lpr += self.Elogw
     # Calculate exp in numerically stable manner (first subtract the max)
     #  perform this in-place so no new allocations occur
     lpr -= np.max(lpr, axis=1)[:,np.newaxis]
     np.exp(lpr, out=lpr)
     # Normalize, so rows sum to one
     lpr /= lpr.sum(axis=1)[:,np.newaxis]
   elif self.inferType == 'EM':
     # Point estimates: add the log of the current mixture weights
     lpr += np.log(self.w)
     # Normalize each row via logsumexp; the per-item log marginals sum to the evidence
     lprPerItem = logsumexp(lpr, axis=1)
     np.exp(lpr-lprPerItem[:,np.newaxis], out=lpr)
     LP['evidence'] = lprPerItem.sum()
   LP['resp'] = lpr
   assert np.allclose(lpr.sum(axis=1), 1)
   return LP
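
In the VB branch, self.Elogw (judging by its name and its use here) holds the expected log mixture weights E[log w] under the variational posterior. For a Dirichlet posterior this expectation has the standard digamma form, sketched below; the name alpha_post is a hypothetical stand-in, not an actual bnpy attribute.

    import numpy as np
    from scipy.special import digamma

    # Hypothetical Dirichlet posterior over the mixture weights, Dir(alpha_post)
    alpha_post = np.array([4.0, 2.5, 1.5])

    # E[log w_k] under Dir(alpha) equals digamma(alpha_k) - digamma(sum(alpha))
    Elogw = digamma(alpha_post) - digamma(alpha_post.sum())

    # This is the kind of quantity added to E_log_soft_ev in the VB branch above
    print(Elogw)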
Example #3
    def calc_local_params(self, Data, LP, nnzPerRowLP=0, **kwargs):
        ''' Compute local parameters for each data item and component.

        Parameters
        -------
        Data : bnpy.data.DataObj subclass

        LP : dict
            Local parameters as key-value string/array pairs
            * E_log_soft_ev : 2D array, N x K
                E_log_soft_ev[n,k] = log p(data obs n | comp k)

        Returns
        -------
        LP : dict
            Local parameters, with updated fields
            * resp : 2D array, size N x K
                Posterior responsibility each comp has for each item
                resp[n, k] = p(z[n] = k | x[n])
        '''
        lpr = LP['E_log_soft_ev']
        K = lpr.shape[1]
        if self.inferType.count('EM') > 0:
            # Using point estimates, for EM algorithm
            lpr += np.log(self.w + 1e-100)
            if nnzPerRowLP and (nnzPerRowLP > 0 and nnzPerRowLP < K):
                # SPARSE Assignments
                LP['nnzPerRow'] = nnzPerRowLP
                LP['spR'] = sparsifyLogResp(lpr, nnzPerRow=nnzPerRowLP)
                assert np.all(np.isfinite(LP['spR'].data))
            else:
                lprPerItem = logsumexp(lpr, axis=1)
                lpr -= lprPerItem[:, np.newaxis]
                np.exp(lpr, out=lpr)
                LP['resp'] = lpr
                LP['evidence'] = lprPerItem.sum()
        else:
            # Full Bayesian approach, for VB or GS algorithms
            lpr += self.Elogw
            if nnzPerRowLP and (nnzPerRowLP > 0 and nnzPerRowLP < K):
                # SPARSE Assignments
                LP['nnzPerRow'] = nnzPerRowLP
                LP['spR'] = sparsifyLogResp(lpr, nnzPerRow=nnzPerRowLP)
                assert np.all(np.isfinite(LP['spR'].data))
            else:
                # DENSE Assignments
                # Calculate exp in numerically safe way,
                # in-place so no new allocations occur
                NumericUtil.inplaceExpAndNormalizeRows(lpr)
                LP['resp'] = lpr
                assert np.allclose(lpr.sum(axis=1), 1)
        return LP
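
When nnzPerRowLP is active, only the top nnzPerRowLP responsibilities per row are kept in a sparse structure. Below is a rough self-contained sketch of that idea; toy_sparsify_log_resp and its inputs are hypothetical illustrations, and bnpy's actual sparsifyLogResp and the spR format it returns may differ in detail.

    import numpy as np
    from scipy.sparse import csr_matrix

    def toy_sparsify_log_resp(lpr, nnzPerRow):
        # Keep only the nnzPerRow largest entries per row, renormalized to sum to one
        N, K = lpr.shape
        # Column indices of the nnzPerRow largest log-probabilities in each row
        top = np.argpartition(lpr, K - nnzPerRow, axis=1)[:, K - nnzPerRow:]
        rows = np.repeat(np.arange(N), nnzPerRow)
        cols = top.ravel()
        # Exp-and-normalize restricted to the kept entries (max-subtraction for stability)
        vals = lpr[rows, cols].reshape(N, nnzPerRow)
        vals = np.exp(vals - vals.max(axis=1)[:, np.newaxis])
        vals /= vals.sum(axis=1)[:, np.newaxis]
        return csr_matrix((vals.ravel(), (rows, cols)), shape=(N, K))

    # Example: 2 items, 4 components, keep 2 nonzero responsibilities per row
    lpr = np.log(np.array([[0.1, 0.2, 0.3, 0.4],
                           [0.7, 0.1, 0.1, 0.1]]))
    spR = toy_sparsify_log_resp(lpr, nnzPerRow=2)
    assert np.allclose(spR.toarray().sum(axis=1), 1.0)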