Example #1
0
 def random_sample(self):
     """Draw one representative sample per object.

     Returns a [NOBJ, NDIM] numpy array, sampling over the NSAMPLE axis with
     probability proportional to the inverse interim_prior weights.  Needed to
     compute a posterior object.
     """
     # Inverse-prior importance weights, normalized per object (per row).
     weights = 1.0 / self.interim_prior
     weights /= weights.sum(axis=1, keepdims=True)
     picks = [self.data[obj, pick_discrete(w)] for obj, w in enumerate(weights)]
     return np.array(picks)
Example #2
0
 def draw_new_label(self, i):
     """Draw a cluster label for object i; -1 means "make a new cluster"."""
     # This is essentially Neal (2000) equation (3.6).  The weights are
     # unnormalized; pick_discrete rescales them to sum to 1.0, which also
     # absorbs the b/(n-1+alpha) factors in Neal (2000).
     weights = self.p[i] * np.append([1], self.nphi)
     # Slot 0 corresponds to the new-cluster option, hence the -1 shift.
     return pick_discrete(weights) - 1
Example #3
0
 def draw_new_label(self, i):
     """Sample a label for object i, where -1 is the "new cluster" sentinel."""
     # Neal (2000) eq. (3.6).  pick_discrete normalizes the unnormalized
     # probabilities itself, which captures the b/(n-1+alpha) factors there.
     # Prepending 1 reserves index 0 for the new-cluster option; subtracting
     # one afterwards maps that slot onto the -1 sentinel.
     augmented = np.append([1], self.nphi)
     picked = pick_discrete(self.p[i] * augmented)
     return picked - 1
Example #4
0
 def update_latent_data(self):
     """Resample the latent "true" data for pseudo-marginal representations.

     When the data are held as PseudoMarginalData samples, replace each
     object's latent value in self.D with one of its samples, chosen with
     probability proportional to likelihood / interim_prior under its current
     cluster parameters.  (TBD: means with Gaussian errors.)
     """
     if isinstance(self._D, PseudoMarginalData):
         for cluster, params in enumerate(self.phi):
             members = np.nonzero(self.label == cluster)[0]
             subset = self._D[members]  # a PseudoMarginalData instance
             # Per-object importance weights for picking a representative sample.
             weights = self.prior.like1(self.manip(subset.data), params) / subset.interim_prior
             weights /= np.sum(weights, axis=1)[:, np.newaxis]  # think this line can go.
             for j, w in enumerate(weights):
                 self.D[members[j]] = subset.data[j, pick_discrete(w)]
         # self.D changed, so the new-cluster probabilities must be refreshed too.
         self.p[:, 0] = self.alpha * self.prior.pred(self.mD)
         self.manip_needs_update = True
     else:
         pass  # Data already a plain numpy array; nothing to update.
Example #5
0
 def update_latent_data(self):
     """Refresh the latent "true" data when held as pseudo-marginal samples.

     For PseudoMarginalData, each object's entry in self.D is redrawn from its
     sample set, weighted by likelihood over interim_prior given its cluster's
     parameters.  Plain ndarray data needs no update.  (Gaussian-error
     representation is TBD.)
     """
     if not isinstance(self._D, PseudoMarginalData):
         return  # already a plain numpy array; nothing to do
     for lbl, params in enumerate(self.phi):
         sel = np.nonzero(self.label == lbl)[0]
         chunk = self._D[sel]  # a PseudoMarginalData instance
         # Weights for selecting a representative sample per object.
         w = self.prior.like1(self.manip(chunk.data), params) / chunk.interim_prior
         w /= np.sum(w, axis=1)[:, np.newaxis]  # think this line can go.
         for k in range(len(w)):
             self.D[sel[k]] = chunk.data[k, pick_discrete(w[k])]
     # Since self.D changed, update the new-cluster probabilities as well.
     self.p[:, 0] = self.alpha * self.prior.pred(self.mD)
     self.manip_needs_update = True
Example #6
0
 def random_sample(self):
     """Return a [NOBJ, NDIM] numpy array sampling over NSAMPLE using inverse
     interim_prior weights.  Needed to compute a posterior object."""
     # Weight each sample inversely by its interim prior, then normalize
     # each object's row so pick_discrete sees a probability vector.
     inv_prior = 1.0 / self.interim_prior
     row_totals = np.sum(inv_prior, axis=1)
     probs = inv_prior / row_totals[:, np.newaxis]
     chosen = []
     for obj, row in enumerate(probs):
         chosen.append(self.data[obj, pick_discrete(row)])
     return np.array(chosen)