def sample_indicator(self, like, null_class_proba):
    """Sample the indicator from the likelihood.

    Parameters
    ----------
    like: array of shape (nbitem, self.k)
        component-wise likelihood
    null_class_proba: array of shape (n_samples)
        prior probability of being under the null

    Returns
    -------
    z: array of shape (nbitem)
        a draw of the membership variable

    Notes
    -----
    Here z == -1 encodes the null class
    """
    n = like.shape[0]
    conditional_like_1 = ((1 - null_class_proba) * like.T).T
    conditional_like_0 = np.reshape(null_class_proba * self.null_dens, (n, 1))
    conditional_like = np.hstack((conditional_like_0, conditional_like_1))
    z = BGMM.sample_indicator(self, conditional_like) - 1
    z[z == self.k] = self.k + np.arange(np.sum(z == self.k))
    return z
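# Sketch of how the draw above is composed (explanatory comment, not part of
# the original source). The null class is prepended as an extra column of
# `conditional_like`, so a categorical draw followed by the shift of -1 gives:
#   z == -1          -> item assigned to the null class
#   0 <= z < self.k  -> item assigned to an existing component
#   z == self.k      -> item drawn into a new component (assuming `like`
#                       carries a trailing new-component column, as in the
#                       infinite mixture scheme); each such item then receives
#                       its own fresh label self.k, self.k + 1, ...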
def update(self, x, z):
    """Update function (draw a sample of the IMM parameters).

    Parameters
    ----------
    x: array of shape (n_samples, self.dim)
        the data used in the estimation process
    z: array of shape (n_samples), dtype int
        the corresponding classification
    """
    # re-dimension the priors in order to match self.k
    self.prior_means = np.repeat(self._prior_means, self.k, 0)
    self.prior_dof = self._prior_dof * np.ones(self.k)
    self.prior_shrinkage = self._prior_shrinkage * np.ones(self.k)
    self._dets = self._dets_ * np.ones(self.k)
    self._inv_prior_scale = np.repeat(self._inv_prior_scale_, self.k, 0)

    # initialize some variables
    self.means = np.zeros((self.k, self.dim))
    self.precisions = np.zeros((self.k, self.dim, self.dim))

    # proceed with the update
    BGMM.update(self, x, z)
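# Hedged usage sketch (not part of the original source): within one Gibbs
# sweep of the infinite mixture, `update` would typically be called right
# after the indicators have been resampled. Assuming an instance `imm`, data
# `x` of shape (n_samples, dim), and a component-wise likelihood computed by
# the model (the helper name `likelihood` is an assumption here):
#
#   like = imm.likelihood(x)          # per-component likelihood of each item
#   z = imm.sample_indicator(like)    # draw memberships, possibly new labels
#   imm.update(x, z)                  # redraw means/precisions given (x, z)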
def sample_indicator(self, like):
    """Sample the indicator from the likelihood.

    Parameters
    ----------
    like: array of shape (nbitem, self.k)
        component-wise likelihood

    Returns
    -------
    z: array of shape (nbitem)
        a draw of the membership variable

    Notes
    -----
    The behaviour is different from the standard BGMM in that z can take
    arbitrary values
    """
    z = BGMM.sample_indicator(self, like)
    z[z == self.k] = self.k + np.arange(np.sum(z == self.k))
    return z
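# Worked example of the relabeling line above (illustrative values, not from
# the original source): with self.k == 3 and a raw draw z == [0, 3, 2, 3],
# the two items that landed on the "new component" slot (value 3) receive
# distinct fresh labels 3 and 4, giving z == [0, 3, 2, 4]. This is what lets
# the number of components grow during sampling.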