def step(self):
    """One sampling (or MAP, if self.maximize) sweep for a GSM node.

    Model, per the original author's note:
        S ~ N(0, exp(Z / 2))
        Z = signal_node + noise_node + bias
          = signal_node + gaussian_term
          = scale_node + bias

    Resamples Z element-wise, then the bias, then writes the residual
    noise term back into the scale node (or its last child).
    """
    # resample gaussian_term conditioned on signal_node
    scale_node = self.gsm_node.scale_node
    S = self.gsm_node.value()
    N, K = S.shape

    # resample Z: current value plus bias gives the starting point for
    # each element-wise slice-sample / optimization below.
    Z = scale_node.value() + self.gsm_node.bias
    if scale_node.isleaf():
        # leaf: the whole scale is noise; prior mean is just the bias
        mu = self.gsm_node.bias * np.ones((N, K))
        sigma_sq = scale_node.variance()
    else:
        # sum node: the last child is treated as the noise component;
        # the prior mean is bias + (everything except that noise child)
        assert scale_node.issum()
        mu = self.gsm_node.bias + scale_node.value() - scale_node.children[-1].value()
        sigma_sq = scale_node.children[-1].variance()

    for i in range(N):
        for k in range(K):
            # log f(z) = log-likelihood of the observed S[i, k] given z
            log_f = sparse_coding.LogFUncollapsed(S[i, k])
            if self.maximize:
                # MAP: minimize the negative log posterior of z
                temp = lambda z: -log_f(z) - distributions.gauss_loglik(z, mu[i, k], sigma_sq[i, k])
                Z[i, k] = scipy.optimize.fmin(temp, Z[i, k], disp=False)
            else:
                # Gibbs: slice sample z under Gaussian prior x likelihood
                Z[i, k] = slice_sampling.slice_sample_gauss(log_f, mu[i, k], sigma_sq[i, k], Z[i, k])

    # resample bias: gaussian_term = Z minus the signal contribution
    if scale_node.isleaf():
        gaussian_term = Z
    else:
        signal = scale_node.value() - scale_node.children[-1].value()
        gaussian_term = Z - signal

    if not self.maximize:
        # NOTE(review): np.random.normal takes a *standard deviation* as its
        # second argument, but 1. / lam here looks like a variance (lam is a
        # summed precision).  Presumably np.sqrt(1. / lam) was intended —
        # confirm against the derivation before changing.
        # NOTE(review): mu is the unweighted mean of gaussian_term; for
        # heteroscedastic sigma_sq a precision-weighted mean may have been
        # intended — verify.
        if self.gsm_node.bias_type == 'scalar':
            mu = gaussian_term.mean()
            lam = (1. / sigma_sq).sum()
            self.gsm_node.bias = np.random.normal(mu, 1. / lam)
        elif self.gsm_node.bias_type == 'row':
            mu = gaussian_term.mean(1)
            lam = (1. / sigma_sq).sum(1)
            self.gsm_node.bias = np.random.normal(mu, 1. / lam)[:, nax]
        elif self.gsm_node.bias_type == 'col':
            mu = gaussian_term.mean(0)
            lam = (1. / sigma_sq).sum(0)
            self.gsm_node.bias = np.random.normal(mu, 1. / lam)[nax, :]

    # set noise node: whatever is left of gaussian_term after the bias
    noise_term = gaussian_term - self.gsm_node.bias
    if scale_node.isleaf():
        scale_node.set_value(noise_term)
    else:
        scale_node.children[-1].set_value(noise_term)
def scaling_move(U, V, a):
    """Resample a scaling factor for component a and apply it in place.

    Multiplies column a of U by a factor alpha and divides row a of V by
    the same alpha (leaving the product U V unchanged), where alpha is
    drawn from a discrete grid in proportion to the posterior odds under
    p_u for U's column and a standard Gaussian for V's row.
    """
    candidates = np.logspace(-2., 2., 100)
    scores = np.zeros(len(candidates))
    for j in range(len(candidates)):
        c = candidates[j]
        scores[j] = p_u(c * U[:, a]) + distributions.gauss_loglik(V[a, :] / c, 0., 1.).sum()
    # convert log-scores to normalized probabilities (log-sum-exp for stability)
    probs = np.exp(scores - np.logaddexp.reduce(scores))
    probs /= np.sum(probs)
    choice = np.random.multinomial(1, probs).argmax()
    factor = candidates[choice]
    U[:, a] *= factor
    V[a, :] /= factor
def p_s_given_z(S, Z, t, sigma_sq_approx):
    """Annealed log-density of S: interpolate between a fixed-variance
    Gaussian approximation (weight 1 - t) and the exact conditional with
    variance exp(Z) (weight t)."""
    approx_term = distributions.gauss_loglik(S, 0., sigma_sq_approx[nax, :])
    exact_term = distributions.gauss_loglik(S, 0., np.exp(Z))
    return (1. - t) * approx_term + t * exact_term
def loglik_each(self, predictions, noise):
    """Element-wise Gaussian log-likelihood of the observed values,
    with zeros in the unobserved (masked-out) entries."""
    per_entry = distributions.gauss_loglik(self.values, predictions, noise)
    return np.where(self.mask, per_entry, 0.)
def loglik(self, predictions, noise):
    """Total Gaussian log-likelihood over the observed entries only.

    A scalar noise is broadcast; an array noise is restricted to the
    observed entries via the mask before evaluation.
    """
    obs_noise = noise if np.isscalar(noise) else noise[self.mask]
    obs_values = self.values[self.mask]
    obs_predictions = predictions[self.mask]
    return distributions.gauss_loglik(obs_values, obs_predictions, obs_noise).sum()