def get_recon_error(recon, x, sigma):
    """Return the negative log-likelihood of ``x`` under a reconstruction.

    @param recon: reconstructed image batch (Bernoulli mean for binary
                  images, Normal mean for RGB images)
    @param x: target image batch; channel dimension ``x.shape[1]`` selects
              the likelihood model (1 -> Bernoulli, 3 -> Normal)
    @param sigma: fixed scale of the Normal likelihood (RGB case only;
                  unused for binary images)
    @raise NotImplementedError: if the channel count is neither 1 nor 3
    """
    if x.shape[1] == 1:  # Binary image: Bernoulli likelihood
        ll = Bernoulli(recon).log_prob(x)
    elif x.shape[1] == 3:  # RGB image: Gaussian likelihood with scale sigma
        ll = Normal(recon, sigma).log_prob(x)
    else:
        # BUG FIX: the original constructed this exception without `raise`,
        # so unexpected shapes fell through to an UnboundLocalError on `ll`.
        raise NotImplementedError('X must be either 1 or 3')
    return -ll.sum()
def generate_gg_blocks(N):
    """Generate N 'clean' data points using the ggblocks feature set.

    @param N: the number of data points to generate

    This function doesn't sample points from the IBP: it uses a Bernoulli
    distribution for Z.
    """
    import numpy as np

    # Draw feature indicators, then resample any row that ended up with no
    # active feature — every sample must own at least one feature.
    Z = Bernoulli(0.5).sample((N, 4))
    empty_rows = Z.sum(1) == 0
    while empty_rows.any():
        Z[empty_rows] = Bernoulli(0.5).sample((empty_rows.sum().item(), 4))
        empty_rows = Z.sum(1) == 0

    # Report the fraction of samples carrying exactly one feature; since
    # zero-feature rows were resampled away, the smallest unique sum is 1.
    unique, counts = np.unique(Z.sum(1).numpy(), return_counts=True)
    print("# of points with 1 feature", float(counts[0])/float(N))

    A = gg_blocks()
    return Z @ A
def sample_Z(N, K=4, p=0.25):
    """Sample an N x K binary feature-indicator matrix.

    Rows are resampled until every sample has at least one active feature
    (matching the invariant enforced by generate_gg_blocks).

    @param N: number of data points (rows) to sample
    @param K: number of features (columns); defaults to 4 to preserve the
              original hard-coded behavior
    @param p: Bernoulli activation probability per entry; defaults to 0.25
    @return: float tensor of 0s and 1s with shape (N, K), no all-zero rows
    """
    Z = Bernoulli(p).sample((N, K))
    # Resample any all-zero rows until each row has >= 1 active feature.
    msk = Z.sum(1) == 0
    while msk.any():
        Z[msk] = Bernoulli(p).sample((msk.sum().item(), K))
        msk = Z.sum(1) == 0
    return Z
def log_likelihood(X, y, w, use_cuda=False):
    """Bernoulli log-likelihood of binary labels under a linear-logit model.

    @param X: feature table (anything exposing ``.values``, e.g. a pandas
              DataFrame) of shape (n_samples, n_features)
    @param y: binary targets (exposing ``.values``) of length n_samples
    @param w: weight tensor broadcastable against the feature rows
    @param use_cuda: NOTE(review) — accepted but never used in this body
    @return: scalar tensor, the summed log-likelihood over all samples
    """
    features = torch.as_tensor(X.values)
    labels = torch.as_tensor(y.values)
    # Per-sample logit: elementwise weight * feature, reduced over features.
    scores = (features * w).sum(1)
    return Bernoulli(logits=scores).log_prob(labels).sum()