def get_sample(self, mu, sigma, number_samples):
    """Draw a reparameterized sample from a logit-normal distribution.

    A normal sample in logit space is squashed through a sigmoid, so the
    result lies in (0, 1).

    Parameters
    ----------
    mu : torch.Tensor
        Mean of the underlying normal in logit space.
    sigma : torch.Tensor
        Standard deviation of the underlying normal in logit space.
    number_samples : int
        Unused here; kept for interface compatibility with sibling samplers.

    Returns
    -------
    torch.Tensor
        Logit-normal sample with the broadcast shape of ``mu``/``sigma``.
    """
    mu, sigma = broadcast_and_squeeze(mu, sigma)
    # rsample() keeps the draw differentiable w.r.t. mu and sigma
    # (reparameterization trick) and stays in torch's dtype, unlike the
    # previous float64 numpy noise which could clash with float32 tensors.
    logit_sample = distributions.normal.Normal(loc=mu, scale=sigma).rsample()
    return torch.sigmoid(logit_sample)
def get_sample(self, mu, sigma, number_samples):
    """Draw a reparameterized sample from a normal distribution.

    Parameters
    ----------
    mu : torch.Tensor
        Mean of the normal distribution.
    sigma : torch.Tensor
        Standard deviation of the normal distribution.
    number_samples : int
        Unused here; kept for interface compatibility with sibling samplers.

    Returns
    -------
    torch.Tensor
        Normal sample with the broadcast shape of ``mu``/``sigma``.
    """
    mu, sigma = broadcast_and_squeeze(mu, sigma)
    # Bug fix: the original scaled the noise by the *un-broadcast* sigma
    # while using the broadcast mean, which breaks whenever broadcasting
    # changes sigma's shape (it also mixed float64 numpy noise into torch).
    # Use the reparameterized torch sampler, consistent with the other
    # normal sampler in this file.
    return distributions.normal.Normal(loc=mu, scale=sigma).rsample()
def calculate_log_probability(self, x, mu, sigma):
    """Elementwise Cauchy log-density, summed over the data dimensions.

    Parameters
    ----------
    x : torch.Tensor
        Points at which to evaluate the density.
    mu : torch.Tensor
        Location parameter of the Cauchy distribution.
    sigma : torch.Tensor
        Scale parameter of the Cauchy distribution.

    Returns
    -------
    torch.Tensor
        Log-probabilities reduced over the data dimensions by
        ``sum_data_dimensions``.
    """
    x, mu, sigma = broadcast_and_squeeze(x, mu, sigma)
    # Bug fix: the original computed only -log(1 + ((x-mu)/sigma)^2) and
    # dropped the -log(pi*sigma) normalization constant, biasing any
    # comparison across different scales. Delegate to torch's Cauchy,
    # consistent with the Normal log-prob elsewhere in this file.
    log_probability = distributions.cauchy.Cauchy(loc=mu, scale=sigma).log_prob(x)
    return sum_data_dimensions(log_probability)
def calculate_log_probability(self, x, mu, sigma):
    """Log-density of a log-normal distribution, summed over data dimensions.

    Implements
        log p(x) = -0.5*log(2*pi) - log(x) - log(sigma)
                   - 0.5*(log(x) - mu)**2 / sigma**2

    Parameters
    ----------
    x : array-like
        Points at which to evaluate the density; must be strictly positive
        (log(x) is taken).
    mu : array-like
        Mean of log(x).
    sigma : array-like
        Standard deviation of log(x).

    Returns
    -------
    Log-probabilities reduced over the data dimensions by
    ``sum_data_dimensions``.
    """
    x, mu, sigma = broadcast_and_squeeze(x, mu, sigma)
    # NOTE(review): `F` here must be chainer.functions (torch.nn.functional
    # has no `log`) — this block predates the torch migration visible
    # elsewhere in this file; confirm before running under torch.
    log_probability = -0.5*np.log(2*np.pi) - F.log(x) - F.log(sigma) - 0.5*(F.log(x)-mu)**2/(sigma**2)
    return sum_data_dimensions(log_probability)
def get_sample(self, n, z, number_samples):
    """Draw a binomial sample with n trials and success logit z (chainer).

    Parameters
    ----------
    n : chainer.Variable
        Number of trials (integer-valued).
    z : chainer.Variable
        Success probability in logit space; passed through a sigmoid.
    number_samples : int
        Unused here; kept for interface compatibility with sibling samplers.

    Returns
    -------
    chainer.Variable
        int32 binomial counts. The draw goes through numpy, so no gradient
        flows back to n or z.
    """
    n, z = broadcast_and_squeeze(n, z)
    binomial_sample = np.random.binomial(n.data, F.sigmoid(z).data)  #TODO: Not reparametrizable (Gumbel?)
    return chainer.Variable(binomial_sample.astype("int32"))
def get_sample(self, mu, sigma, number_samples):
    """Draw a Cauchy sample by inverse-CDF transform of uniform noise.

    Parameters
    ----------
    mu : array-like
        Location parameter of the Cauchy distribution.
    sigma : array-like
        Scale parameter of the Cauchy distribution.
    number_samples : int
        Unused here; kept for interface compatibility with sibling samplers.

    Returns
    -------
    Cauchy sample with the broadcast shape of ``mu``/``sigma``.
    """
    mu, sigma = broadcast_and_squeeze(mu, sigma)
    # Inverse CDF: the textbook form is tan(pi*(u - 0.5)); tan(pi*u) for
    # u in (0, 1) yields the same Cauchy distribution by periodicity of tan,
    # though u near 0.5 hits the pole. NOTE(review): `F.tan` implies
    # chainer.functions (torch.nn.functional has no `tan`) — confirm.
    sample = mu + sigma*F.tan(np.pi*np.random.uniform(0,1,size=mu.shape).astype(np.float32))
    return sample
def calculate_log_probability(self, x, p):
    """Categorical log-probability of one-hot (or count) data x under p.

    Parameters
    ----------
    x : torch.Tensor
        Observed indicator/count values; multiplied elementwise with log(p).
    p : torch.Tensor
        Category probabilities; must be strictly positive where x is nonzero.

    Returns
    -------
    torch.Tensor
        Log-probabilities reduced over the data dimensions by
        ``sum_data_dimensions``.
    """
    x, p = broadcast_and_squeeze(x, p)
    # Bug fix: the original converted x to a numpy array before multiplying
    # with a torch tensor — unnecessary, dtype-fragile, and it detaches x
    # from autograd. Keep everything in torch.
    # NOTE(review): the reduction axis 2 is hard-coded; presumably the layout
    # is (samples, batch, categories) — TODO confirm against callers.
    log_probability = torch.sum(x * torch.log(p), dim=2)
    return sum_data_dimensions(log_probability)
def get_sample(self, n, p, number_samples):
    """Draw a binomial sample with n trials and success probability p.

    Parameters
    ----------
    n : torch.Tensor
        Number of trials (integer-valued).
    p : torch.Tensor
        Success probability in [0, 1].
    number_samples : int
        Unused here; kept for interface compatibility with sibling samplers.

    Returns
    -------
    torch.Tensor
        int32 binomial counts. The draw goes through numpy, so no gradient
        flows back to n or p.
    """
    n, p = broadcast_and_squeeze(n, p)
    # Robustness fix: .numpy() raises on tensors that require grad; the draw
    # is non-differentiable anyway, so detach explicitly first.
    binomial_sample = np.random.binomial(n.detach().numpy(), p.detach().numpy())  #TODO: Not reparametrizable (Gumbel?)
    return torch.tensor(binomial_sample.astype("int32"))
def calculate_log_probability(self, x, n, p):
    """Binomial log-probability of x successes in n trials (chainer).

    Implements log C(n, x) + x*log(p) + (n-x)*log(1-p).

    Parameters
    ----------
    x : chainer.Variable
        Observed success counts; detached via .data (no gradient through x).
    n : chainer.Variable
        Number of trials; detached via .data.
    p : chainer.Variable
        Success probability; gradients flow through p only.

    Returns
    -------
    Log-probabilities reduced over the data dimensions by
    ``sum_data_dimensions``.
    """
    x, n, p = broadcast_and_squeeze(x, n, p)
    x, n = x.data, n.data
    # NOTE(review): `binom` is presumably scipy.special.binom and `F` is
    # chainer.functions — neither import is visible in this chunk; confirm.
    log_probability = np.log(binom(n, x)) + x*F.log(p) + (n-x)*F.log(1-p)
    return sum_data_dimensions(log_probability)
def get_sample(self, mu, sigma, number_samples):
    """Draw a reparameterized sample from a normal distribution.

    Parameters
    ----------
    mu : torch.Tensor
        Mean of the normal distribution.
    sigma : torch.Tensor
        Standard deviation of the normal distribution.
    number_samples : int
        Unused here; kept for interface compatibility with sibling samplers.

    Returns
    -------
    torch.Tensor
        Normal sample with the broadcast shape of ``mu``/``sigma``.
    """
    # (Removed dead commented-out numpy sampling code and a stale TODO.)
    mu, sigma = broadcast_and_squeeze(mu, sigma)
    # rsample() uses the reparameterization trick, so gradients flow back
    # to mu and sigma through the sample.
    sample = distributions.normal.Normal(loc=mu, scale=sigma).rsample()
    return sample
def calculate_log_probability(self, x, mu, sigma):
    """Normal log-density of x, summed over the data dimensions.

    Parameters
    ----------
    x : torch.Tensor
        Points at which to evaluate the density.
    mu : torch.Tensor
        Mean of the normal distribution.
    sigma : torch.Tensor
        Standard deviation of the normal distribution.

    Returns
    -------
    torch.Tensor
        Log-probabilities reduced over the data dimensions by
        ``sum_data_dimensions``.
    """
    x, mu, sigma = broadcast_and_squeeze(x, mu, sigma)
    # (Removed the dead commented-out manual formula; torch's Normal.log_prob
    # computes the same -0.5*log(2*pi*sigma^2) - 0.5*(x-mu)^2/sigma^2.)
    log_probability = distributions.normal.Normal(loc=mu, scale=sigma).log_prob(x)
    return sum_data_dimensions(log_probability)
def calculate_log_probability(self, x, n, z):
    """Binomial log-probability with the success probability given in logit
    space (chainer).

    Computes log C(n, x) + x*log(sigmoid(z)) + (n-x)*log(1-sigmoid(z)) with
    shift terms alpha = relu(-z) and beta = relu(z) arranged around the logs.

    Parameters
    ----------
    x : chainer.Variable
        Observed success counts; detached via .data (no gradient through x).
    n : chainer.Variable
        Number of trials; detached via .data.
    z : chainer.Variable
        Success probability in logit space; gradients flow through z only.

    Returns
    -------
    Log-probabilities reduced over the data dimensions by
    ``sum_data_dimensions``.
    """
    x, n, z = broadcast_and_squeeze(x, n, z)
    x, n = x.data, n.data
    # alpha/beta are detached (.data) so the shifts act as constants.
    alpha = F.relu(-z).data
    beta = F.relu(z).data
    # x*alpha - x*log(e^alpha + e^(alpha-z)) simplifies to x*log(sigmoid(z));
    # NOTE(review): the alpha shift cancels algebraically and, as written,
    # np.exp(alpha) can still overflow for large |z| (a log-sum-exp
    # stabilization would subtract the max instead) — verify intent.
    success_term = x*alpha - x*F.log(np.exp(alpha) + F.exp(alpha-z))
    failure_term = (n-x)*beta - (n-x)*F.log(np.exp(beta) + F.exp(beta+z))
    # NOTE(review): `binom` is presumably scipy.special.binom — confirm.
    log_probability = np.log(binom(n, x)) + success_term + failure_term
    return sum_data_dimensions(log_probability)
# Ad-hoc verification script: checks that the manual normal log-density and
# sampling formulas match torch.distributions, using the project's
# broadcasting utilities. `##` markers delimit interactively-run cells.
import torch
import numpy as np
from torch import distributions
from brancher import utilities
from importlib import reload

##
# Cell 1: manual normal log-pdf vs. torch's Normal.log_prob.
mu, sigma, x = torch.zeros(3, 1), torch.ones(3, 1), torch.randn(3, 1)
mu, sigma, x = utilities.broadcast_and_squeeze(mu, sigma, x)
print([i.numpy().shape for i in [mu, sigma, x]])
old = -0.5 * torch.log(2 * np.pi * sigma**2) - 0.5 * (x - mu)**2 / (sigma**2)
new = distributions.normal.Normal(loc=mu, scale=sigma).log_prob(x)
# NOTE(review): torch.equal is exact elementwise equality — for float results
# this may legitimately print False even when the formulas agree numerically.
print(torch.equal(old, new))
print(
    torch.equal(utilities.sum_data_dimensions(old),
                utilities.sum_data_dimensions(new)))
##
# Cell 2: manual reparameterized normal sampling vs. Normal.sample.
# (The two draws use different random streams, so only shapes/statistics
# are comparable, not values.)
mu, sigma, x = torch.zeros(3, 1), torch.ones(3, 1), torch.randn(3, 1)
mean, var = utilities.broadcast_and_squeeze(mu, sigma)
old = mean + var * torch.tensor(np.random.normal(0, 1, size=mean.shape)).type(
    torch.FloatTensor)
new = distributions.normal.Normal(loc=mean, scale=var).sample()
print(old, new)
##
# Ad-hoc comparison script: exercises the torch ports of the project's
# utility functions against their chainer counterparts. `##` markers
# delimit interactively-run cells.
# NOTE(review): relies on `utilities`, `chainer`, and `equal_tensor_variable`
# being in scope from earlier in the session — not imported in this chunk.
x = np.random.normal(size=(20, 5, 4, 2))
dim_index = 1
xt = utilities.sum_from_dim(torch.tensor(x), dim_index)
xc = utilities.sum_from_dim_chainer(chainer.Variable(x), dim_index)
equal_tensor_variable(xt, xc)
## partial_broadcast
# Build inputs whose second axis differs (1 vs. 2) to exercise broadcasting.
xl = []
for i in range(1, 3):
    xl.append(np.random.normal(size=(20, i, 10, 3)))
xt = utilities.partial_broadcast(*[torch.tensor(x) for x in xl])
xc = utilities.partial_broadcast_chainer(*[chainer.Variable(x) for x in xl])
print([i.shape for i in xl])
print([i.numpy().shape for i in xt])
print([i.shape for i in xc])
## broadcast_and_squeeze
tensor_list = [torch.randn(10, 5), torch.randn(10, 5), torch.randn(10, 5)]
xt = utilities.broadcast_and_squeeze(*tensor_list)
#xc = utilities.broadcast_and_squeeze_chainer(*[chainer.Variable(x.numpy()) for x in tensor_list])
print([i.numpy().shape for i in xt])
#print([i.shape for i in xc])
# no torch.expand, no torch.repeat, no torch.view
##
# Trailing assignment — presumably the start of an unfinished next cell.
x = np.random.normal(size=(10, 4))