Example #1
 def calculate_log_probability(self, x, **parameters):
     self.check_parameters(**parameters)
     x, parameters, number_samples, number_datapoints = self._preprocess_parameters_for_log_prob(
         x, **parameters)
     log_prob = self._calculate_log_probability(x, **parameters)
     log_prob = self._postprocess_log_prob(log_prob, number_samples,
                                           number_datapoints)
     return sum_data_dimensions(log_prob)
Example #2
    def calculate_log_probability(self, x, mu, sigma):
        """
        Log-probability of a log-normally distributed variable.

        Parameters
        ----------

        Returns
        -------
        """
        x, mu, sigma = broadcast_and_squeeze(x, mu, sigma)
        log_probability = (-0.5*np.log(2*np.pi) - F.log(x) - F.log(sigma)
                           - 0.5*(F.log(x) - mu)**2/(sigma**2))
        return sum_data_dimensions(log_probability)
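Example #2 matches the log-normal density log p(x) = -0.5*log(2*pi) - log x - log sigma - (log x - mu)^2 / (2*sigma^2). A minimal stand-alone cross-check against torch.distributions.LogNormal (a sketch only; the real method runs inside a distribution class with broadcast_and_squeeze and Chainer-style F ops):

import numpy as np
import torch
from torch import distributions

mu, sigma = torch.zeros(3, 1), torch.ones(3, 1)
x = torch.rand(3, 1) + 0.1  # strictly positive support

manual = (-0.5 * np.log(2 * np.pi) - torch.log(x) - torch.log(sigma)
          - 0.5 * (torch.log(x) - mu)**2 / sigma**2)
builtin = distributions.LogNormal(loc=mu, scale=sigma).log_prob(x)
print(torch.allclose(manual, builtin))
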
Example #3
    def calculate_log_probability(self, x, mu, sigma):
        """
        Log-probability of a Cauchy-distributed variable, up to the constant -log(pi*sigma).

        Parameters
        ----------

        Returns
        -------
        """
        x, mu, sigma = broadcast_and_squeeze(x, mu, sigma)
        log_probability = -torch.log(1 + (x - mu)**2 / sigma**2)
        return sum_data_dimensions(log_probability)
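The expression in Example #3 is the Cauchy log-density without the additive constant -log(pi*sigma); torch.distributions.Cauchy includes that constant. A minimal illustrative sketch of the relationship:

import numpy as np
import torch
from torch import distributions

mu, sigma, x = torch.zeros(3, 1), torch.ones(3, 1), torch.randn(3, 1)

unnormalized = -torch.log(1 + (x - mu)**2 / sigma**2)
full = distributions.Cauchy(loc=mu, scale=sigma).log_prob(x)
print(torch.allclose(unnormalized - torch.log(np.pi * sigma), full))
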
Example #4
    def calculate_log_probability(self, x, p):
        """
        Log-probability of a categorical variable with one-hot observations x and class probabilities p.

        Parameters
        ----------

        Returns
        -------
        """
        x, p = broadcast_and_squeeze(x, p)
        x = x.detach()  # keep x a tensor; converting to numpy would break the torch ops below
        log_probability = torch.sum(x * torch.log(p), dim=2)
        return sum_data_dimensions(log_probability)
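Example #4 is the one-hot categorical log-probability, sum_k x_k * log(p_k). For reference, torch.distributions.OneHotCategorical computes the same quantity (a minimal sketch, without the broadcasting conventions of the surrounding class):

import torch
from torch import distributions

p = torch.tensor([[0.2, 0.5, 0.3]])
x = torch.tensor([[0., 1., 0.]])  # one-hot observation

manual = torch.sum(x * torch.log(p), dim=-1)
builtin = distributions.OneHotCategorical(probs=p).log_prob(x)
print(torch.allclose(manual, builtin))
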
Example #5
    def calculate_log_probability(self, x, n, p):
        """
        Log-probability of a binomially distributed variable with n trials and success probability p.

        Parameters
        ----------

        Returns
        -------
        """
        x, n, p = broadcast_and_squeeze(x, n, p)
        x, n = x.data, n.data
        log_probability = np.log(binom(n, x)) + x*F.log(p) + (n-x)*F.log(1-p)
        return sum_data_dimensions(log_probability)
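Example #5 relies on binom (presumably scipy.special.binom) followed by np.log, which can overflow for large n. An equivalent all-tensor formulation computes the log binomial coefficient in log space with torch.lgamma (a sketch under that assumption):

import torch

def binomial_log_prob(x, n, p):
    # log C(n, x) computed in log space via the log-gamma function
    log_binom = (torch.lgamma(n + 1.) - torch.lgamma(x + 1.)
                 - torch.lgamma(n - x + 1.))
    return log_binom + x * torch.log(p) + (n - x) * torch.log(1. - p)

x, n, p = torch.tensor([3.]), torch.tensor([10.]), torch.tensor([0.4])
print(binomial_log_prob(x, n, p))  # same value as scipy.stats.binom.logpmf(3, 10, 0.4)
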
Example #6
    def calculate_log_probability(self, x, mu, sigma):
        """
        Log-probability of a normally distributed variable.

        Parameters
        ----------

        Returns
        -------
        """
        x, mu, sigma = broadcast_and_squeeze(x, mu, sigma)
        # log_probability = -0.5*F.log(2*np.pi*sigma**2) - 0.5*(x-mu)**2/(sigma**2)
        log_probability = distributions.normal.Normal(loc=mu,
                                                      scale=sigma).log_prob(x)
        return sum_data_dimensions(log_probability)
Example #7
    def calculate_log_probability(self, x, mu, chol_cov):
        """
        Log-probability of a multivariate normal variable parameterized by a Cholesky factor.

        Parameters
        ----------

        Returns
        -------
        """
        log_det = 2*F.sum(F.log(get_diagonal(chol_cov)), axis=2)
        whitened_input = F.matmul(F.transpose(chol_cov, axes=(1, 2, 4, 3)), x)
        exponent = F.sum(whitened_input**2, axis=2)
        log_probability = -0.5*np.log(2*np.pi) - 0.5*log_det - 0.5*exponent
        return sum_data_dimensions(log_probability)
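Example #7 works through a Cholesky factor and does not reference mu in the computation. For comparison, the standard covariance-Cholesky form is available directly via torch.distributions.MultivariateNormal with scale_tril (a minimal sketch; this may not be the same parameterization the class above assumes):

import torch
from torch import distributions

mu = torch.zeros(2)
chol_cov = torch.tensor([[1.0, 0.0],
                         [0.5, 1.0]])  # lower-triangular Cholesky factor of the covariance
x = torch.randn(2)

log_prob = distributions.MultivariateNormal(loc=mu, scale_tril=chol_cov).log_prob(x)
print(log_prob)
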
Example #8
    def calculate_log_probability(self, x, n, z):
        """
        Log-probability of a binomially distributed variable parameterized by logits z.

        Parameters
        ----------

        Returns
        -------
        """
        x, n, z = broadcast_and_squeeze(x, n, z)
        x, n = x.data, n.data
        alpha = F.relu(-z).data
        beta = F.relu(z).data
        success_term = x*alpha - x*F.log(np.exp(alpha) + F.exp(alpha-z))
        failure_term = (n-x)*beta - (n-x)*F.log(np.exp(beta) + F.exp(beta+z))
        log_probability = np.log(binom(n, x)) + success_term + failure_term
        return sum_data_dimensions(log_probability)
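Example #8 evaluates the logit-parameterized binomial, x*log(sigmoid(z)) + (n - x)*log(sigmoid(-z)) plus the log binomial coefficient, using the relu shift for numerical stability. The same terms can be written compactly with softplus, since log(sigmoid(z)) = -softplus(-z) (a sketch, not the class implementation):

import torch
import torch.nn.functional as F

def binomial_logit_log_prob(x, n, z):
    # log C(n, x) via log-gamma, plus numerically stable log-sigmoid terms
    log_binom = (torch.lgamma(n + 1.) - torch.lgamma(x + 1.)
                 - torch.lgamma(n - x + 1.))
    return log_binom + x * (-F.softplus(-z)) + (n - x) * (-F.softplus(z))

x, n, z = torch.tensor([3.]), torch.tensor([10.]), torch.tensor([0.2])
print(binomial_logit_log_prob(x, n, z))

The scratch script below cross-checks the manual Normal log-density and reparameterized sampling against torch.distributions.
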
import torch
import numpy as np
from torch import distributions
from brancher import utilities
from importlib import reload

##
mu, sigma, x = torch.zeros(3, 1), torch.ones(3, 1), torch.randn(3, 1)
mu, sigma, x = utilities.broadcast_and_squeeze(mu, sigma, x)

print([i.numpy().shape for i in [mu, sigma, x]])

old = -0.5 * torch.log(2 * np.pi * sigma**2) - 0.5 * (x - mu)**2 / (sigma**2)
new = distributions.normal.Normal(loc=mu, scale=sigma).log_prob(x)

# The two expressions are algebraically identical but evaluated differently,
# so compare with a tolerance instead of exact float equality.
print(torch.allclose(old, new))
print(
    torch.allclose(utilities.sum_data_dimensions(old),
                   utilities.sum_data_dimensions(new)))

##
mu, sigma, x = torch.zeros(3, 1), torch.ones(3, 1), torch.randn(3, 1)
mean, std = utilities.broadcast_and_squeeze(mu, sigma)
# Reparameterized draw by hand vs. torch.distributions; the two samples use
# independent noise, so only their shapes and distribution should match.
old = mean + std * torch.tensor(np.random.normal(0, 1, size=mean.shape)).type(
    torch.FloatTensor)
new = distributions.normal.Normal(loc=mean, scale=std).sample()

print(old, new)

##