Example no. 1
import numpy as np
import theano.tensor as T  # assumed backend: the snippet only requires a Theano-style tensor module bound to T

def log_normal(x, mu, sigma, D, dim=1):
    # Log-density of a D-dimensional Gaussian with diagonal covariance.
    if dim == 1:
        # sigma is a single shared variance
        pre_term = -(D * 0.5 * np.log(2 * np.pi) + 0.5 * D * T.log(sigma))
        delta = T.sum((x - mu) ** 2, axis=1) / sigma
        return pre_term - 0.5 * delta
    elif dim == 2:
        # sigma holds one variance per dimension
        pre_term = -(D * 0.5 * np.log(2 * np.pi) + 0.5 * T.sum(T.log(sigma), axis=1))
        delta = T.sum((x - mu) / sigma * (x - mu), axis=1)
        return pre_term - 0.5 * delta
    raise ValueError("dim must be 1 or 2")
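For reference, both branches evaluate the standard diagonal-Gaussian log-density (with sigma read as a variance, per dimension in the dim == 2 branch):

\log \mathcal{N}(x;\mu,\sigma) = -\tfrac{D}{2}\log(2\pi) - \tfrac{1}{2}\sum_i \log\sigma_i - \tfrac{1}{2}\sum_i \frac{(x_i-\mu_i)^2}{\sigma_i}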
Example no. 2
def log_normal(x, mu, sigma):
    # D (the dimensionality) is assumed to be defined in the enclosing scope.
    pre = -(D / 2.0 * np.log(2 * np.pi) + 0.5 * T.log(sigma))
    return pre  # - 0.5 * sigma * T.dot(x - mu, x - mu)  (quadratic term left disabled in the source)
Example no. 3
def A(t):
    # c is assumed to be a constant defined in the enclosing scope.
    return -c * T.log(1 - t)
Example no. 4
def log_a(t):
    # log of c / (1 - t); c is assumed to be a constant defined in the enclosing scope.
    return T.log(c / (1 - t))
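If A and log_a come from the same piece of code (an assumption suggested by the shared constant c), then log_a is simply the log-derivative of A:

A(t) = -c\log(1-t) \;\Rightarrow\; A'(t) = \frac{c}{1-t} \;\Rightarrow\; \log A'(t) = \log c - \log(1-t)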
Example no. 5
def log1pexp(x):
    # Softplus: log(1 + exp(x)); exp(x) overflows for large x (see the stable variant below).
    return T.log(1 + T.exp(x))
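The direct form above overflows for large x because exp(x) is evaluated before the log. A minimal NumPy sketch of a numerically stable equivalent, using the identity log(1 + e^x) = max(x, 0) + log(1 + e^(-|x|)) (function name here is illustrative):

import numpy as np

def log1pexp_stable(x):
    # log(1 + e^x) rewritten as max(x, 0) + log1p(e^(-|x|)) to avoid overflow
    x = np.asarray(x, dtype=float)
    return np.maximum(x, 0.0) + np.log1p(np.exp(-np.abs(x)))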
Example no. 6
def _sample(self, num_samples):
    # Draw num_samples values by inverse-CDF sampling: m - b * log(-log(U)) with U ~ Uniform(0, 1).
    shape = self.shape()
    sample_shape = T.concat([[num_samples], shape], 0)
    random_sample = T.random_uniform(sample_shape)
    m, b = Stats.X(self.m), Stats.X(self.b)
    return m[None] - b[None] * T.log(-T.log(random_sample))
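The last line is the inverse-CDF (inverse transform) trick for the Gumbel distribution: if U ~ Uniform(0, 1), then m - b * log(-log(U)) is distributed as Gumbel with location m and scale b. A self-contained NumPy sketch of the same idea (names here are illustrative, not part of the original API):

import numpy as np

def sample_gumbel(m, b, size, rng=None):
    # Inverse-CDF sampling: F^{-1}(u) = m - b * log(-log(u)) yields Gumbel(loc=m, scale=b) draws.
    rng = rng if rng is not None else np.random.default_rng()
    u = rng.uniform(size=size)
    return m - b * np.log(-np.log(u))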
Example no. 7
from deepx import T
from deepx.nn import Linear
from deepx import stats

__all__ = ['Gaussian', 'Bernoulli']

log1pexp = lambda x: T.log(1. + T.exp(x) + 1e-4)  # softplus with a small offset so the output stays strictly positive


class Gaussian(Linear):
    def __init__(self, *args, **kwargs):
        self.cov_type = kwargs.pop('cov_type', 'diagonal')
        super(Gaussian, self).__init__(*args, **kwargs)
        assert not self.elementwise

    def get_dim_out(self):
        return [self.dim_out[0] * 2]

    def activate(self, X):
        # Split the doubled linear output into (sigma, mu) halves and wrap them in a diagonal Gaussian.
        if self.cov_type == 'diagonal':
            sigma, mu = T.split(X, 2, axis=-1)
            sigma = T.matrix_diag(log1pexp(sigma))
            return stats.Gaussian([sigma, mu], parameter_type='regular')
        raise Exception("Undefined covariance type: %s" % self.cov_type)

    def __str__(self):
        return "Gaussian(%s)" % self.dim_out


class GaussianStats(Linear):
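A reading of the Gaussian layer above (an interpretation of the code, not text from the original source): get_dim_out doubles the output width so that activate can split the linear activation h into two halves, the first parameterizing the diagonal covariance through the softplus-like log1pexp and the second the mean:

\Sigma = \operatorname{diag}\big(\log(1 + e^{h_{1:d}} + 10^{-4})\big), \qquad \mu = h_{d+1:2d}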
Example no. 8
def compute(self, x):
    # Element-wise natural logarithm of the input tensor.
    return T.log(x)
Example no. 9
def log_likelihood(self, batch, batch_z):
    # Decode the latent batch_z through p_network to get per-pixel Bernoulli parameters p.
    z = Vector(self.input_size, placeholder=batch_z, is_input=False)
    p = (z >> self.p_network).get_graph_outputs()[0]
    # Bernoulli log-likelihood, assuming p is a probability in (0, 1); 1e-10 guards against log(0).
    return T.mean(batch * T.log(p + 1e-10) + (1 - batch) * T.log(1 - p + 1e-10))
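For reference, the quantity being averaged is the element-wise Bernoulli log-likelihood (binary cross-entropy up to sign), with x the binary target and p the predicted probability:

\log p(x \mid p) = x\log p + (1-x)\log(1-p)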