Example 1
def initialize_objective(self):
    H, ds, da = self.horizon, self.ds, self.da
    if self.time_varying:
        # One dynamics matrix per transition (H - 1 of them), each initialized
        # near [I | 0] with small Gaussian noise.
        A = T.concatenate([T.eye(ds), T.zeros([ds, da])], -1)
        self.A = T.variable(A[None] + 1e-2 * T.random_normal([H - 1, ds, ds + da]))
        self.Q_log_diag = T.variable(T.random_normal([H - 1, ds]) + 1)
        self.Q = T.matrix_diag(T.exp(self.Q_log_diag))
    else:
        # A single stationary dynamics matrix shared across all time steps.
        A = T.concatenate([T.eye(ds), T.zeros([ds, da])], -1)
        self.A = T.variable(A + 1e-2 * T.random_normal([ds, ds + da]))
        self.Q_log_diag = T.variable(T.random_normal([ds]) + 1)
        # Exponentiating the log-diagonal keeps the noise covariance Q positive definite.
        self.Q = T.matrix_diag(T.exp(self.Q_log_diag))
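For reference, the parameterization above keeps the noise covariance Q positive definite because its diagonal entries are exponentials of an unconstrained vector. A minimal NumPy sketch of the same idea, with purely illustrative names:

import numpy as np

ds = 3
q_log_diag = np.random.randn(ds)            # unconstrained parameters
Q = np.diag(np.exp(q_log_diag))             # strictly positive diagonal covariance
assert np.all(np.linalg.eigvalsh(Q) > 0)    # positive definite by construction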
Example 2
from deepx import T  # assuming the deepx backend, as in Example 4

def log1pexp(x):
    return T.log(1 + T.exp(x))  # softplus; T.exp(x) can overflow for large x
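When overflow is a concern, the same softplus can be computed stably as log(exp(0) + exp(x)). A small NumPy sketch, independent of the snippet above:

import numpy as np

def log1pexp_stable(x):
    # Equivalent to log(1 + exp(x)) but avoids overflow for large positive x.
    return np.logaddexp(0.0, x)

print(log1pexp_stable(1000.0))  # ~1000.0, where the naive form would overflow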
Example 3
def log_likelihood(self, x):
    # m and b were undefined in the original snippet; assuming the location and
    # scale of the Gumbel distribution are stored on the instance.
    m, b = Stats.X(self.m), Stats.X(self.b)
    x = Stats.X(x)
    z = (x - m) / b
    # Log of the Gumbel density (1 / b) * exp(-z - exp(-z)); the original returned
    # the density itself despite the method's name.
    return -T.log(b) - z - T.exp(-z)
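The expression above is the log of the Gumbel density with location m and scale b; under that reading it can be cross-checked against SciPy (illustrative only):

import numpy as np
from scipy.stats import gumbel_r

m, b, x = 0.5, 2.0, 1.3
z = (x - m) / b
log_pdf = -np.log(b) - z - np.exp(-z)
assert np.isclose(log_pdf, gumbel_r.logpdf(x, loc=m, scale=b))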
Example 4
from deepx import T
from deepx.nn import Linear
from deepx import stats

__all__ = ['Gaussian', 'Bernoulli']

# Softplus with a small epsilon so that values used as covariance diagonals stay bounded away from zero.
log1pexp = lambda x: T.log(1. + T.exp(x) + 1e-4)


class Gaussian(Linear):
    def __init__(self, *args, **kwargs):
        self.cov_type = kwargs.pop('cov_type', 'diagonal')
        super(Gaussian, self).__init__(*args, **kwargs)
        assert not self.elementwise

    def get_dim_out(self):
        return [self.dim_out[0] * 2]

    def activate(self, X):
        if self.cov_type == 'diagonal':
            # Split the linear output into a raw scale and a mean, then map the raw
            # scale through the softplus to get a positive diagonal covariance.
            sigma, mu = T.split(X, 2, axis=-1)
            sigma = T.matrix_diag(log1pexp(sigma))
            return stats.Gaussian([sigma, mu], parameter_type='regular')
        raise Exception("Undefined covariance type: %s" % self.cov_type)

    def __str__(self):
        return "Gaussian(%s)" % self.dim_out


class GaussianStats(Linear):
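The Gaussian layer doubles its linear output so that one half parameterizes the mean and the other, after a softplus, a positive diagonal covariance. A minimal NumPy sketch of that split, with illustrative names only:

import numpy as np

dim_out = 4
X = np.random.randn(dim_out * 2)                    # raw output of the doubled linear layer
raw_scale, mu = np.split(X, 2, axis=-1)             # same ordering as activate() above
diag_cov = np.diag(np.log(1. + np.exp(raw_scale)))  # softplus keeps the diagonal positive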
Example 5
@classmethod
def natural_to_regular(cls, natural_parameters):
    # Recover the regular parameter by exponentiating the natural parameter eta.
    eta = natural_parameters[Stats.X]
    return T.exp(eta)
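As a concrete instance of this mapping: for a Poisson distribution the natural parameter is the log rate, so exponentiation recovers the regular parameter (illustrative NumPy only):

import numpy as np

rate = 3.5
eta = np.log(rate)                     # natural parameter of a Poisson with mean 3.5
assert np.isclose(np.exp(eta), rate)   # natural_to_regular: exp(eta) gives back the rate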
Example 6
def log_likelihood(self, batch_z, batch):
    x = Vector(self.input_size, placeholder=batch, is_input=False)
    mu, sigma = (x >> self.q_network).get_graph_outputs()
    # The second network output is a log-variance; sqrt(exp(.)) turns it into a standard deviation.
    sigma = T.sqrt(T.exp(sigma))
    return T.mean(
        log_normal(batch_z, mu, sigma, self.embedding_size, dim=2))
Example 7
def sample_z(self, batch, batch_noise, feed_dict=None):  # avoid a mutable default; feed_dict is unused here
    x = Vector(self.input_size, placeholder=batch, is_input=False)
    mu, sigma = (x >> self.q_network).get_graph_outputs()
    sigma = T.sqrt(T.exp(sigma))
    # Reparameterization trick: z = mu + sigma * eps, with the noise supplied by the caller.
    return mu + sigma * batch_noise
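Examples 6 and 7 follow the common variational-autoencoder convention: the encoder's second output is treated as a log-variance, and latent samples are drawn with the reparameterization trick. A self-contained NumPy sketch of that sampling step, with all names illustrative:

import numpy as np

embedding_size = 8
mu = np.zeros(embedding_size)              # encoder mean
log_var = np.full(embedding_size, -1.0)    # encoder log-variance
sigma = np.sqrt(np.exp(log_var))           # standard deviation, as in sample_z above
eps = np.random.randn(embedding_size)      # externally supplied Gaussian noise
z = mu + sigma * eps                       # reparameterized sample from N(mu, sigma^2)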