Example #1
def prepare(self):
    # Two linear Dense layers encode the mean and the log-variance of q(z|x).
    self._mu_encoder = Dense(self.size,
                             'linear',
                             init=GaussianInitializer(),
                             random_bias=True).init(self.input_dim)
    self._log_sigma_encoder = Dense(self.size,
                                    'linear',
                                    init=GaussianInitializer(),
                                    random_bias=True).init(self.input_dim)
    self.register_inner_layers(self._mu_encoder, self._log_sigma_encoder)
Example #2
class ReparameterizationLayer(NeuralLayer):
    """
    Reparameterization layer in a variational autoencoder.
    Only the binary output cost function is supported for now.
    The prior value is recorded after the computation graph is created.
    """
    def __init__(self, size, sample=False):
        """
        :param size: the size of the latent variable
        :param sample: whether to return a clean (noise-free) latent variable
        """
        super(ReparameterizationLayer, self).__init__("VariationalEncoder")
        self.size = size
        self.output_dim = size
        self.sample = sample
        self._prior = None

    def prepare(self):
        self._mu_encoder = Dense(self.size,
                                 'linear',
                                 init=GaussianInitializer(),
                                 random_bias=True).initialize(self.input_dim)
        self._log_sigma_encoder = Dense(self.size,
                                        'linear',
                                        init=GaussianInitializer(),
                                        random_bias=True).initialize(
                                            self.input_dim)
        self.register_inner_layers(self._mu_encoder, self._log_sigma_encoder)

    def compute_tensor(self, x):
        # Parameters of the approximate posterior q(z|x).
        mu = self._mu_encoder.compute_tensor(x)
        # The encoder outputs the log-variance; halving it gives log sigma.
        log_sigma = 0.5 * self._log_sigma_encoder.compute_tensor(x)
        # Negative KL(q(z|x) || N(0, I)), later retrieved through prior().
        self._prior = 0.5 * T.sum(1 + 2 * log_sigma - mu**2 -
                                  T.exp(2 * log_sigma))
        # Reparameterization trick: z = mu + sigma * eps, eps ~ N(0, I).
        eps = global_theano_rand.normal((x.shape[0], self.size))

        if self.sample:
            # Return the clean mean, without sampling noise.
            z = mu
        else:
            z = mu + T.exp(log_sigma) * eps
        return z

    def prior(self):
        """
        Get the prior value.
        """
        return self._prior
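
The sampling step above is the reparameterization trick, z = mu + sigma * eps with eps ~ N(0, I), and the value returned by prior() is the negative KL divergence between the approximate posterior N(mu, sigma^2) and a standard normal prior. Below is a minimal NumPy sketch of the same math, independent of deepy and Theano; the shapes and the seed are illustrative assumptions, not taken from the library.

import numpy as np

# Illustrative batch and latent sizes (assumptions, not from the library).
batch_size, latent_size = 4, 2
rng = np.random.default_rng(0)

# Stand-ins for the outputs of the two inner Dense encoders.
mu = rng.normal(size=(batch_size, latent_size))
log_sigma = 0.5 * rng.normal(size=(batch_size, latent_size))

# Reparameterization trick: z = mu + sigma * eps, eps ~ N(0, I).
eps = rng.normal(size=(batch_size, latent_size))
z = mu + np.exp(log_sigma) * eps

# Negative KL(N(mu, sigma^2) || N(0, I)), the quantity stored in _prior.
neg_kl = 0.5 * np.sum(1 + 2 * log_sigma - mu**2 - np.exp(2 * log_sigma))
print(z.shape, neg_kl)

Drawing eps outside the encoder keeps the stochastic node off the path between the parameters and z, which is what lets gradients flow through mu and log_sigma.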
Example #3
class ReparameterizationLayer(NeuralLayer):
    """
    Reparameterization layer in a variational autoencoder.
    Only the binary output cost function is supported for now.
    The prior value is recorded after the computation graph is created.
    """

    def __init__(self, size, sample=False):
        """
        :param size: the size of the latent variable
        :param sample: whether to return a clean (noise-free) latent variable
        """
        super(ReparameterizationLayer, self).__init__("VariationalEncoder")
        self.size = size
        self.output_dim = size
        self.sample = sample
        self._prior = None

    def prepare(self):
        self._mu_encoder = Dense(self.size, 'linear',
                                 init=GaussianInitializer(),
                                 random_bias=True).initialize(self.input_dim)
        self._log_sigma_encoder = Dense(self.size, 'linear',
                                        init=GaussianInitializer(),
                                        random_bias=True).initialize(
                                            self.input_dim)
        self.register_inner_layers(self._mu_encoder, self._log_sigma_encoder)

    def compute_tensor(self, x):
        # Parameters of the approximate posterior q(z|x).
        mu = self._mu_encoder.compute_tensor(x)
        log_sigma = 0.5 * self._log_sigma_encoder.compute_tensor(x)
        # Negative KL(q(z|x) || N(0, I)), later retrieved through prior().
        self._prior = 0.5 * T.sum(1 + 2 * log_sigma - mu**2
                                  - T.exp(2 * log_sigma))
        # Reparameterization
        eps = global_theano_rand.normal((x.shape[0], self.size))

        if self.sample:
            z = mu
        else:
            z = mu + T.exp(log_sigma) * eps
        return z

    def prior(self):
        """
        Get the prior value.
        """
        return self._prior
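
Per the docstring, the layer is meant for binary outputs, so the KL term it records is paired with a binary cross-entropy reconstruction cost to form the negative ELBO. A hedged NumPy sketch of that pairing follows; every array here is invented for illustration and nothing below comes from the deepy API:

import numpy as np

rng = np.random.default_rng(1)
x = rng.integers(0, 2, size=(4, 8)).astype(float)  # binary targets (made up)
x_hat = rng.uniform(0.05, 0.95, size=(4, 8))       # decoder outputs (made up)
mu = rng.normal(size=(4, 2))
log_sigma = 0.1 * rng.normal(size=(4, 2))

# Binary cross-entropy reconstruction cost, summed over the batch.
recon = -np.sum(x * np.log(x_hat) + (1 - x) * np.log(1 - x_hat))
# The value prior() would return: the *negative* KL divergence.
neg_kl = 0.5 * np.sum(1 + 2 * log_sigma - mu**2 - np.exp(2 * log_sigma))
# Negative ELBO to minimize: reconstruction cost minus the negative KL.
cost = recon - neg_kl
print(cost)

Note the sign convention: because prior() stores the negative KL, it is subtracted from (not added to) the reconstruction cost.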
Example #4
def setup(self):
    self._mu_encoder = Dense(self.size, 'linear',
                             init=GaussianInitializer(),
                             random_bias=True).connect(self.input_dim)
    self._log_sigma_encoder = Dense(self.size, 'linear',
                                    init=GaussianInitializer(),
                                    random_bias=True).connect(self.input_dim)
    self.register_inner_layers(self._mu_encoder, self._log_sigma_encoder)