Example #1
    def generator(self, Z, reuse=False):
        with tf.variable_scope(self.get_name() + "/generator", reuse=reuse):
            print('generator scope: ', tf.get_variable_scope().name)
            logits = forward(Z, self.decoder_model)

            if self.p_x_given_z_func == distributions.Bernoulli:
                print('generator: using bernoulli')
                return self.p_x_given_z_func(logits=logits)
            elif (self.p_x_given_z_func == distributions.Normal or self.p_x_given_z_func == distributions.Logistic) \
                 and self.encoder_model.layer_type == 'cnn':
                print('generator: using exponential family [cnn]')
                channels = shp(logits)[3]
                assert channels % 2 == 0, "need to project to 2x the channels for gaussian p(x|z)"
                # second half of the channels -> location, first half (through
                # a softplus) -> strictly positive scale
                loc = logits[:, :, :, channels // 2:]
                scale = 1e-6 + tf.nn.softplus(logits[:, :, :, 0:channels // 2])
                return self.p_x_given_z_func(loc=loc, scale=scale)
            elif (self.p_x_given_z_func == distributions.Normal or self.p_x_given_z_func == distributions.Logistic) \
                 and self.encoder_model.layer_type == 'dnn':
                print('generator: using exponential family [dnn]')
                features = shp(logits)[-1]
                assert features % 2 == 0, "need to project to 2x the features for gaussian p(x|z)"
                loc = logits[:, features // 2:]
                scale = 1e-6 + tf.nn.softplus(logits[:, 0:features // 2])
                return self.p_x_given_z_func(loc=loc, scale=scale)
            else:
                raise Exception("unknown distribution provided for likelihood")
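
The CNN and DNN branches above assume the decoder emits twice as many channels (or features) as the data being modeled, so that one half can be read as the location and the other half, pushed through a softplus, as a strictly positive scale. The following is a minimal standalone sketch of that split using plain TensorFlow 1.x ops; the placeholder name and shapes are illustrative assumptions, not part of the original code.

import tensorflow as tf

# Hypothetical decoder output: 8x8 feature maps with 6 channels,
# i.e. 2x the 3 channels of the data we want to model.
decoder_logits = tf.placeholder(tf.float32, shape=[None, 8, 8, 6])

channels = decoder_logits.get_shape().as_list()[3]
assert channels % 2 == 0, "need 2x the channels to split into loc and scale"

# Second half of the channels -> location, first half -> positive scale,
# mirroring the slicing used in the generator above.
loc = decoder_logits[:, :, :, channels // 2:]
scale = 1e-6 + tf.nn.softplus(decoder_logits[:, :, :, :channels // 2])

# One sample from the resulting diagonal Gaussian p(x|z).
sample = loc + scale * tf.random_normal(tf.shape(loc))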
Example #2
    def generator(self, Z, reuse=False):
        with tf.variable_scope(self.get_name() + "/generator", reuse=reuse):
            print('generator scope: ', tf.get_variable_scope().name)
            # Use the generator output as the logits of the Bernoulli
            # distribution over the reconstructed input
            return forward(Z, self.decoder_model)
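
Since this variant returns raw logits rather than a distribution object, a typical use is to feed them into a Bernoulli reconstruction loss. The sketch below shows one way to do that; the placeholder names and shapes are assumptions for illustration, and only the idea of treating the decoder output as Bernoulli logits comes from the example itself.

import tensorflow as tf

# Hypothetical flattened binary inputs and decoder logits of matching shape,
# e.g. logits = forward(Z, decoder_model).
X = tf.placeholder(tf.float32, shape=[None, 784])
logits = tf.placeholder(tf.float32, shape=[None, 784])

# Negative log-likelihood of X under a Bernoulli parameterized by the logits,
# summed over pixels and averaged over the batch.
nll = tf.reduce_mean(
    tf.reduce_sum(
        tf.nn.sigmoid_cross_entropy_with_logits(labels=X, logits=logits),
        axis=-1))

# The reconstructed Bernoulli mean is simply the sigmoid of the logits.
reconstruction = tf.nn.sigmoid(logits)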
Example #3
    def encoder(self, X, rnd_sample=None, reuse=False, hard=False):
        with tf.variable_scope(self.get_name() + "/encoder", reuse=reuse):
            # project the input through the encoder network, then sample the
            # latent code via the reparameterization helper
            encoded = forward(X, self.encoder_model)
            return VAE.reparameterize(encoded,
                                      self.num_discrete,
                                      self.tau,
                                      hard=hard,
                                      rnd_sample=rnd_sample)
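
The `num_discrete`, `tau`, and `hard` arguments suggest that `VAE.reparameterize` in this repository also relaxes discrete latents (e.g. with a Gumbel-Softmax temperature), which is not shown here. As a reference point, the sketch below covers only the standard continuous case, splitting the encoder output into mean and log-variance and applying the Gaussian reparameterization trick; the function name and layout are illustrative, not the repository's implementation.

import tensorflow as tf

def gaussian_reparameterize(encoded, rnd_sample=None):
    """Split encoder output into mean / log-variance and sample z = mu + sigma * eps."""
    features = encoded.get_shape().as_list()[-1]
    assert features % 2 == 0, "encoder must emit 2x the latent dimension"

    mu = encoded[:, :features // 2]
    logvar = encoded[:, features // 2:]

    # Reuse caller-provided noise if given, otherwise draw fresh standard-normal noise.
    eps = rnd_sample if rnd_sample is not None else tf.random_normal(tf.shape(mu))
    return mu + tf.exp(0.5 * logvar) * eps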