Example #1
def __init__(self, name='autoencoder', **kwargs):
    super(VariationalAutoEncoder, self).__init__(name=name, **kwargs)
    # Encoder/decoder pair built from the underlying YZVAE model over 64x64x3 images.
    self.vae = gen_vae.YZVAEModel()
    self.encoder = self.vae.inference_net()
    self.decoder = self.vae.generative_net([64, 64, 3], 1)
    self.sampling = Sampling()
    # Earlier VariationalAutoencoder implementation wrapping the same model (hence 'oldVAE').
    self.oldVAE = gen_vae.VariationalAutoencoder(nb.RealStd(), [64, 64, 3],
                                                 self.vae)
    self.z_sampler = nb.RealGauss()
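Below is a minimal sketch, assuming the encoder emits concatenated latent statistics and that the Sampling layer applies the reparameterisation trick to a (mean, log-variance) pair, of how a call method could wire these pieces together; it is illustrative, not the repository's code.

import tensorflow as tf

def call(self, inputs):
    # Hypothetical forward pass: encode, reparameterise, decode.
    stats = self.encoder(inputs)                     # assumed: concatenated mean/log-variance
    z_mean, z_log_var = tf.split(stats, 2, axis=-1)  # assumed parameterisation
    z = self.sampling((z_mean, z_log_var))           # reparameterisation via the Sampling layer
    return self.decoder(z)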
Example #2
def loss(self, samples, logging_context=None, epoch=None):
    [transformed, jacobian] = self.forward(samples)
    # Negative log-density of the transformed samples under a standard normal
    # (zero mean, zero log-variance), plus the negative mean log-det-Jacobian.
    transformed_loss = -nb.log_normal_pdf(transformed, transformed * 0.0, transformed * 0.0)
    jacobian_loss = tf.reduce_mean(-jacobian)
    if logging_context is not None:
        tf.summary.scalar(logging_context + "_transformed_loss", transformed_loss, step=epoch)
        tf.summary.scalar(logging_context + "_jacobian_loss", jacobian_loss, step=epoch)
    return transformed_loss + jacobian_loss
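Calling nb.log_normal_pdf with zero mean and zero log-variance scores the transformed samples under a standard normal. A self-contained sketch of such a helper is shown below; the reduction axes and batch averaging are assumptions and may differ from the repository's nb.log_normal_pdf.

import numpy as np
import tensorflow as tf

def log_normal_pdf_sketch(sample, mean, logvar, raxis=(1, 2, 3)):
    # Gaussian log-density per element, summed over the non-batch axes
    # and averaged over the batch (reduction behaviour is assumed).
    log2pi = tf.math.log(2.0 * np.pi)
    per_element = -0.5 * ((sample - mean) ** 2 * tf.exp(-logvar) + logvar + log2pi)
    return tf.reduce_mean(tf.reduce_sum(per_element, axis=raxis))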
Example #3
def discrete_loss(y_true, y_pred):
    # Treat each output channel as a 10-way categorical over quantised pixel values.
    logits_parameters_output = nb.reshape_channel_to_parameters(y_pred, 10)
    # scale_const is presumably a module-level constant in the original source that
    # maps y_true into the range of the 10 bins before rounding.
    scale_input = tf.multiply(y_true, scale_const)
    rounds = tf.cast(tf.clip_by_value(tf.round(scale_input), 0, 9), tf.int64)
    cross = tf.nn.sparse_softmax_cross_entropy_with_logits(
        labels=rounds, logits=logits_parameters_output)
    loss = tf.math.reduce_mean(tf.math.reduce_sum(cross, axis=[1, 2, 3]))
    return loss
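A hedged usage sketch follows, assuming scale_const maps pixel intensities in [0, 1] onto the 10 bins (the value 9.0 is an assumption) and that nb.reshape_channel_to_parameters adds a trailing parameter axis of size 10:

import tensorflow as tf

scale_const = 9.0  # assumption: scales [0, 1] pixel values onto bins 0..9

y_true = tf.random.uniform([4, 32, 32, 3])      # dummy batch of images in [0, 1]
y_pred = tf.random.normal([4, 32, 32, 3 * 10])  # 10 logits per colour channel (assumed layout)
print(discrete_loss(y_true, y_pred).numpy())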
Example #4
def __init__(self, distribution, image_dims, vae_model=DefaultVAEModel()):
    super(VariationalAutoencoder, self).__init__()
    # Encoder and decoder networks supplied by the pluggable VAE model; the decoder
    # emits as many parameters per pixel as the output distribution requires.
    self.xinference_net = vae_model.inference_net()
    self.xgenerative_net = vae_model.generative_net(
        image_dims, distribution.no_of_parameters())
    self.distribution = distribution
    self.vae_model = vae_model
    # Gaussian family over the latent code.
    self.latent_distribution = nb.RealGauss()
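Construction mirrors the calls in the test functions further down, e.g. a binary output distribution over 28x28x1 images:

model = vae.VariationalAutoencoder(nb.Binary(), [28, 28, 1])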
Example #5
def loss(self, samples, logging_context=None, epoch=None):
    # Encode the batch and reshape the encoder output into its two distribution parameters.
    inf = self.xinference_net(samples)
    inf_params = nb.reshape_channel_to_parameters(inf, 2)
    # Sample a latent code from the inferred posterior.
    sample_z = self.latent_distribution.sample(inf)

    # Decode, score the reconstruction, and add the KL term against the latent prior.
    gen_params = self.xgenerative_net(sample_z)
    reconstruction_loss = self.distribution.loss(gen_params, samples)
    kl_loss = self.kl_loss(sample_z, inf_params)
    loss = tf.reduce_mean(reconstruction_loss + kl_loss)
    if logging_context is not None:
        tf.summary.scalar(logging_context + "_kl_loss",
                          kl_loss,
                          step=epoch)
        tf.summary.scalar(logging_context + "_reconstruction_loss",
                          reconstruction_loss,
                          step=epoch)
    return loss
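A minimal training-step sketch around this loss method, assuming a Keras optimizer and that the model exposes trainable_variables; it is illustrative rather than the repository's training loop.

import tensorflow as tf

@tf.function
def train_step(model, samples, optimizer, epoch):
    # One gradient update on a batch, logging under the "train" context.
    with tf.GradientTape() as tape:
        loss = model.loss(samples, logging_context="train", epoch=epoch)
    grads = tape.gradient(loss, model.trainable_variables)
    optimizer.apply_gradients(zip(grads, model.trainable_variables))
    return loss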
Example #6
def test_pixelvae(image_range=512, no_epoch=10, learning_rate=0.0001):
    test_model(pvae.PixelVAE(nb.Binary(), [28, 28, 1]), "PixelVAE Bin",
               train_bin_images[:image_range], test_z, 'bin', no_epoch, learning_rate)
    test_model(pvae.PixelVAE(nb.RealGauss(), [32, 32, 3]), "PixelVAE RealGauss",
               deq_train_col_images[:image_range], test_z, 'col', no_epoch, learning_rate)
    test_model(pvae.PixelVAE(nb.Discrete(), [32, 32, 3]), "PixelVAE Discrete",
               deq_train_col_images[:image_range], test_z, 'col', no_epoch, learning_rate)
Example #7
def test_cnn(image_range=512, no_epoch=10, learning_rate=0.0001):
    test_model(cnn.PixelCNN(nb.Binary(), [28, 28, 1]), "CNN Bin",
               train_bin_images[:image_range], None, 'bin', no_epoch, learning_rate)
    test_model(cnn.PixelCNN(nb.RealGauss(), [32, 32, 3]), "CNN RealGauss",
               deq_train_col_images[:image_range], None, 'col', no_epoch, learning_rate)
    test_model(cnn.PixelCNN(nb.Discrete(), [32, 32, 3]), "CNN Discrete",
               deq_train_col_images[:image_range], None, 'col', no_epoch, learning_rate)
Example #8
def test_vae(image_range=512, no_epoch=10, learning_rate=0.0001):
    test_model(vae.VariationalAutoencoder(nb.Binary(), [28, 28, 1]), "VAE Bin",
               train_bin_images[:image_range], test_z, 'bin', no_epoch, learning_rate)
    test_model(vae.VariationalAutoencoder(nb.RealGauss(), [32, 32, 3]), "VAE RealGauss",
               deq_train_col_images[:image_range], test_z, 'col', no_epoch, learning_rate)
    test_model(vae.VariationalAutoencoder(nb.Discrete(), [32, 32, 3]), "VAE Discrete",
               deq_train_col_images[:image_range], test_z, 'col', no_epoch, learning_rate)
Example #9
def test_nb(image_range=512, no_epoch=10, learning_rate=0.0001):
    test_model(nb.NBModel(nb.Binary(), [28, 28, 1]), "NB Bin",
               train_bin_images[:image_range], None, 'bin', no_epoch, learning_rate)
    test_model(nb.NBModel(nb.RealGauss(), [32, 32, 3]), "NB RealGauss",
               deq_train_col_images[:image_range], None, 'col', no_epoch, learning_rate)
    test_model(nb.NBModel(nb.Discrete(), [32, 32, 3]), "NB Discrete",
               deq_train_col_images[:image_range], None, 'col', no_epoch, learning_rate)
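All four test functions above delegate to a test_model helper defined elsewhere in the repository. A hypothetical stub with the signature these calls imply might look like the following; every detail (batch size, optimizer, how test_z and mode are used) is an assumption.

import tensorflow as tf

def test_model(model, name, train_images, test_z, mode, no_epoch, learning_rate):
    # Hypothetical stub: train for no_epoch epochs and report the final batch loss.
    # test_z and mode would drive sample generation/plotting in the real helper.
    optimizer = tf.keras.optimizers.Adam(learning_rate)
    dataset = tf.data.Dataset.from_tensor_slices(train_images).batch(64)
    for epoch in range(no_epoch):
        for batch in dataset:
            with tf.GradientTape() as tape:
                loss = model.loss(batch, logging_context=name, epoch=epoch)
            grads = tape.gradient(loss, model.trainable_variables)
            optimizer.apply_gradients(zip(grads, model.trainable_variables))
        print(f"{name} epoch {epoch}: loss {float(loss):.4f}")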