Example #1
import tensorflow as tf
from tensorbayes.distributions import log_bernoulli_with_logits, log_normal

# `args` is the script's argparse namespace; args.eps clamps Bernoulli probabilities
def per_sample_loss(zs, xs):
    # zs: list of (z, (post_mean, post_var), (prior_mean, prior_var)) tuples, one per stochastic layer
    kl = [
        log_normal(z, *post) - log_normal(z, *prior) for (z, post, prior) in zs
    ]
    # xs: list of (x, logits) pairs of observations and decoder outputs
    rc = [
        -log_bernoulli_with_logits(x, logits, args.eps) for (x, logits) in xs
    ]
    return tf.add_n(kl + rc)
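
For context, a minimal usage sketch under assumed shapes: a single stochastic layer with a standard-normal prior. Every tensor name and shape below is hypothetical, not part of the original project.

# Hypothetical wiring; shapes, names, and the single layer are assumptions
x = tf.placeholder(tf.float32, [None, 784])       # flattened binary images
logits = tf.placeholder(tf.float32, [None, 784])  # decoder output logits
z = tf.placeholder(tf.float32, [None, 64])        # sampled latent code
zm = tf.placeholder(tf.float32, [None, 64])       # posterior mean
zv = tf.placeholder(tf.float32, [None, 64])       # posterior variance
zs = [(z, (zm, zv), (tf.zeros_like(z), tf.ones_like(z)))]  # N(0, 1) prior
xs = [(x, logits)]
loss = tf.reduce_mean(per_sample_loss(zs, xs))    # mean per-sample loss over the batch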
Example #2
import numpy as np
from tensorbayes.distributions import log_bernoulli_with_logits, log_normal

def labeled_loss(x, px_logit, z, zm, zv, zm_prior, zv_prior):
    xy_loss = -log_bernoulli_with_logits(x, px_logit)
    xy_loss += log_normal(z, zm, zv) - log_normal(z, zm_prior, zv_prior)
    # subtract log(1/10): log-probability of the label under a uniform prior over 10 classes
    return xy_loss - np.log(0.1)
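
The final term subtracts np.log(0.1), the log-probability of one label under a uniform prior over ten classes, which matches the MNIST setting of the surrounding snippets. A hedged call sketch follows; every tensor name is an assumption for illustration.

# Hypothetical wiring with a standard-normal prior on z; names are illustrative
x = tf.placeholder(tf.float32, [None, 784])
px_logit = tf.placeholder(tf.float32, [None, 784])
z = tf.placeholder(tf.float32, [None, 64])
zm = tf.placeholder(tf.float32, [None, 64])
zv = tf.placeholder(tf.float32, [None, 64])
loss_y = labeled_loss(x, px_logit, z, zm, zv,
                      tf.zeros_like(z), tf.ones_like(z))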
Example #3
import tensorflow as tf
from tensorflow.contrib.framework import arg_scope
from tensorflow.examples.tutorials.mnist import input_data
import tensorbayes as tb
from tensorbayes.layers import *
from tensorbayes.distributions import log_bernoulli_with_logits, log_normal

# `args` is the script's argparse namespace, parsed elsewhere in the file
if args.nonlin == 'relu':
    activate = tf.nn.relu
elif args.nonlin == 'elu':
    activate = tf.nn.elu
else:
    raise Exception("Unexpected nonlinearity arg")
args.save_dir = args.save_dir.rstrip('/')
log_file = 'results/lvae{:d}.csv'.format(args.run)
# str.rstrip strips a character set, not a suffix, so slice off '.csv' instead
model_dir = '{:s}/{:s}'.format(args.save_dir, log_file[:-len('.csv')])
# Partially applied log-densities with the numerical-stability eps baked in
log_bern = lambda x, logits: log_bernoulli_with_logits(x, logits, args.eps)
log_norm = lambda x, mu, var: log_normal(x, mu, var, 0.0)
writer = tb.FileWriter(log_file, args=args, pipe_to_sys=True)


# Convenience layers and graph blocks
def name(index, suffix):
    return 'z{:d}'.format(index) + '_' + suffix


def encode_block(x, h_size, z_size, idx):
    # Two-layer MLP for the upward pass of stochastic layer `idx`
    with tf.variable_scope(name(idx, 'encode')):
        h = dense(x, h_size, 'layer1', activation=activate)
        h = dense(h, h_size, 'layer2', activation=activate)
    # Gaussian parameters contributed by this layer's encoder
    with tf.variable_scope(name(idx, 'encode/likelihood')):
        z_m = dense(h, z_size, 'mean')