Example #1
0
import edward as ed
import numpy as np
import pymc3 as pm
import theano
from edward import PyMC3Model, Variational, Beta

# Fix: the original imported numpy twice; imports are now deduplicated and
# grouped per PEP 8 (third-party together, `from` imports last).

# Shared Theano variable through which Edward feeds observed data into the
# PyMC3 graph at inference time; np.zeros(1) is only a placeholder value.
data_shared = theano.shared(np.zeros(1))

# Beta-Bernoulli model: beta ~ Beta(1, 1), data ~ Bernoulli(beta).
# transform=None keeps beta on its natural (0, 1) support so the
# variational Beta factor added below matches it directly — TODO confirm
# against the edward PyMC3Model docs.
with pm.Model() as model:
    beta = pm.Beta("beta", 1, 1, transform=None)
    out = pm.Bernoulli("data", beta, observed=data_shared)

# Ten Bernoulli observations (two successes).
data = ed.Data(np.array([0, 1, 0, 0, 0, 0, 0, 0, 0, 1]))
m = PyMC3Model(model, data_shared)

# One Beta variational factor per latent variable in the model.
variational = Variational()
variational.add(Beta(m.num_vars))

# Mean-field variational inference.
inference = ed.MFVI(m, variational, data)
inference.run(n_iter=10000)
Example #2
0
        z ~ N(0, 1), p = phi(z)
        """
        z = tf.random_normal(size)
        return self.mapping(z)

class Data:
    """Adapter exposing a ``sample(size)`` method over an MNIST-style dataset.

    The wrapped object is expected to expose ``train.next_batch(size)``
    (the TensorFlow ``input_data`` MNIST interface) — TODO confirm.
    """

    def __init__(self, data):
        # Keep a handle to the dataset; all sampling goes through it.
        self.mnist = data

    def sample(self, size):
        """Return a batch of ``size`` training examples (labels discarded).

        Bug fix: the original read a module-level ``mnist`` global instead
        of the dataset stored on this instance, so the class only worked by
        accident when a global with that exact name existed.
        """
        x_batch, _ = self.mnist.train.next_batch(size)
        return x_batch

ed.set_seed(42)
model = NormalBernoulli(FLAGS.num_vars)

# Standard-normal variational factor for each latent dimension.
variational = Variational()
variational.add(Normal(FLAGS.num_vars))

# exist_ok=True avoids the check-then-create race of the previous
# os.path.exists / os.makedirs pair and is a no-op if the directory exists.
os.makedirs(FLAGS.data_directory, exist_ok=True)
mnist = input_data.read_data_sets(FLAGS.data_directory, one_hot=True)
data = Data(mnist)

inference = ed.VAE(model, variational, data)
sess = inference.initialize(n_data=FLAGS.n_data)

# Reuse the "model" variable scope so sampling from the prior shares the
# generative network's weights instead of creating fresh variables.
with tf.variable_scope("model", reuse=True) as scope:
    p_rep = model.sample_prior([FLAGS.n_data, FLAGS.num_vars])
for epoch in range(FLAGS.n_epoch):
    avg_loss = 0.0