Code Example #1
import edward as ed
import tensorflow as tf

from edward.stats import norm
from edward.variationals import Variational, Normal


class NormalPosterior:
    """
    p(x, z) = p(z) = p(z | x) = Normal(z; mu, std)
    """

    def __init__(self, mu, std):
        self.mu = mu
        self.std = std
        self.num_vars = 1

    def log_prob(self, xs, zs):
        return tf.pack([norm.logpdf(z, self.mu, self.std) for z in tf.unpack(zs)])


ed.set_seed(42)
mu = tf.constant(1.0)
std = tf.constant(1.0)
model = NormalPosterior(mu, std)
variational = Variational()
variational.add(Normal(model.num_vars))

inference = ed.MFVI(model, variational)
sess = inference.initialize()
for t in range(1000):
    loss = inference.update(sess)
    inference.print_progress(t, loss, sess)
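
A quick sanity check, not part of the original example: the target density is Normal(z; 1, 1), so the fitted mean-field Normal should converge to roughly mu = 1, std = 1. A minimal SciPy sketch of the same per-sample log density that model.log_prob computes:

import numpy as np
from scipy.stats import norm

# Target log density Normal(z; 1, 1) evaluated at a few points.
zs = np.array([0.0, 1.0, 2.0])
print(norm.logpdf(zs, loc=1.0, scale=1.0))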
Code Example #2
File: bernoulli.py  Project: fangzheng354/edward
"""
Probability model
    Posterior: (1-dimensional) Bernoulli
Variational model
    Likelihood: Mean-field Bernoulli
"""
import edward as ed
import tensorflow as tf

from edward.stats import bernoulli
from edward.variationals import Variational, Bernoulli


class BernoulliPosterior:
    """
    p(x, z) = p(z) = p(z | x) = Bernoulli(z; p)
    """
    def __init__(self, p):
        self.p = p

    def log_prob(self, xs, zs):
        return bernoulli.logpmf(zs, self.p)


ed.set_seed(42)
p = tf.constant(0.6)
model = BernoulliPosterior(p)
variational = Variational()
variational.add(Bernoulli(1))

inference = ed.MFVI(model, variational)
inference.run(n_iter=10000)
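
A quick check of what this example should recover, again not from the original: the variational family contains the target Bernoulli(0.6), so KL(q || p) is minimized exactly at q = 0.6. A NumPy sketch over a grid:

import numpy as np

p = 0.6
q = np.linspace(0.01, 0.99, 99)
# KL(Bernoulli(q) || Bernoulli(p)), elementwise over the grid.
kl = q * np.log(q / p) + (1 - q) * np.log((1 - q) / (1 - p))
print(q[np.argmin(kl)])  # -> 0.6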
Code Example #3
            for j in xrange(self.N):
                if j == i:
                    mat[i, i] = self.kernel_xy(xi, xi, sess)
                else:
                    sij = self.kernel_xy(xi, xs[j, 1:], sess)
                    mat[i, j] = sij

        sess.close()
        return tf.constant(mat, dtype=tf.float32)

    def log_prob(self, xs, zs):
        K = self.kernel(xs)
        log_prior = multivariate_normal.logpdf(zs[:, :], cov=K)
        log_lik = tf.pack([
            tf.reduce_sum(bernoulli.logpmf(
                xs[:, 0], self.inverse_link(tf.mul(xs[:, 0], z))))
            for z in tf.unpack(zs)])
        return log_prior + log_lik


ed.set_seed(42)
# Data must have labels in the first column and features in
# subsequent columns.
df = np.loadtxt('data/crabs_train.txt', dtype='float32', delimiter=',')
data = ed.Data(tf.constant(df, dtype=tf.float32))

model = GaussianProcess(N=len(df))
variational = Variational()
variational.add(Normal(model.num_vars))
inference = ed.MFVI(model, variational, data)
inference.run(n_iter=10000)
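
The truncated kernel method above fills the Gram matrix entry by entry in a Python loop. Assuming kernel_xy is an RBF kernel (a guess; its definition is cut off in this snippet), a vectorized NumPy equivalent is:

import numpy as np

def rbf_kernel(X, lengthscale=1.0, variance=1.0):
    # Pairwise squared Euclidean distances between rows of X.
    sq = np.sum(X ** 2, axis=1)
    d2 = sq[:, None] + sq[None, :] - 2.0 * X.dot(X.T)
    return variance * np.exp(-0.5 * d2 / lengthscale ** 2)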
Code Example #4
"""
Probability model
    Prior: Beta
    Likelihood: Bernoulli
Variational model
    Likelihood: Mean-field Beta
"""
import edward as ed
from edward.variationals import Variational, Beta

model_code = """
    data {
      int<lower=0> N;
      int<lower=0,upper=1> y[N];
    }
    parameters {
      real<lower=0,upper=1> theta;
    }
    model {
      theta ~ beta(1.0, 1.0);
      for (n in 1:N)
        y[n] ~ bernoulli(theta);
    }
"""
ed.set_seed(42)
model = ed.StanModel(model_code=model_code)
variational = Variational()
variational.add(Beta(1))
data = ed.Data(dict(N=10, y=[0, 1, 0, 0, 0, 0, 0, 0, 0, 1]))

inference = ed.MFVI(model, variational, data)
inference.run(n_iter=10000)
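
This model is conjugate, so the exact answer is available as a check (a sketch, not in the original): with a Beta(1, 1) prior and 2 successes out of N = 10 trials, the posterior is Beta(1 + 2, 1 + 8) = Beta(3, 9), which the fitted mean-field Beta should approximate.

from scipy.stats import beta

posterior = beta(a=3, b=9)
print(posterior.mean(), posterior.std())  # ~0.25, ~0.12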
Code Example #5
File: mixture_gaussian.py  Project: Beronx86/edward
            for k in xrange(self.K):
                log_prior += norm.logpdf(mus[k*self.D], 0, np.sqrt(self.c))
                log_prior += norm.logpdf(mus[k*self.D+1], 0, np.sqrt(self.c))
                log_prior += invgamma.logpdf(sigmas[k*self.D], self.a, self.b)
                log_prior += invgamma.logpdf(sigmas[k*self.D+1], self.a, self.b)

            log_lik = tf.constant(0.0, dtype=tf.float32)
            for x in tf.unpack(xs):
                for k in xrange(self.K):
                    log_lik += tf.log(pi[k])
                    log_lik += multivariate_normal.logpdf(x,
                        mus[(k*self.D):((k+1)*self.D)],
                        sigmas[(k*self.D):((k+1)*self.D)])

            log_prob += [log_prior + log_lik]

        return tf.pack(log_prob)

ed.set_seed(42)
x = np.loadtxt('data/mixture_data.txt', dtype='float32', delimiter=',')
data = ed.Data(tf.constant(x, dtype=tf.float32))

model = MixtureGaussian(K=2, D=2)
variational = Variational()
variational.add(Dirichlet(1, model.K))
variational.add(Normal(model.K*model.D))
variational.add(InvGamma(model.K*model.D))

inference = ed.MFVI(model, variational, data)
inference.run(n_iter=10000, n_minibatch=5, n_data=5)
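
Note that the inner loop above accumulates log pi[k] + log N(x | mu_k, Sigma_k) by summation over components; the mixture density itself is the log of a sum, typically evaluated with log-sum-exp for numerical stability. A NumPy sketch with made-up parameters (not taken from the example):

import numpy as np
from scipy.special import logsumexp
from scipy.stats import multivariate_normal

pi = np.array([0.5, 0.5])
mus = [np.zeros(2), np.ones(2)]
sigmas = [np.eye(2), np.eye(2)]
x = np.array([0.5, 0.5])
# log pi_k + log N(x | mu_k, Sigma_k) for each component k.
log_terms = [np.log(pi[k]) + multivariate_normal.logpdf(x, mus[k], sigmas[k])
             for k in range(2)]
print(logsumexp(log_terms))  # log p(x) under the two-component mixture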
Code Example #6
File: bernoulli.py  Project: Beronx86/edward
"""
Probability model
    Posterior: (1-dimensional) Bernoulli
Variational model
    Likelihood: Mean-field Bernoulli
"""
import edward as ed
import tensorflow as tf

from edward.stats import bernoulli
from edward.variationals import Variational, Bernoulli

class BernoulliPosterior:
    """
    p(x, z) = p(z) = p(z | x) = Bernoulli(z; p)
    """
    def __init__(self, p):
        self.p = p

    def log_prob(self, xs, zs):
        return bernoulli.logpmf(zs, self.p)

ed.set_seed(42)
p = tf.constant(0.6)
model = BernoulliPosterior(p)
variational = Variational()
variational.add(Bernoulli(1))

inference = ed.MFVI(model, variational)
inference.run(n_iter=10000)
Code Example #7
"""
import edward as ed
import tensorflow as tf

from edward.stats import bernoulli, beta
from edward.variationals import Variational, Beta


class BetaBernoulli:
    """
    p(x, z) = Bernoulli(x | z) * Beta(z | 1, 1)
    """
    def __init__(self):
        self.num_vars = 1

    def log_prob(self, xs, zs):
        log_prior = beta.logpdf(zs, a=1.0, b=1.0)
        log_lik = tf.pack([tf.reduce_sum(bernoulli.logpmf(xs, z)) \
                           for z in tf.unpack(zs)])
        return log_lik + log_prior


ed.set_seed(42)
model = BetaBernoulli()
variational = Variational()
variational.add(Beta(model.num_vars))
data = ed.Data(tf.constant((0, 1, 0, 0, 0, 0, 0, 0, 0, 1), dtype=tf.float32))

inference = ed.MFVI(model, variational, data)
inference.run(n_iter=10000)
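
The tf.pack/tf.unpack pattern in log_prob evaluates the joint density once per posterior sample. The same computation in plain NumPy/SciPy, with hypothetical samples from q(z):

import numpy as np
from scipy.stats import bernoulli, beta

xs = np.array([0, 1, 0, 0, 0, 0, 0, 0, 0, 1])
zs = np.array([0.2, 0.5, 0.8])  # hypothetical samples from q(z)
log_prior = beta.logpdf(zs, a=1.0, b=1.0)
# Sum the Bernoulli log-likelihood over the data for each sample z.
log_lik = np.array([bernoulli.logpmf(xs, z).sum() for z in zs])
print(log_lik + log_prior)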
Code Example #8
File: beta_bernoulli_tf.py  Project: Beronx86/edward
"""
Probability model
    Prior: Beta
    Likelihood: Bernoulli
Variational model
    Likelihood: Mean-field Beta
"""
import edward as ed
import tensorflow as tf

from edward.stats import bernoulli, beta
from edward.variationals import Variational, Beta

class BetaBernoulli:
    """
    p(x, z) = Bernoulli(x | z) * Beta(z | 1, 1)
    """
    def __init__(self):
        self.num_vars = 1

    def log_prob(self, xs, zs):
        log_prior = beta.logpdf(zs, a=1.0, b=1.0)
        log_lik = tf.pack([tf.reduce_sum(bernoulli.logpmf(xs, z)) \
                           for z in tf.unpack(zs)])
        return log_lik + log_prior

ed.set_seed(42)
model = BetaBernoulli()
variational = Variational()
variational.add(Beta(model.num_vars))
data = ed.Data(tf.constant((0, 1, 0, 0, 0, 0, 0, 0, 0, 1), dtype=tf.float32))

inference = ed.MFVI(model, variational, data)
inference.run(n_iter=10000)
Code Example #9
                log_prior += invgamma.logpdf(sigmas[k * self.D], self.a,
                                             self.b)
                log_prior += invgamma.logpdf(sigmas[k * self.D + 1], self.a,
                                             self.b)

            log_lik = tf.constant(0.0, dtype=tf.float32)
            for x in tf.unpack(xs):
                for k in xrange(self.K):
                    log_lik += tf.log(pi[k])
                    log_lik += multivariate_normal.logpdf(
                        x, mus[(k * self.D):((k + 1) * self.D)],
                        sigmas[(k * self.D):((k + 1) * self.D)])

            log_prob += [log_prior + log_lik]

        return tf.pack(log_prob)


ed.set_seed(42)
x = np.loadtxt('data/mixture_data.txt', dtype='float32', delimiter=',')
data = ed.Data(tf.constant(x, dtype=tf.float32))

model = MixtureGaussian(K=2, D=2)
variational = Variational()
variational.add(Dirichlet(1, model.K))
variational.add(Normal(model.K * model.D))
variational.add(InvGamma(model.K * model.D))

inference = ed.MFVI(model, variational, data)
inference.run(n_iter=10000, n_minibatch=5, n_data=5)
Code Example #10
File: bernoulli_two.py  Project: fangzheng354/edward
import edward as ed
import tensorflow as tf

from edward.util import get_dims
from edward.variationals import Variational, Bernoulli


class BernoulliPosterior:
    """
    p(x, z) = p(z) = p(z | x) = Bernoulli(z; p)
    """
    def __init__(self, p):
        self.lp = tf.log(p)
        self.num_vars = get_dims(p)[0]

    def log_prob(self, xs, zs):
        return tf.pack([self.table_lookup(z) for z in tf.unpack(zs)])

    def table_lookup(self, x):
        """Look up value from the probability table."""
        elem = self.lp
        for d in range(self.num_vars):
            elem = tf.gather(elem, tf.to_int32(x[d]))

        return elem

ed.set_seed(42)
p = tf.constant(
    [[0.4, 0.1],
     [0.1, 0.4]])
model = BernoulliPosterior(p)
variational = Variational()
variational.add(Bernoulli(model.num_vars))

inference = ed.MFVI(model, variational)
inference.run()
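
table_lookup above walks the log-probability tensor one axis at a time, one tf.gather per binary latent dimension. The same lookup in NumPy (a sketch):

import numpy as np

lp = np.log(np.array([[0.4, 0.1],
                      [0.1, 0.4]]))
z = np.array([1, 0])   # one binary configuration of the two latents
print(lp[tuple(z)])    # log p(z = (1, 0)) = log 0.1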