Example No. 1
            for j in xrange(self.N):
                if j == i:
                    mat[i, i] = self.kernel_xy(xi, xi, sess)
                else:
                    sij = self.kernel_xy(xi, xs[j, 1:], sess)
                    mat[i, j] = sij

        sess.close()
        return tf.constant(mat, dtype=tf.float32)

    def log_prob(self, xs, zs):
        """Return the unnormalized log joint density, one scalar per sample in zs.

        Args:
            xs: data tensor; column 0 holds the binary labels (per the
                data-loading comment elsewhere in this file) — TODO confirm
                the remaining columns are features consumed by self.kernel.
            zs: tensor of latent function values, one row per posterior sample.

        Returns:
            log_prior + log_lik, a vector with one entry per row of zs.
        """
        # GP prior: zero-mean multivariate normal with covariance from the
        # kernel evaluated on the data.
        K = self.kernel(xs)
        log_prior = multivariate_normal.logpdf(zs[:, :], cov=K)
        # Likelihood: for each latent sample z, sum the Bernoulli log-pmf of
        # the labels with success probability inverse_link(label * z).
        # NOTE(review): tf.pack/tf.unpack/tf.mul are pre-TF-1.0 names
        # (tf.stack/tf.unstack/tf.multiply today); kept for the TF version
        # this example targets.
        log_lik = tf.pack([tf.reduce_sum( \
            bernoulli.logpmf(xs[:,0], self.inverse_link(tf.mul(xs[:,0], z))) \
            ) for z in tf.unpack(zs)])
        return log_prior + log_lik


ed.set_seed(42)

# The training file stores each example as one CSV row: the binary label
# in column 0 followed by the feature values.
df = np.loadtxt('data/crabs_train.txt', dtype='float32', delimiter=',')

# Model size is tied to the number of training rows; wrap the array as an
# Edward data object for inference.
model = GaussianProcess(N=len(df))
data = ed.Data(tf.constant(df, dtype=tf.float32))

# Mean-field variational inference with a fully factorized normal family
# over the latent function values.
variational = Variational()
variational.add(Normal(model.num_vars))

inference = ed.MFVI(model, variational, data)
inference.run(n_iter=10000)
Example No. 2
                log_prior += invgamma.logpdf(sigmas[k * self.D], self.a,
                                             self.b)
                log_prior += invgamma.logpdf(sigmas[k * self.D + 1], self.a,
                                             self.b)

            log_lik = tf.constant(0.0, dtype=tf.float32)
            for x in tf.unpack(xs):
                for k in xrange(self.K):
                    log_lik += tf.log(pi[k])
                    log_lik += multivariate_normal.logpdf(
                        x, mus[(k * self.D):((k + 1) * self.D)],
                        sigmas[(k * self.D):((k + 1) * self.D)])

            log_prob += [log_prior + log_lik]

        return tf.pack(log_prob)


ed.set_seed(42)

# Two-component mixture of 2-D Gaussians.
model = MixtureGaussian(K=2, D=2)

# Read the observations from CSV and hand them to Edward.
x = np.loadtxt('data/mixture_data.txt', dtype='float32', delimiter=',')
data = ed.Data(tf.constant(x, dtype=tf.float32))

# One variational factor per latent block: mixing proportions (Dirichlet),
# component means (Normal), and component variances (inverse-gamma).
variational = Variational()
variational.add(Dirichlet(1, model.K))
variational.add(Normal(model.K * model.D))
variational.add(InvGamma(model.K * model.D))

# Stochastic mean-field VI over minibatches of 5 data points.
inference = ed.MFVI(model, variational, data)
inference.run(n_iter=10000, n_minibatch=5, n_data=5)