Example #1
 def test_real(self):
     """Transforming a real-support Normal returns a Normal unchanged in type."""
     with self.test_session():
         rv = Normal(0.0, 1.0)
         transformed = ed.transform(rv)
         # Real support needs no constraining map, so the class is preserved.
         self.assertIsInstance(transformed, Normal)
         draws = transformed.sample(10, seed=1).eval()
         self.assertSamplePosNeg(draws)
Example #2
 def test_simplex(self):
     """A simplex-support Dirichlet is wrapped into a TransformedDistribution."""
     with self.test_session():
         rv = Dirichlet([1.1, 1.2, 1.3, 1.4])
         unconstrained = ed.transform(rv)
         self.assertIsInstance(unconstrained, TransformedDistribution)
         # Samples on the unconstrained space should take both signs.
         draws = unconstrained.sample(10, seed=1).eval()
         self.assertSamplePosNeg(draws)
Example #3
 def test_nonnegative(self):
     """A nonnegative-support Gamma is wrapped into a TransformedDistribution."""
     with self.test_session():
         rv = Gamma(1.0, 1.0)
         unconstrained = ed.transform(rv)
         self.assertIsInstance(unconstrained, TransformedDistribution)
         # Samples on the unconstrained space should take both signs.
         draws = unconstrained.sample(10, seed=1).eval()
         self.assertSamplePosNeg(draws)
Example #4
 def test_unhandled_support(self):
     """transform raises ValueError for an unrecognized support string."""
     with self.test_session():
         FakeRV = namedtuple('FakeRV', ['support'])
         fake = FakeRV(support='rational')
         with self.assertRaises(ValueError):
             ed.transform(fake)
Example #5
 def test_no_support(self):
     """transform raises AttributeError when the RV exposes no support attribute."""
     with self.test_session():
         dp = DirichletProcess(1.0, Normal(0.0, 1.0))
         with self.assertRaises(AttributeError):
             ed.transform(dp)
Example #6
 def test_multivariate_real(self):
     """Transforming a multivariate real-support RV yields both-sign samples."""
     with self.test_session():
         mvn = MultivariateNormalDiag(tf.zeros(2), tf.ones(2))
         transformed = ed.transform(mvn)
         draws = transformed.sample(10, seed=1).eval()
         self.assertSamplePosNeg(draws)
Example #7
 def test_kwargs(self):
     """An explicit bijector kwarg is honored; Softplus keeps samples >= 0."""
     with self.test_session():
         rv = Normal(-100.0, 1.0)
         softplus_rv = ed.transform(rv, bijector=bijectors.Softplus())
         draws = softplus_rv.sample(10).eval()
         # Softplus maps the real line onto the nonnegative reals.
         self.assertTrue((draws >= 0.0).all())
Example #8
 def test_no_support(self):
     """transform raises ValueError when given a PointMass."""
     with self.test_session():
         pm = PointMass(1.0)
         with self.assertRaises(ValueError):
             ed.transform(pm)
                 probs=p,
                 concentrations=conc_param *
                 tf.ones(n_samples, dtype=np.float32),
                 sample_shape=n_samples,
                 value=tf.zeros(n_samples, dtype="float32"))

# INFERENCE
# Variational approximations are defined on the unconstrained (real) space,
# so each latent gets a Normal whose scale is kept positive via softplus.
#qp = edward.models.BetaWithSoftplusConcentration(tf.Variable(1.), tf.Variable(1.))
qp = ed.models.Normal(loc=tf.get_variable("qp/loc", []),
                      scale=tf.nn.softplus(tf.get_variable("qp/scale", [])))
qconc = ed.models.Normal(loc=tf.get_variable("qconc/loc", []),
                         scale=tf.nn.softplus(
                             tf.get_variable("qconc/scale", [])))

# Run KL(q || p) variational inference, matching each latent variable to its
# approximating family, conditioned on the observed data.
inference = ed.KLqp({p: qp, conc_param: qconc}, data={x: x_data})
inference.run()

# PRINT RESULTS
# Map posterior samples back to the original constrained space by inverting
# the bijector recorded per latent in inference.transformations
# (NOTE(review): presumably populated by KLqp's automatic transform — confirm).
qp_samples = ed.transform(
    qp, bijectors.Invert(
        inference.transformations[p].bijector)).sample(100).eval()

print("True prob success: {:.2f}, inferred {:.3f} +- {:.2f}".format(
    p_true, qp_samples.mean(), np.sqrt(qp_samples.var())))

qconc_samples = ed.transform(
    qconc, bijectors.Invert(
        inference.transformations[conc_param].bijector)).sample(100).eval()

print("True concentration: {:.2f}, Inferred: {:.3f} +- {:.2f}".format(
    true_conc, qconc_samples.mean(), np.sqrt(qconc_samples.var())))