def _make_prior(std, prior_W, weight_shape):
    """Check or construct a prior weight distribution.

    Parameters
    ----------
    std : float
        Standard deviation for the default Normal prior; only used when
        ``prior_W`` is None.
    prior_W : distribution or None
        A user-supplied prior over the weights. If None, a Normal prior of
        shape ``weight_shape`` is constructed via ``norm_prior``.
    weight_shape : tuple
        The expected shape of the (possibly constructed) prior.

    Returns
    -------
    prior_W : distribution
        The validated, or newly constructed, prior weight distribution.

    Raises
    ------
    AssertionError
        If the (supplied or constructed) prior does not match
        ``weight_shape`` according to ``_is_dim``.
    """
    if prior_W is None:
        prior_W = norm_prior(weight_shape, std=std)
    # Raise explicitly instead of using an `assert` statement: a bare
    # assert is stripped under `python -O`, which would silently disable
    # this dimension check. The exception type and message are unchanged,
    # so existing callers/tests see identical behavior.
    if not _is_dim(prior_W, weight_shape):
        raise AssertionError("Prior inconsistent dimension!")
    return prior_W
with tc.test_session(): tf.global_variables_initializer().run() P = Phi.eval(feed_dict={x_: x}) for i in range(P.shape[0]): p = P[i] assert p.shape == (N, 2 * D) # Check behaving properly with k(x, x) ~ 1.0 assert np.allclose((p**2).sum(axis=1), np.ones(N)) # Make sure we get a valid KL kl = KL.eval() if isinstance(KL, tf.Tensor) else KL assert kl >= 0 @pytest.mark.parametrize('dists', [ {'prior_W': norm_prior((4, 4, 3, D), 1.), 'prior_b': norm_prior((D,), 1.)}, {'post_W': norm_prior((4, 4, 3, D), 1.), 'post_b': norm_prior((D,), 1.)}, {'prior_W': norm_prior((4, 4, 3, D), 1.), 'post_W': norm_prior((4, 4, 3, D), 1.)} ]) def test_conv2d_distribution(dists, make_image_data): """Test initialising dense variational layers with distributions.""" x, _, X = make_image_data S = 3 x_, X_ = _make_placeholders(x, S) N, height, width, channels = x.shape Phi, KL = ab.Conv2DVariational(filters=D, kernel_size=(4, 4), **dists)(X_) tc = tf.test.TestCase()