def _make_posterior(std, weight_shape, full, suffix=None):
    """Check/make posterior."""
    # A full covariance is only sensible for multi-dimensional weights —
    # never for an intercept — so inspect the shape before choosing the
    # posterior factory.
    wants_full = full and len(weight_shape) > 1
    factory = gaus_posterior if wants_full else norm_posterior
    posterior = factory(weight_shape, std0=std, suffix=suffix)

    assert _is_dim(posterior, weight_shape), \
        "Posterior inconsistent dimension!"

    return posterior
def _make_posterior(self, post_W, weight_shape):
    """Check/make posterior."""
    # A caller-supplied posterior is used as-is; otherwise build one here.
    if post_W is None:
        # We don't want a full-covariance on an intercept, so only use the
        # full (Gaussian) posterior when the weights have more than one dim.
        use_full = self.full and len(weight_shape) > 1
        factory = gaus_posterior if use_full else norm_posterior
        post_W = factory(weight_shape, std0=self.std)

    assert _is_dim(post_W, weight_shape), \
        "Posterior inconsistent dimension!"

    return post_W
Phi, KL = ab.Conv2DVariational(filters=D, kernel_size=(4, 4), **dists)(X_) tc = tf.test.TestCase() with tc.test_session(): tf.global_variables_initializer().run() P = Phi.eval(feed_dict={x_: x}) assert P.shape == (S, N, height, width, D) assert KL.eval() >= 0. @pytest.mark.parametrize('dists', [ {'prior_W': norm_prior(DIM, 1.), 'prior_b': norm_prior((D,), 1.)}, {'post_W': norm_prior(DIM, 1.), 'post_b': norm_prior((D,), 1.)}, {'prior_W': norm_prior(DIM, 1.), 'post_W': norm_prior(DIM, 1.)}, {'prior_W': norm_prior(DIM, 1.), 'post_W': gaus_posterior(DIM, 1.)}, {'prior_W': gaus_posterior(DIM, 1.), 'post_W': gaus_posterior(DIM, 1.)}, ]) def test_dense_distribution(dists, make_data): """Test initialising dense variational layers with distributions.""" x, _, _ = make_data S = 3 x_, X_ = _make_placeholders(x, S) N = x.shape[0] Phi, KL = ab.DenseVariational(output_dim=D, **dists)(X_) tc = tf.test.TestCase() with tc.test_session(): tf.global_variables_initializer().run() P = Phi.eval(feed_dict={x_: x})