def testBijector(self):
  """Checks forward/inverse round-trip and log-det-Jacobian consistency.

  Builds a MaskedAutoregressiveFlow from the subclass-provided kwargs and
  verifies, on a fixed (3, 4, 2) float32 input, that inverse(forward(x))
  recovers x and that the inverse log-det-Jacobian is the negation of the
  forward one.
  """
  x_ = np.arange(3 * 4 * 2).astype(np.float32).reshape(3, 4, 2)
  with self.test_session() as sess:
    ma = MaskedAutoregressiveFlow(
        validate_args=True,
        **self._autoregressive_flow_kwargs)
    x = constant_op.constant(x_)
    forward_x = ma.forward(x)
    # Use identity to invalidate cache.
    inverse_y = ma.inverse(array_ops.identity(forward_x))
    fldj = ma.forward_log_det_jacobian(x)
    # Use identity to invalidate cache.
    ildj = ma.inverse_log_det_jacobian(array_ops.identity(forward_x))
    # The flow's shift/scale network creates variables; they must be
    # initialized before any tensor is evaluated.
    variables.global_variables_initializer().run()
    [
        forward_x_,
        inverse_y_,
        ildj_,
        fldj_,
    ] = sess.run([
        forward_x,
        inverse_y,
        ildj,
        fldj,
    ])
    self.assertEqual("masked_autoregressive_flow", ma.name)
    # NOTE(review): this compares forward_x_ with itself, so the assertion
    # is vacuous. It was presumably meant to compare against a recomputed
    # forward pass or an expected value -- confirm the intended operands.
    self.assertAllClose(forward_x_, forward_x_, rtol=1e-6, atol=0.)
    self.assertAllClose(x_, inverse_y_, rtol=1e-5, atol=0.)
    self.assertAllClose(ildj_, -fldj_, rtol=1e-6, atol=0.)
def testCompareToBijector(self):
  """Demonstrates equivalence between TD, Bijector approach and AR dist."""
  sample_shape = np.int32([4, 5])
  batch_shape = np.int32([])
  event_size = np.int32(2)
  with self.cached_session() as sess:
    batch_event_shape = np.concatenate([batch_shape, [event_size]], axis=0)
    seed_sample = array_ops.zeros(batch_event_shape)
    scale_bijector = Affine(scale_tril=self._random_scale_tril(event_size))
    # Direct autoregressive-distribution construction.
    ar_dist = autoregressive_lib.Autoregressive(
        self._normal_fn(scale_bijector), seed_sample, validate_args=True)
    # Equivalent construction via a MAF bijector over a standard Normal.
    ar_flow = MaskedAutoregressiveFlow(
        is_constant_jacobian=True,
        shift_and_log_scale_fn=lambda x: [None, scale_bijector.forward(x)],
        validate_args=True)
    trans_dist = transformed_distribution_lib.TransformedDistribution(
        distribution=normal_lib.Normal(loc=0., scale=1.),
        bijector=ar_flow,
        event_shape=[event_size],
        batch_shape=batch_shape,
        validate_args=True)
    x_shape = np.concatenate(
        [sample_shape, batch_shape, [event_size]], axis=0)
    # Random points in (-1, 1) at which to compare log-probabilities.
    x = 2. * self._rng.random_sample(x_shape).astype(np.float32) - 1.
    td_log_prob_, ar_log_prob_ = sess.run(
        [trans_dist.log_prob(x), ar_dist.log_prob(x)])
    self.assertAllClose(td_log_prob_, ar_log_prob_, atol=0., rtol=1e-6)
def testBijector(self):
  """Checks forward/inverse round-trip and log-det-Jacobian consistency.

  Builds a MaskedAutoregressiveFlow from the subclass-provided kwargs and
  verifies, on a fixed (3, 4, 2) float32 input, that inverse(forward(x))
  recovers x and that the inverse log-det-Jacobian (with event_ndims=1)
  is the negation of the forward one.
  """
  x_ = np.arange(3 * 4 * 2).astype(np.float32).reshape(3, 4, 2)
  with self.cached_session() as sess:
    ma = MaskedAutoregressiveFlow(
        validate_args=True,
        **self._autoregressive_flow_kwargs)
    x = constant_op.constant(x_)
    forward_x = ma.forward(x)
    # Use identity to invalidate cache.
    inverse_y = ma.inverse(array_ops.identity(forward_x))
    fldj = ma.forward_log_det_jacobian(x, event_ndims=1)
    # Use identity to invalidate cache.
    ildj = ma.inverse_log_det_jacobian(
        array_ops.identity(forward_x), event_ndims=1)
    # The flow's shift/scale network creates variables; they must be
    # initialized before any tensor is evaluated.
    variables.global_variables_initializer().run()
    [
        forward_x_,
        inverse_y_,
        ildj_,
        fldj_,
    ] = sess.run([
        forward_x,
        inverse_y,
        ildj,
        fldj,
    ])
    self.assertEqual("masked_autoregressive_flow", ma.name)
    # NOTE(review): this compares forward_x_ with itself, so the assertion
    # is vacuous. It was presumably meant to compare against a recomputed
    # forward pass or an expected value -- confirm the intended operands.
    self.assertAllClose(forward_x_, forward_x_, rtol=1e-6, atol=0.)
    self.assertAllClose(x_, inverse_y_, rtol=1e-5, atol=0.)
    self.assertAllClose(ildj_, -fldj_, rtol=1e-6, atol=0.)
def testMutuallyConsistent(self):
  """Checks that sampling and log_prob of the MAF distribution agree."""
  dims = 4
  with self.cached_session() as sess:
    flow = MaskedAutoregressiveFlow(
        validate_args=True, **self._autoregressive_flow_kwargs)
    maf_dist = transformed_distribution_lib.TransformedDistribution(
        distribution=normal_lib.Normal(loc=0., scale=1.),
        bijector=flow,
        event_shape=[dims],
        validate_args=True)
    # Monte-Carlo consistency check provided by the test harness.
    self.run_test_sample_consistent_log_prob(
        sess_run_fn=sess.run,
        dist=maf_dist,
        num_samples=int(1e5),
        radius=1.,
        center=0.,
        rtol=0.02)