Code example #1
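These snippets appear to come from TensorFlow Probability's BatchReshape test suite, so they rely on a test-class context (self.dtype, self.is_static_shape, self.evaluate) and on module-level aliases that the excerpts do not show. A plausible common import block, assuming the standard TFP test preamble (the exact module paths are an assumption), is:

# Assumed imports: the usual TensorFlow Probability test preamble.
import numpy as np
import tensorflow.compat.v1 as tf1
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp
from tensorflow_probability.python.bijectors import bijector_test_util
from tensorflow_probability.python.internal import test_util

tfd = tfp.distributions

Example #1 checks that BatchReshape refuses to broadcast: a Poisson with batch shape [4] is reshaped to [1, 4, 1], and log_prob must then reject an input of shape [4] (too few batch and event dims) as well as one of shape [1, 1, 4] (unexpected batch and event shape). The static-shape and eager paths raise NotImplementedError immediately; the dynamic-shape graph path raises the equivalent op error at evaluation time.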
    def test_broadcasting_explicitly_unsupported(self):
        old_batch_shape = [4]
        new_batch_shape = [1, 4, 1]
        rate_ = self.dtype([1, 10, 2, 20])

        rate = tf1.placeholder_with_default(
            rate_, shape=old_batch_shape if self.is_static_shape else None)
        poisson_4 = tfd.Poisson(rate, validate_args=True)
        new_batch_shape_ph = (tf.constant(np.int32(new_batch_shape))
                              if self.is_static_shape else
                              tf1.placeholder_with_default(
                                  np.int32(new_batch_shape), shape=None))
        poisson_141_reshaped = tfd.BatchReshape(poisson_4,
                                                new_batch_shape_ph,
                                                validate_args=True)

        x_4 = self.dtype([2, 12, 3, 23])
        x_114 = self.dtype([2, 12, 3, 23]).reshape(1, 1, 4)

        if self.is_static_shape or tf.executing_eagerly():
            with self.assertRaisesRegex(NotImplementedError,
                                        'too few batch and event dims'):
                poisson_141_reshaped.log_prob(x_4)
            with self.assertRaisesRegex(NotImplementedError,
                                        'unexpected batch and event shape'):
                poisson_141_reshaped.log_prob(x_114)
            return

        with self.assertRaisesOpError('too few batch and event dims'):
            self.evaluate(poisson_141_reshaped.log_prob(x_4))

        with self.assertRaisesOpError('unexpected batch and event shape'):
            self.evaluate(poisson_141_reshaped.log_prob(x_114))
Code example #2
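Example #2 verifies validation of implicit (-1) dimensions in the requested batch shape: asking for two unknown dimensions at once must fail with 'At most one dimension can be unknown'.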
 def test_at_most_one_implicit_dimension(self):
     batch_shape = tf.Variable([-1, -1])
     self.evaluate(batch_shape.initializer)
     with self.assertRaisesOpError('At most one dimension can be unknown'):
         d = tfd.BatchReshape(tfd.Normal(0, 1),
                              batch_shape,
                              validate_args=True)
         self.evaluate(d.sample(seed=test_util.test_seed()))
Code example #3
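Example #3 is a Hypothesis strategy for drawing random BatchReshape distributions. The helpers distributions, depths, and params_used are assumed to be defined elsewhere in the same module; on top of the common preamble above, it presumably also needs the following aliases (again an assumption, matching how hp.note, hps.composite, and tfp_hps.shapes are used in the body):

# Additional assumed imports for the Hypothesis-based strategy.
import hypothesis as hp
from hypothesis import strategies as hps
from tensorflow_probability.python.internal import hypothesis_testlib as tfp_hps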
@hps.composite
def batch_reshapes(
    draw, batch_shape=None, event_dim=None,
    enable_vars=False, depth=None,
    eligibility_filter=lambda name: True, validate_args=True):
  """Strategy for drawing `BatchReshape` distributions.

  The underlying distribution is drawn from the `distributions` strategy.

  Args:
    draw: Hypothesis strategy sampler supplied by `@hps.composite`.
    batch_shape: An optional `TensorShape`.  The batch shape of the resulting
      `BatchReshape` distribution.  Note that the underlying distribution will
      in general have a different batch shape, to make the reshaping
      non-trivial.  Hypothesis will pick one if omitted.
    event_dim: Optional Python int giving the size of each of the underlying
      distribution's parameters' event dimensions.  This is shared across all
      parameters, permitting square event matrices, compatible location and
      scale Tensors, etc. If omitted, Hypothesis will choose one.
    enable_vars: TODO(bjp): Make this `True` all the time and put variable
      initialization in slicing_test.  If `False`, the returned parameters are
      all `tf.Tensor`s and not {`tf.Variable`, `tfp.util.DeferredTensor`,
      `tfp.util.TransformedVariable`}.
    depth: Python `int` giving maximum nesting depth of compound Distributions.
    eligibility_filter: Optional Python callable.  Blacklists some Distribution
      class names so they will not be drawn.
    validate_args: Python `bool`; whether to enable runtime assertions.

  Returns:
    dists: A strategy for drawing `BatchReshape` distributions with the
      specified `batch_shape` (or an arbitrary one if omitted).
  """
  if depth is None:
    depth = draw(depths())

  if batch_shape is None:
    batch_shape = draw(tfp_hps.shapes(min_ndims=1, max_side=4))

  # TODO(b/142135119): Wanted to draw general input and output shapes like the
  # following, but Hypothesis complained about filtering out too many things.
  # underlying_batch_shape = draw(tfp_hps.shapes(min_ndims=1))
  # hp.assume(
  #   batch_shape.num_elements() == underlying_batch_shape.num_elements())
  underlying_batch_shape = [tf.TensorShape(batch_shape).num_elements()]

  underlying = draw(
      distributions(
          batch_shape=underlying_batch_shape,
          event_dim=event_dim,
          enable_vars=enable_vars,
          depth=depth - 1,
          eligibility_filter=eligibility_filter,
          validate_args=validate_args))
  hp.note('Forming BatchReshape with underlying dist {}; '
          'parameters {}; batch_shape {}'.format(
              underlying, params_used(underlying), batch_shape))
  result_dist = tfd.BatchReshape(
      underlying, batch_shape=batch_shape, validate_args=validate_args)
  return result_dist
Code example #4
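Example #4 covers mutation: a batch shape held in a tf.Variable is valid at construction, then reassigned to two -1 entries, after which sampling must fail with the same 'At most one dimension can be unknown' error.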
 def test_mutated_at_most_one_implicit_dimension(self):
     batch_shape = tf.Variable([1, 1])
     self.evaluate(batch_shape.initializer)
     dist = tfd.Normal([[0]], [[1]])
     d = tfd.BatchReshape(dist, batch_shape, validate_args=True)
     self.evaluate(d.sample(seed=test_util.test_seed()))
     with self.assertRaisesOpError('At most one dimension can be unknown'):
         with tf.control_dependencies([batch_shape.assign([-1, -1])]):
             self.evaluate(d.sample(seed=test_util.test_seed()))
Code example #5
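Example #5 checks that the default event-space bijector of the reshaped distribution agrees with that of the underlying Chi2, up to a reshape of the batch dimensions.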
 def test_default_event_space_bijector(self):
     dist = tfd.Chi2([1., 2., 3., 6.], validate_args=True)
     batch_shape = [2, 2, 1]
     reshape_dist = tfd.BatchReshape(dist, batch_shape, validate_args=True)
     x = self.evaluate(dist._experimental_default_event_space_bijector()(
         10. * tf.ones(dist.batch_shape)))
     x_reshape = self.evaluate(
         reshape_dist._experimental_default_event_space_bijector()(
             10. * tf.ones(reshape_dist.batch_shape)))
     self.assertAllEqual(tf.reshape(x, batch_shape), x_reshape)
Code example #6
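Example #6 runs the scalar-congruency check on the default event-space bijector of a Triangular distribution wrapped in a BatchReshape with an empty batch shape.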
 def test_default_event_space_bijector_scalar_congruency(self):
     dist = tfd.Triangular(low=2., high=10., peak=7., validate_args=True)
     reshape_dist = tfd.BatchReshape(dist,
                                     batch_shape=(),
                                     validate_args=True)
     eps = 1e-6
     bijector_test_util.assert_scalar_congruency(
         reshape_dist._experimental_default_event_space_bijector(),
         lower_x=2 + eps,
         upper_x=10 - eps,
         eval_func=self.evaluate)
Code example #7
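Examples #7 through #9 are helpers that build an (original, reshaped) distribution pair, feeding the new batch shape either as a constant or through a placeholder depending on self.is_static_shape. This one builds a MultivariateNormalDiag.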
  def make_mvn(self, dims, new_batch_shape, old_batch_shape):
    new_batch_shape_ph = (
        tf.constant(np.int32(new_batch_shape)) if self.is_static_shape else
        tf1.placeholder_with_default(np.int32(new_batch_shape), shape=None))

    scale = np.ones(old_batch_shape + [dims], self.dtype)
    scale_ph = tf1.placeholder_with_default(
        scale, shape=scale.shape if self.is_static_shape else None)
    mvn = tfd.MultivariateNormalDiag(scale_diag=scale_ph, validate_args=True)
    reshape_mvn = tfd.BatchReshape(
        distribution=mvn, batch_shape=new_batch_shape_ph, validate_args=True)
    return mvn, reshape_mvn
Code example #8
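Example #8 is the analogous helper for a scalar Normal, laying distinct scale values out over the old batch shape.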
  def make_normal(self, new_batch_shape, old_batch_shape):
    new_batch_shape_ph = (
        tf.constant(np.int32(new_batch_shape)) if self.is_static_shape else
        tf1.placeholder_with_default(np.int32(new_batch_shape), shape=None))

    scale = self.dtype(0.5 + np.arange(
        np.prod(old_batch_shape)).reshape(old_batch_shape))
    scale_ph = tf1.placeholder_with_default(
        scale, shape=scale.shape if self.is_static_shape else None)
    normal = tfd.Normal(loc=self.dtype(0), scale=scale_ph, validate_args=True)
    reshape_normal = tfd.BatchReshape(
        distribution=normal, batch_shape=new_batch_shape_ph, validate_args=True)
    return normal, reshape_normal
Code example #9
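Example #9 is the Wishart variant, tiling a pair of 2x2 scale matrices out to the old batch shape.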
    def make_wishart(self, dims, new_batch_shape, old_batch_shape):
        new_batch_shape_ph = (tf.constant(np.int32(new_batch_shape))
                              if self.is_static_shape else
                              tf1.placeholder_with_default(
                                  np.int32(new_batch_shape), shape=None))

        scale = self.dtype([
            [[1., 0.5], [0.5, 1.]],
            [[0.5, 0.25], [0.25, 0.75]],
        ])
        scale = np.reshape(np.concatenate([scale, scale], axis=0),
                           old_batch_shape + [dims, dims])
        scale_ph = tf1.placeholder_with_default(
            scale, shape=scale.shape if self.is_static_shape else None)
        wishart = tfd.Wishart(df=5, scale=scale_ph)
        reshape_wishart = tfd.BatchReshape(distribution=wishart,
                                           batch_shape=new_batch_shape_ph,
                                           validate_args=True)

        return wishart, reshape_wishart
Code example #10
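Example #10 checks that the default event-space bijector of a reshaped Uniform is bijective and finite, probing it with grids of unconstrained (x) and constrained (y) values shaped like the new batch shape.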
    def test_default_event_space_bijector_bijective_and_finite(self):
        batch_shape = [5, 1, 4]
        batch_size = np.prod(batch_shape)
        low = tf.Variable(
            np.linspace(-5., 5., batch_size).astype(self.dtype),
            shape=(batch_size,) if self.is_static_shape else None)
        dist = tfd.Uniform(low=low, high=30., validate_args=True)
        reshape_dist = tfd.BatchReshape(dist,
                                        batch_shape=batch_shape,
                                        validate_args=True)
        x = np.linspace(-10., 10.,
                        batch_size).astype(self.dtype).reshape(batch_shape)
        y = np.linspace(5., 30 - 1e-4,
                        batch_size).astype(self.dtype).reshape(batch_shape)

        self.evaluate(low.initializer)
        bijector_test_util.assert_bijective_and_finite(
            reshape_dist._experimental_default_event_space_bijector(),
            x,
            y,
            eval_func=self.evaluate,
            event_ndims=0,
            rtol=1e-4)
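For a self-contained picture of what these tests exercise, here is a minimal sketch of BatchReshape outside any test harness, using only public TFP APIs (the concrete numbers mirror example #1):

import numpy as np
import tensorflow_probability as tfp

tfd = tfp.distributions

# A Poisson with batch shape [4], reshaped to batch shape [1, 4, 1].
base = tfd.Poisson(rate=[1., 10., 2., 20.], validate_args=True)
reshaped = tfd.BatchReshape(base, batch_shape=[1, 4, 1], validate_args=True)
print(reshaped.batch_shape)  # (1, 4, 1)

# log_prob expects inputs laid out against the *new* batch shape; an input
# shaped like the old batch shape [4] is rejected, as example #1 asserts.
x = np.float32([2., 12., 3., 23.]).reshape([1, 4, 1])
print(reshaped.log_prob(x).shape)  # (1, 4, 1)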