@hps.composite
def mixtures_same_family(draw,
                         batch_shape=None,
                         event_dim=None,
                         enable_vars=False):
  if batch_shape is None:
    # Ensure the components dist has at least one batch dim (a component dim).
    batch_shape = draw(tfp_hps.batch_shapes(min_ndims=1, min_lastdimsize=2))
  else:  # This mixture adds a batch dim to its underlying components dist.
    batch_shape = tensorshape_util.concatenate(
        batch_shape,
        draw(tfp_hps.batch_shapes(min_ndims=1, max_ndims=1, min_lastdimsize=2)))

  component_dist, _ = draw(
      distributions(
          batch_shape=batch_shape,
          event_dim=event_dim,
          enable_vars=enable_vars,
          eligibility_filter=lambda name: name != 'MixtureSameFamily'))
  logging.info(
      'component distribution: %s; parameters used: %s', component_dist,
      [k for k, v in six.iteritems(component_dist.parameters) if v is not None])
  # scalar or same-shaped categorical?
  mixture_batch_shape = draw(
      hps.one_of(hps.just(batch_shape[:-1]), hps.just(tf.TensorShape([]))))
  mixture_dist, _ = draw(distributions(
      dist_name='Categorical',
      batch_shape=mixture_batch_shape,
      event_dim=tensorshape_util.as_list(batch_shape)[-1]))
  logging.info(
      'mixture distribution: %s; parameters used: %s', mixture_dist,
      [k for k, v in six.iteritems(mixture_dist.parameters) if v is not None])
  return (tfd.MixtureSameFamily(
      components_distribution=component_dist,
      mixture_distribution=mixture_dist,
      validate_args=True), batch_shape[:-1])


@hps.composite
def independents(draw, batch_shape=None, event_dim=None, enable_vars=False):
  reinterpreted_batch_ndims = draw(hps.integers(min_value=0, max_value=2))
  if batch_shape is None:
    batch_shape = draw(
        tfp_hps.batch_shapes(min_ndims=reinterpreted_batch_ndims))
  else:  # This independent adds some batch dims to its underlying distribution.
    batch_shape = tensorshape_util.concatenate(
        batch_shape,
        draw(
            tfp_hps.batch_shapes(
                min_ndims=reinterpreted_batch_ndims,
                max_ndims=reinterpreted_batch_ndims)))
  underlying, batch_shape = draw(
      distributions(
          batch_shape=batch_shape,
          event_dim=event_dim,
          enable_vars=enable_vars,
          eligibility_filter=lambda name: name != 'Independent'))
  logging.info(
      'underlying distribution: %s; parameters used: %s', underlying,
      [k for k, v in six.iteritems(underlying.parameters) if v is not None])
  return (tfd.Independent(
      underlying,
      reinterpreted_batch_ndims=reinterpreted_batch_ndims,
      validate_args=True),
          batch_shape[:len(batch_shape) - reinterpreted_batch_ndims])


@hps.composite
def distributions(draw,
                  dist_name=None,
                  batch_shape=None,
                  event_dim=None,
                  enable_vars=False,
                  eligibility_filter=lambda name: True):
  """Samples one a set of supported distributions."""
  if dist_name is None:
    dist_name = draw(
        hps.one_of(
            map(hps.just,
                [k for k in INSTANTIABLE_DISTS.keys()
                 if eligibility_filter(k)])))

  dist_cls, _ = INSTANTIABLE_DISTS[dist_name]
  if dist_name == 'Independent':
    return draw(independents(batch_shape, event_dim, enable_vars))
  if dist_name == 'MixtureSameFamily':
    return draw(mixtures_same_family(batch_shape, event_dim, enable_vars))
  if dist_name == 'TransformedDistribution':
    return draw(transformed_distributions(batch_shape, event_dim, enable_vars))

  if batch_shape is None:
    batch_shape = draw(tfp_hps.batch_shapes())

  params_kwargs = draw(
      broadcasting_params(
          dist_name, batch_shape, event_dim=event_dim, enable_vars=enable_vars))
  params_constrained = constraint_for(dist_name)(params_kwargs)
  assert_shapes_unchanged(params_kwargs, params_constrained)
  params_constrained['validate_args'] = True
  return dist_cls(**params_constrained), batch_shape


@hps.composite
def transformed_distributions(draw,
                              batch_shape=None,
                              event_dim=None,
                              enable_vars=False):
  bijector = draw(bijector_hps.unconstrained_bijectors())
  logging.info('TD bijector: %s', bijector)
  if batch_shape is None:
    batch_shape = draw(tfp_hps.batch_shapes())
  underlying_batch_shape = batch_shape
  batch_shape_arg = None
  if draw(hps.booleans()):
    # Use batch_shape overrides.
    underlying_batch_shape = tf.TensorShape([])  # scalar underlying batch
    batch_shape_arg = batch_shape
  underlyings = distributions(
      batch_shape=underlying_batch_shape,
      event_dim=event_dim,
      enable_vars=enable_vars).map(
          lambda dist_and_batch_shape: dist_and_batch_shape[0]).filter(
              bijector_hps.distribution_filter_for(bijector))
  to_transform = draw(underlyings)
  logging.info(
      'TD underlying distribution: %s; parameters used: %s', to_transform,
      [k for k, v in six.iteritems(to_transform.parameters) if v is not None])
  return (tfd.TransformedDistribution(
      bijector=bijector,
      distribution=to_transform,
      batch_shape=batch_shape_arg,
      validate_args=True), batch_shape)
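
A minimal sketch of how these pair-returning strategies are consumed in a
Hypothesis test, assuming `hp` aliases the `hypothesis` module (matching the
`hps` alias for `hypothesis.strategies` used above); the test name is
illustrative:

import hypothesis as hp

@hp.given(hps.data())
def test_distributions_batch_shape(data):
  dist, expected_batch_shape = data.draw(distributions())
  # Contract: the second element of the pair is the outer distribution's
  # batch shape.
  assert expected_batch_shape == dist.batch_shape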

Example 5

@hps.composite
def rq_splines(draw, batch_shape=None, dtype=tf.float32):
    if batch_shape is None:
        batch_shape = draw(tfp_hps.batch_shapes())

    lo = draw(hps.floats(min_value=-5, max_value=.5))
    hi = draw(hps.floats(min_value=-.5, max_value=5))
    lo, hi = min(lo, hi), max(lo, hi) + .2

    constraints = dict(
        bin_widths=functools.partial(bijector_hps.spline_bin_size_constraint,
                                     hi=hi,
                                     lo=lo,
                                     dtype=dtype),
        bin_heights=functools.partial(bijector_hps.spline_bin_size_constraint,
                                      hi=hi,
                                      lo=lo,
                                      dtype=dtype),
        knot_slopes=functools.partial(bijector_hps.spline_slope_constraint,
                                      dtype=dtype))
    params = draw(
        tfp_hps.broadcasting_params(batch_shape,
                                    params_event_ndims=dict(bin_widths=2,
                                                            bin_heights=2,
                                                            knot_slopes=2),
                                    constraint_fn_for=constraints.get))
    return tfb.RationalQuadraticSpline(range_min=lo,
                                       validate_args=draw(hps.booleans()),
                                       **params)

Example 6

@hps.composite
def bijectors(draw,
              bijector_name=None,
              batch_shape=None,
              event_dim=None,
              enable_vars=False):
    if bijector_name is None:
        bijector_name = draw(hps.one_of(map(hps.just, TF2_FRIENDLY_BIJECTORS)))
    if batch_shape is None:
        batch_shape = draw(tfp_hps.batch_shapes())
    if event_dim is None:
        event_dim = draw(hps.integers(min_value=2, max_value=6))
    if bijector_name == 'Invert':
        underlying_name = draw(
            hps.one_of(map(hps.just,
                           set(TF2_FRIENDLY_BIJECTORS) - {'Invert'})))
        underlying, batch_shape = draw(
            bijectors(bijector_name=underlying_name,
                      batch_shape=batch_shape,
                      event_dim=event_dim,
                      enable_vars=enable_vars))
        return tfb.Invert(underlying, validate_args=True), batch_shape

    bijector_params = draw(
        broadcasting_params(bijector_name,
                            batch_shape,
                            event_dim=event_dim,
                            enable_vars=enable_vars))
    ctor = getattr(tfb, bijector_name)
    return ctor(validate_args=True, **bijector_params), batch_shape
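
The `Invert` branch above re-enters the same strategy while excluding
`Invert` itself, so the nesting stops after one level. A hedged sketch
exercising that property (test name illustrative):

@hp.given(hps.data())
def test_invert_wraps_non_invert(data):
    bijector, _ = data.draw(bijectors(bijector_name='Invert', event_dim=2))
    # The underlying name was drawn from TF2_FRIENDLY_BIJECTORS minus
    # {'Invert'}, so the wrapped bijector is never itself an Invert.
    assert not isinstance(bijector.bijector, tfb.Invert)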

Example 7

def testTheoreticalFldj(self, data):
    # get_fldj_theoretical test rig requires 1-d batches.
    batch_shape = data.draw(tfp_hps.batch_shapes(min_ndims=1, max_ndims=1))
    bijector = data.draw(
        rq_splines(batch_shape=batch_shape, dtype=tf.float64))
    self.assertEqual(tf.float64, bijector.dtype)
    kx, ky, kd = self.evaluate(
        [bijector.bin_widths, bijector.bin_heights, bijector.knot_slopes])
    logging.info('kx: %s\nky: %s\nkd: %s', kx, ky, kd)
    x_shp = ((kx + ky)[..., :-1] + kd).shape[:-1]
    if x_shp[-1] == 1:  # Possibly broadcast the x dim.
        dim = data.draw(hps.integers(min_value=1, max_value=7))
        x_shp = x_shp[:-1] + (dim,)
    x = np.linspace(-5, 5, np.prod(x_shp),
                    dtype=np.float64).reshape(*x_shp)
    y = self.evaluate(bijector.forward(x))
    bijector_test_util.assert_bijective_and_finite(bijector,
                                                   x,
                                                   y,
                                                   eval_func=self.evaluate,
                                                   event_ndims=1,
                                                   inverse_event_ndims=1,
                                                   rtol=1e-5)
    fldj = bijector.forward_log_det_jacobian(x, event_ndims=1)
    fldj_theoretical = bijector_test_util.get_fldj_theoretical(
        bijector, x, event_ndims=1)
    self.assertAllClose(self.evaluate(fldj_theoretical),
                        self.evaluate(fldj),
                        atol=1e-5,
                        rtol=1e-5)

Example 8

@hps.composite
def codomain_tensors(draw, bijector, shape=None):
    if is_invert(bijector):
        return draw(domain_tensors(bijector.bijector, shape))
    if shape is None:
        shape = draw(tfp_hps.batch_shapes())
    bijector_name = type(bijector).__name__
    support = bijector_hps.bijector_supports()[bijector_name].inverse
    constraint_fn = constrainer(support)
    return draw(tfp_hps.constrained_tensors(constraint_fn, shape))
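
A sketch of how codomain draws feed an inverse evaluation; `domain_tensors`
is assumed to be the mirror-image strategy referenced in the `Invert` branch
above, and 'Softplus' is assumed to be among the supported bijector names:

@hp.given(hps.data())
def test_inverse_on_codomain(data):
    bijector, _ = data.draw(bijectors(bijector_name='Softplus'))
    ys = data.draw(codomain_tensors(bijector, shape=tf.TensorShape([3])))
    # Codomain draws already satisfy the bijector's inverse support, so
    # inverse() is well-defined on them.
    xs = bijector.inverse(ys)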

Example 9

@hps.composite
def base_distributions(draw,
                       dist_name=None,
                       batch_shape=None,
                       event_dim=None,
                       enable_vars=False,
                       eligibility_filter=lambda name: True):
    """Strategy for drawing arbitrary base Distributions.

  This does not draw compound distributions like `Independent`,
  `MixtureSameFamily`, or `TransformedDistribution`; only base Distributions
  that do not accept other Distributions as arguments.

  Args:
    draw: Hypothesis MacGuffin.  Supplied by `@hps.composite`.
    dist_name: Optional Python `str`.  If given, the produced distributions
      will all have this type.
    batch_shape: An optional `TensorShape`.  The batch shape of the resulting
      Distribution.  Hypothesis will pick a batch shape if omitted.
    event_dim: Optional Python int giving the size of each of the
      distribution's parameters' event dimensions.  This is shared across all
      parameters, permitting square event matrices, compatible location and
      scale Tensors, etc. If omitted, Hypothesis will choose one.
    enable_vars: TODO(bjp): Make this `True` all the time and put variable
      initialization in slicing_test.  If `False`, the returned parameters are
      all Tensors, never Variables or DeferredTensor.
    eligibility_filter: Optional Python callable.  Blacklists some Distribution
      class names so they will not be drawn at the top level.

  Returns:
    dists: A strategy for drawing Distributions with the specified `batch_shape`
      (or an arbitrary one if omitted).
  """
    if dist_name is None:
        names = [
            k for k in INSTANTIABLE_BASE_DISTS.keys() if eligibility_filter(k)
        ]
        dist_name = draw(hps.one_of(map(hps.just, names)))

    if batch_shape is None:
        batch_shape = draw(tfp_hps.batch_shapes())

    params_kwargs = draw(
        broadcasting_params(dist_name,
                            batch_shape,
                            event_dim=event_dim,
                            enable_vars=enable_vars))
    params_constrained = constraint_for(dist_name)(params_kwargs)
    assert_shapes_unchanged(params_kwargs, params_constrained)
    params_constrained['validate_args'] = True
    dist_cls = INSTANTIABLE_BASE_DISTS[dist_name].cls
    result_dist = dist_cls(**params_constrained)
    if batch_shape != result_dist.batch_shape:
        msg = ('Distributions strategy generated a bad batch shape '
               'for {}, should have been {}.').format(result_dist, batch_shape)
        raise AssertionError(msg)
    return result_dist
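
Unlike the pair-returning variants earlier in this listing,
`base_distributions` returns the distribution alone and enforces the
batch-shape contract internally. A consuming sketch, assuming 'Normal' is
among the INSTANTIABLE_BASE_DISTS keys:

@hp.given(hps.data())
def test_base_dist_log_prob_shape(data):
    dist = data.draw(base_distributions(dist_name='Normal'))
    # A single sample's log_prob reproduces the batch shape.
    assert dist.log_prob(dist.sample()).shape == dist.batch_shape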


@hps.composite
def generalized_paretos(draw, batch_shape=None):
  if batch_shape is None:
    batch_shape = draw(tfp_hps.batch_shapes())

  constraints = dict(
      loc=tfp_hps.identity_fn,
      scale=tfp_hps.softplus_plus_eps(),
      concentration=lambda x: tf.math.tanh(x) * 0.24)  # <.25==safe for variance

  params = draw(
      tfp_hps.broadcasting_params(
          batch_shape,
          params_event_ndims=dict(loc=0, scale=0, concentration=0),
          constraint_fn_for=constraints.get))
  dist = tfd.GeneralizedPareto(validate_args=draw(hps.booleans()), **params)
  if dist.batch_shape != batch_shape:
    raise AssertionError(
        'batch_shape mismatch: expected {} but got {} (from {})'.format(
            batch_shape, dist.batch_shape, dist))
  return dist
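
The `tanh(x) * 0.24` constraint keeps |concentration| strictly below 0.25; a
Generalized Pareto's variance is finite only for concentration < 1/2, so the
bound (as the inline comment notes) leaves comfortable headroom. A sketch of
a check that leans on it:

@hp.given(hps.data())
def test_gp_variance_finite(data):
  dist = data.draw(generalized_paretos(batch_shape=tf.TensorShape([2])))
  # Finite because every drawn concentration satisfies |c| < 0.25 < 0.5.
  tf.debugging.assert_all_finite(dist.variance(), 'GP variance')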

Example 11

@hps.composite
def mixtures_same_family(draw,
                         batch_shape=None,
                         event_dim=None,
                         enable_vars=False,
                         depth=None):
    """Strategy for drawing `MixtureSameFamily` distributions.

  The component distribution is drawn from the `distributions` strategy.

  The Categorical mixture distributions are either shared across all batch
  members, or drawn independently for the full batch (as required by
  `MixtureSameFamily`).

  Args:
    draw: Hypothesis MacGuffin.  Supplied by `@hps.composite`.
    batch_shape: An optional `TensorShape`.  The batch shape of the resulting
      `MixtureSameFamily` distribution.  The component distribution will have a
      batch shape of 1 rank higher (for the components being mixed).  Hypothesis
      will pick a batch shape if omitted.
    event_dim: Optional Python int giving the size of each of the component
      distribution's parameters' event dimensions.  This is shared across all
      parameters, permitting square event matrices, compatible location and
      scale Tensors, etc. If omitted, Hypothesis will choose one.
    enable_vars: TODO(bjp): Make this `True` all the time and put variable
      initialization in slicing_test.  If `False`, the returned parameters are
      all Tensors, never Variables or DeferredTensor.
    depth: Python `int` giving maximum nesting depth of compound Distributions.

  Returns:
    dists: A strategy for drawing `MixtureSameFamily` distributions with the
      specified `batch_shape` (or an arbitrary one if omitted).
  """
    if depth is None:
        depth = draw(depths())

    if batch_shape is None:
        # Ensure the components dist has at least one batch dim (a component dim).
        batch_shape = draw(tfp_hps.batch_shapes(min_ndims=1,
                                                min_lastdimsize=2))
    else:  # This mixture adds a batch dim to its underlying components dist.
        batch_shape = tensorshape_util.concatenate(
            batch_shape,
            draw(
                tfp_hps.batch_shapes(min_ndims=1,
                                     max_ndims=1,
                                     min_lastdimsize=2)))

    component_dist = draw(
        distributions(batch_shape=batch_shape,
                      event_dim=event_dim,
                      enable_vars=enable_vars,
                      depth=depth - 1))
    logging.info('component distribution: %s; parameters used: %s',
                 component_dist, [
                     k for k, v in six.iteritems(component_dist.parameters)
                     if v is not None
                 ])
    # scalar or same-shaped categorical?
    mixture_batch_shape = draw(
        hps.one_of(hps.just(batch_shape[:-1]), hps.just(tf.TensorShape([]))))
    mixture_dist = draw(
        base_distributions(dist_name='Categorical',
                           batch_shape=mixture_batch_shape,
                           event_dim=tensorshape_util.as_list(batch_shape)[-1],
                           enable_vars=enable_vars))
    logging.info(
        'mixture distribution: %s; parameters used: %s', mixture_dist, [
            k
            for k, v in six.iteritems(mixture_dist.parameters) if v is not None
        ])
    result_dist = tfd.MixtureSameFamily(components_distribution=component_dist,
                                        mixture_distribution=mixture_dist,
                                        validate_args=True)
    if batch_shape[:-1] != result_dist.batch_shape:
        msg = ('MixtureSameFamily strategy generated a bad batch shape '
               'for {}, should have been {}.').format(result_dist,
                                                      batch_shape[:-1])
        raise AssertionError(msg)
    return result_dist
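
The contract checked above is that the rightmost drawn dimension indexes
mixture components, so it is stripped from the mixture's own batch shape. A
sketch with an arbitrarily chosen requested shape:

@hp.given(hps.data())
def test_mixture_component_dim(data):
    dist = data.draw(mixtures_same_family(batch_shape=tf.TensorShape([4, 3])))
    # The strategy appended one extra dim (of size >= 2) for the components.
    assert dist.batch_shape == tf.TensorShape([4, 3])
    assert dist.components_distribution.batch_shape[-1] >= 2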

Example 12

@hps.composite
def transformed_distributions(draw,
                              batch_shape=None,
                              event_dim=None,
                              enable_vars=False,
                              depth=None):
    """Strategy for drawing `TransformedDistribution`s.

  The transforming bijector is drawn from the
  `bijectors.hypothesis_testlib.unconstrained_bijectors` strategy.

  The underlying distribution is drawn from the `distributions` strategy, except
  that it must be compatible with the bijector according to
  `bijectors.hypothesis_testlib.distribution_filter_for` (these generally check
  that vector bijectors are not combined with scalar distributions, etc).

  Args:
    draw: Hypothesis MacGuffin.  Supplied by `@hps.composite`.
    batch_shape: An optional `TensorShape`.  The batch shape of the resulting
      `TransformedDistribution`.  The underlying distribution will sometimes
      have the same `batch_shape`, and sometimes have scalar batch shape.
      Hypothesis will pick a `batch_shape` if omitted.
    event_dim: Optional Python int giving the size of each of the underlying
      distribution's parameters' event dimensions.  This is shared across all
      parameters, permitting square event matrices, compatible location and
      scale Tensors, etc. If omitted, Hypothesis will choose one.
    enable_vars: TODO(bjp): Make this `True` all the time and put variable
      initialization in slicing_test.  If `False`, the returned parameters are
      all Tensors, never Variables or DeferredTensor.
    depth: Python `int` giving maximum nesting depth of compound Distributions.

  Returns:
    dists: A strategy for drawing `TransformedDistribution`s with the specified
      `batch_shape` (or an arbitrary one if omitted).
  """
    if depth is None:
        depth = draw(depths())

    bijector = draw(bijector_hps.unconstrained_bijectors())
    logging.info('TD bijector: %s', bijector)
    if batch_shape is None:
        batch_shape = draw(tfp_hps.batch_shapes())
    underlying_batch_shape = batch_shape
    batch_shape_arg = None
    if draw(hps.booleans()):
        # Use batch_shape overrides.
        underlying_batch_shape = tf.TensorShape([])  # scalar underlying batch
        batch_shape_arg = batch_shape
    underlyings = distributions(
        batch_shape=underlying_batch_shape,
        event_dim=event_dim,
        enable_vars=enable_vars,
        depth=depth - 1).filter(bijector_hps.distribution_filter_for(bijector))
    to_transform = draw(underlyings)
    logging.info(
        'TD underlying distribution: %s; parameters used: %s', to_transform, [
            k
            for k, v in six.iteritems(to_transform.parameters) if v is not None
        ])
    # TODO(bjp): Add test coverage for `event_shape` argument of
    # `TransformedDistribution`.
    result_dist = tfd.TransformedDistribution(bijector=bijector,
                                              distribution=to_transform,
                                              batch_shape=batch_shape_arg,
                                              validate_args=True)
    if batch_shape != result_dist.batch_shape:
        msg = ('TransformedDistribution strategy generated a bad batch shape '
               'for {}, should have been {}.').format(result_dist, batch_shape)
        raise AssertionError(msg)
    return result_dist
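
Whether or not the strategy takes the batch_shape-override path, the result
must report the requested batch shape, which the final assertion enforces. A
consuming sketch:

@hp.given(hps.data())
def test_td_batch_shape(data):
    shape = tf.TensorShape([2, 1])
    dist = data.draw(transformed_distributions(batch_shape=shape))
    # Holds on both paths: same-shaped underlying distribution, or a scalar
    # underlying distribution plus a batch_shape override.
    assert dist.batch_shape == shape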

Example 13

@hps.composite
def independents(draw,
                 batch_shape=None,
                 event_dim=None,
                 enable_vars=False,
                 depth=None):
    """Strategy for drawing `Independent` distributions.

  The underlying distribution is drawn from the `distributions` strategy.

  Args:
    draw: Hypothesis MacGuffin.  Supplied by `@hps.composite`.
    batch_shape: An optional `TensorShape`.  The batch shape of the resulting
      `Independent` distribution.  Note that the underlying distribution will in
      general have a higher-rank batch shape, to make room for reinterpreting
      some of those dimensions as the `Independent`'s event.  Hypothesis will
      pick one if omitted.
    event_dim: Optional Python int giving the size of each of the underlying
      distribution's parameters' event dimensions.  This is shared across all
      parameters, permitting square event matrices, compatible location and
      scale Tensors, etc. If omitted, Hypothesis will choose one.
    enable_vars: TODO(bjp): Make this `True` all the time and put variable
      initialization in slicing_test.  If `False`, the returned parameters are
      all Tensors, never Variables or DeferredTensor.
    depth: Python `int` giving maximum nesting depth of compound Distributions.

  Returns:
    dists: A strategy for drawing `Independent` distributions with the specified
      `batch_shape` (or an arbitrary one if omitted).
  """
    if depth is None:
        depth = draw(depths())

    reinterpreted_batch_ndims = draw(hps.integers(min_value=0, max_value=2))
    if batch_shape is None:
        batch_shape = draw(
            tfp_hps.batch_shapes(min_ndims=reinterpreted_batch_ndims))
    else:  # This independent adds some batch dims to its underlying distribution.
        batch_shape = tensorshape_util.concatenate(
            batch_shape,
            draw(
                tfp_hps.batch_shapes(min_ndims=reinterpreted_batch_ndims,
                                     max_ndims=reinterpreted_batch_ndims)))
    underlying = draw(
        distributions(batch_shape=batch_shape,
                      event_dim=event_dim,
                      enable_vars=enable_vars,
                      depth=depth - 1))
    logging.info(
        'underlying distribution: %s; parameters used: %s', underlying,
        [k for k, v in six.iteritems(underlying.parameters) if v is not None])
    result_dist = tfd.Independent(
        underlying,
        reinterpreted_batch_ndims=reinterpreted_batch_ndims,
        validate_args=True)
    expected_shape = batch_shape[:len(batch_shape) - reinterpreted_batch_ndims]
    if expected_shape != result_dist.batch_shape:
        msg = ('Independent strategy generated a bad batch shape '
               'for {}, should have been {}.').format(result_dist,
                                                      expected_shape)
        raise AssertionError(msg)
    return result_dist
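
`Independent` trades batch dims for event dims, so the underlying
distribution's batch rank exceeds the wrapper's by exactly
`reinterpreted_batch_ndims`. A final sketch of that bookkeeping:

@hp.given(hps.data())
def test_independent_dim_bookkeeping(data):
    dist = data.draw(independents(batch_shape=tf.TensorShape([5])))
    extra = dist.reinterpreted_batch_ndims
    # The requested batch shape survives; the extra drawn dims became part
    # of the event shape.
    assert dist.batch_shape == tf.TensorShape([5])
    assert tensorshape_util.rank(dist.distribution.batch_shape) == 1 + extra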