Example 1
  def test_with_some_dynamic_shapes_works(self):
    x = array_ops.ones((2, 1, 3))
    # `y` has a fully dynamic shape: it is unknown until fed.
    y = array_ops.placeholder(x.dtype)
    z = array_ops.ones(())
    with self.test_session() as sess:
      bcast_shape = sess.run(
          distribution_util.get_broadcast_shape(x, y, z),
          feed_dict={y: np.ones((1, 5, 3)).astype(np.float32)})
      # (2, 1, 3), (1, 5, 3) and () broadcast to (2, 5, 3).
      self.assertAllEqual([2, 5, 3], bcast_shape)
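This test pins down the dynamic half of the contract: when any input shape is unknown at graph-construction time, `get_broadcast_shape` must return a shape `Tensor` that is only resolvable under `sess.run`. Below is a minimal sketch of a helper with that contract, using the public TF 1.x ops `tf.broadcast_static_shape` and `tf.broadcast_dynamic_shape`; the name and body are illustrative, not the actual `distribution_util` implementation.

import tensorflow as tf

def broadcast_shape_sketch(*tensors):
  """Broadcast shape of `tensors`: a list if static, else an int32 Tensor."""
  # Try to resolve the shape statically first.
  static = tf.TensorShape([])  # A scalar broadcasts with anything.
  for t in tensors:
    static = tf.broadcast_static_shape(static, t.shape)
  if static.is_fully_defined():
    return static.as_list()
  # Fall back to a graph-time computation, evaluated under `sess.run`
  # as in the test above.
  dynamic = tf.constant([], dtype=tf.int32)  # The shape of a scalar.
  for t in tensors:
    dynamic = tf.broadcast_dynamic_shape(dynamic, tf.shape(t))
  return dynamic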
Example 2
  def __init__(self,
               concentration1=None,
               concentration0=None,
               validate_args=False,
               allow_nan_stats=True,
               name="Kumaraswamy"):
    """Initialize a batch of Kumaraswamy distributions.

    Args:
      concentration1: Positive floating-point `Tensor` indicating mean
        number of successes; aka "alpha". Implies `self.dtype` and
        `self.batch_shape`, i.e.,
        `concentration1.shape = [N1, N2, ..., Nm] = self.batch_shape`.
      concentration0: Positive floating-point `Tensor` indicating mean
        number of failures; aka "beta". Otherwise has same semantics as
        `concentration1`.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
        (e.g., mean, mode, variance) use the value "`NaN`" to indicate the
        result is undefined. When `False`, an exception is raised if one or
        more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    """
    with ops.name_scope(name, values=[concentration1, concentration0]) as name:
      concentration1 = ops.convert_to_tensor(
          concentration1, name="concentration1")
      concentration0 = ops.convert_to_tensor(
          concentration0, name="concentration0")
    super(Kumaraswamy, self).__init__(
        distribution=uniform.Uniform(
            low=array_ops.zeros([], dtype=concentration1.dtype),
            high=array_ops.ones([], dtype=concentration1.dtype),
            allow_nan_stats=allow_nan_stats),
        bijector=bijectors.Kumaraswamy(
            concentration1=concentration1, concentration0=concentration0,
            validate_args=validate_args),
        batch_shape=distribution_util.get_broadcast_shape(
            concentration1, concentration0),
        name=name)
    self._reparameterization_type = distribution.FULLY_REPARAMETERIZED
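The `batch_shape=distribution_util.get_broadcast_shape(...)` argument is why the helper matters here: the two concentration parameters may carry different but broadcastable shapes. A hypothetical usage sketch follows; the shapes and values are illustrative, and it assumes the class above is in scope (e.g. via `tf.contrib.distributions`).

# concentration1 has shape [2, 1] and concentration0 has shape [3], so
# get_broadcast_shape yields batch_shape [2, 3].
dist = Kumaraswamy(concentration1=[[1.], [2.]],
                   concentration0=[1., 2., 3.])
# Each of the 2 x 3 batch members is an independent Kumaraswamy
# distribution on (0, 1).
samples = dist.sample(5)  # Expected shape: [5, 2, 3].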
Example 3
  def test_all_static_shapes_work(self):
    x = array_ops.ones((2, 1, 3))
    y = array_ops.ones((1, 5, 3))
    z = array_ops.ones(())
    # All shapes are static, so no session is needed: the broadcast
    # shape is already known at graph-construction time.
    self.assertAllEqual([2, 5, 3],
                        distribution_util.get_broadcast_shape(x, y, z))
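The expected `[2, 5, 3]` in both tests is just the usual right-aligned broadcasting rule; a quick cross-check outside TensorFlow:

import numpy as np

# NumPy applies the same broadcasting rule the tests expect:
# (2, 1, 3), (1, 5, 3) and () -> (2, 5, 3).
shape = np.broadcast(np.ones((2, 1, 3)),
                     np.ones((1, 5, 3)),
                     np.ones(())).shape
assert shape == (2, 5, 3)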
Example 4
  def __init__(self,
               loc,
               scale,
               skewness=None,
               tailweight=None,
               distribution=None,
               validate_args=False,
               allow_nan_stats=True,
               name="SinhArcsinh"):
    """Construct SinhArcsinh distribution on `(-inf, inf)`.

    Arguments `(loc, scale, skewness, tailweight)` must have broadcastable shape
    (indexing batch dimensions).  They must all have the same `dtype`.

    Args:
      loc: Floating-point `Tensor`.
      scale:  `Tensor` of same `dtype` as `loc`.
      skewness:  Skewness parameter.  Default is `0.0` (no skew).
      tailweight:  Tailweight parameter. Default is `1.0` (unchanged tailweight).
      distribution: `tf.Distribution`-like instance. Distribution that is
        transformed to produce this distribution.
        Default is `tf.distributions.Normal(0., 1.)`.
        Must be a scalar-batch, scalar-event distribution.  Typically
        `distribution.reparameterization_type = FULLY_REPARAMETERIZED` or it is
        a function of non-trainable parameters. WARNING: If you backprop through
        a `SinhArcsinh` sample and `distribution` is not
        `FULLY_REPARAMETERIZED` yet is a function of trainable variables, then
        the gradient will be incorrect!
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`,
        statistics (e.g., mean, mode, variance) use the value "`NaN`" to
        indicate the result is undefined. When `False`, an exception is raised
        if one or more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    """
    parameters = distribution_util.parent_frame_arguments()

    with ops.name_scope(name,
                        values=[loc, scale, skewness, tailweight]) as name:
      loc = ops.convert_to_tensor(loc, name="loc")
      dtype = loc.dtype
      scale = ops.convert_to_tensor(scale, name="scale", dtype=dtype)
      tailweight = 1. if tailweight is None else tailweight
      has_default_skewness = skewness is None
      skewness = 0. if skewness is None else skewness
      tailweight = ops.convert_to_tensor(
          tailweight, name="tailweight", dtype=dtype)
      skewness = ops.convert_to_tensor(skewness, name="skewness", dtype=dtype)

      batch_shape = distribution_util.get_broadcast_shape(
          loc, scale, tailweight, skewness)

      # Recall, with Z a random variable,
      #   Y := loc + C * F(Z),
      #   F(Z) := Sinh( (Arcsinh(Z) + skewness) * tailweight )
      #   F_0(Z) := Sinh( Arcsinh(Z) * tailweight )
      #   C := 2 * scale / F_0(2)
      if distribution is None:
        distribution = normal.Normal(
            loc=array_ops.zeros([], dtype=dtype),
            scale=array_ops.ones([], dtype=dtype),
            allow_nan_stats=allow_nan_stats)
      else:
        asserts = distribution_util.maybe_check_scalar_distribution(
            distribution, dtype, validate_args)
        if asserts:
          loc = control_flow_ops.with_dependencies(asserts, loc)

      # Make the SAS bijector, 'F'.
      f = bijectors.SinhArcsinh(
          skewness=skewness, tailweight=tailweight)
      if has_default_skewness:
        f_noskew = f
      else:
        f_noskew = bijectors.SinhArcsinh(
            skewness=skewness.dtype.as_numpy_dtype(0.),
            tailweight=tailweight)

      # Make the AffineScalar bijector, Z --> loc + scale * Z * (2 / F_0(2)).
      c = 2 * scale / f_noskew.forward(ops.convert_to_tensor(2, dtype=dtype))
      affine = bijectors.AffineScalar(
          shift=loc,
          scale=c,
          validate_args=validate_args)

      bijector = bijectors.Chain([affine, f])

      super(SinhArcsinh, self).__init__(
          distribution=distribution,
          bijector=bijector,
          batch_shape=batch_shape,
          validate_args=validate_args,
          name=name)
    self._parameters = parameters
    self._loc = loc
    self._scale = scale
    self._tailweight = tailweight
    self._skewness = skewness
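The comment block inside `__init__` fully determines the forward transformation; here it is restated as a self-contained NumPy sketch for clarity (a restatement of the comments above, not the library code):

import numpy as np

def sinh_arcsinh_transform(z, loc, scale, skewness=0., tailweight=1.):
  """Y = loc + C * F(Z), following the comment block in `__init__`."""
  # F(Z) := Sinh((Arcsinh(Z) + skewness) * tailweight)
  f_z = np.sinh((np.arcsinh(z) + skewness) * tailweight)
  # F_0(Z) := Sinh(Arcsinh(Z) * tailweight); C := 2 * scale / F_0(2)
  # normalizes so that `scale` stays comparable across tailweights.
  c = 2. * scale / np.sinh(np.arcsinh(2.) * tailweight)
  return loc + c * f_z

# With the defaults (skewness=0, tailweight=1) the transform reduces to
# Y = loc + scale * Z.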