Example No. 1
  def testAffineSoftplusIsCentered(self):
    """Check that affine_softplus(0) == ref."""
    for _ in range(10):
      lo = np.random.uniform(0., 0.1)
      ref = np.random.uniform(0.2, 10.)
      y = util.affine_softplus(np.array(0.), lo=lo, ref=ref)
      self.assertAllClose(y, ref)
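For intuition, here is a minimal NumPy sketch of the property this test exercises: a softplus shifted and rescaled so that an input of 0 maps exactly to `ref` and the output is bounded below by `lo`. The name `affine_softplus_sketch` and its exact form are assumptions for illustration, not `util.affine_softplus` itself.

import numpy as np

def affine_softplus_sketch(x, lo=0., ref=1.):
  # Shift the softplus so that softplus(0 + shift) == 1, then rescale it
  # affinely so the output lies in (lo, infinity) and an input of 0 maps to ref.
  shift = np.log(np.expm1(1.))  # inverse softplus of 1, i.e. log(e - 1)
  return (ref - lo) * np.log1p(np.exp(x + shift)) + lo

affine_softplus_sketch(0., lo=0.05, ref=3.)  # -> 3.0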
Example No. 2
  def testAffineSoftplusSpansRange(self):
    """Check that affine_softplus()'s output is in [lo, infinity)."""
    x = np.finfo(np.float32).max * np.array([-1, 1], dtype=np.float32)
    for _ in range(10):
      lo = np.random.uniform(0., 0.1)
      ref = np.random.uniform(0.2, 10.)
      y = util.affine_softplus(x, lo=lo, ref=ref)
      self.assertAllClose(y[0], lo)
      self.assertAllGreater(y[1], 1e10)
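The same sketch (again, only an illustrative stand-in for the real function) shows the range behavior this test checks: a hugely negative input saturates to `lo`, while large positive inputs grow without bound. For inputs as extreme as np.finfo(np.float32).max the naive np.exp form overflows to inf, so the library version presumably uses a numerically stable softplus.

affine_softplus_sketch(np.float32(-1e30), lo=0.05, ref=3.)  # -> 0.05
affine_softplus_sketch(np.float32(200.), lo=0.05, ref=3.)   # -> ~592, unbounded in x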
Example No. 3
  def testAffineSoftplusRoundTrip(self):
    """Check that x = inv_affine_softplus(affine_softplus(x)) in general."""
    x = np.float32(np.linspace(-10., 10., 1000))
    for _ in range(10):
      lo = np.random.uniform(0., 0.1)
      ref = np.random.uniform(0.2, 10.)
      y = util.affine_softplus(x, lo=lo, ref=ref)
      x_recon = util.inv_affine_softplus(y, lo=lo, ref=ref)
      self.assertAllClose(x, x_recon, atol=1e-5, rtol=1e-3)
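A matching inverse for the sketch above makes the round trip in this test plausible: undo the affine map, invert the softplus, and remove the centering shift. As before, the name and form are illustrative assumptions rather than util.inv_affine_softplus itself.

def inv_affine_softplus_sketch(y, lo=0., ref=1.):
  # Undo the affine map, invert the softplus, then remove the centering shift.
  shift = np.log(np.expm1(1.))
  return np.log(np.expm1((y - lo) / (ref - lo))) - shift

x = np.float32(np.linspace(-10., 10., 1000))
y = affine_softplus_sketch(x, lo=0.05, ref=3.)
np.allclose(x, inv_affine_softplus_sketch(y, lo=0.05, ref=3.), atol=1e-5)  # -> True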
Example No. 4
def _construct_scale(x, scale_lo, scale_init, float_dtype):
  """Helper function for constructing scale variables."""
  if scale_lo == scale_init:
    # If the difference between the minimum and initial scale is zero, then
    # we just fix `scale` to be a constant.
    scale = tf.tile(
        tf.cast(scale_init, float_dtype)[tf.newaxis, tf.newaxis],
        (1, x.shape[1]))
  else:
    # Otherwise we construct a "latent" scale variable and define `scale`
    # as an affine function of a softplus on that latent variable.
    latent_scale = tf.compat.v1.get_variable(
        'LatentScale', initializer=tf.zeros((1, x.shape[1]), float_dtype))
    scale = util.affine_softplus(latent_scale, lo=scale_lo, ref=scale_init)
  return scale
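A hypothetical call site for _construct_scale, exercising only the constant branch (scale_lo == scale_init); this branch uses plain tf.cast/tf.tile and runs eagerly, while the other branch presumably needs TF1-style graph mode (or a variable store) because of tf.compat.v1.get_variable. The shapes and values below are made up for illustration.

import numpy as np
import tensorflow as tf

x = tf.constant(np.zeros((4, 3), dtype=np.float32))
scale = _construct_scale(x, scale_lo=0.5, scale_init=0.5, float_dtype=tf.float32)
# scale has shape (1, 3) with every entry equal to 0.5; if scale_lo != scale_init,
# it would instead be an affine softplus of a trainable latent variable.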
Example No. 5
  def scale(self):
    """Returns the loss's current scale parameters.

    Returns:
      a TF tensor of size (1, self._num_channels) and type self._float_dtype,
      containing the current estimated scale parameter for each channel,
      which will presumably change during optimization. This tensor is a
      function of the latent scale tensor being optimized over, and is not a
      TF variable itself.
    """
    if self._scale_lo == self._scale_init:
      # If the difference between the minimum and initial scale is zero, then
      # we just fix `scale` to be a constant.
      return tf.tile(
          tf.cast(self._scale_init, self._float_dtype)[tf.newaxis, tf.newaxis],
          (1, self._num_channels))
    else:
      return util.affine_softplus(
          self._latent_scale, lo=self._scale_lo, ref=self._scale_init)
Example No. 6
  def testDefaultAffineSoftplusRoundTrip(self):
    """Check that x = inv_affine_softplus(affine_softplus(x)) by default."""
    x = np.float32(np.linspace(-10., 10., 1000))
    y = util.affine_softplus(x)
    x_recon = util.inv_affine_softplus(y)
    self.assertAllClose(x, x_recon, atol=1e-5, rtol=1e-3)