def test_soft_round_values_and_gradients_are_finite(self, alpha):
  """Soft rounding must yield finite values and finite gradients everywhere."""
  # The sampled range deliberately includes exact integers and half-integers,
  # where naive implementations tend to produce NaN/inf.
  inputs = tf.linspace(0., 1., 11)
  with tf.GradientTape() as tape:
    tape.watch(inputs)
    outputs = round_ops.soft_round(inputs, alpha=alpha)
  grads = tape.gradient(outputs, inputs)
  all_true = tf.ones(inputs.shape, dtype=bool)
  self.assertAllEqual(tf.math.is_finite(outputs), all_true)
  self.assertAllEqual(tf.math.is_finite(grads), all_true)
def test_soft_round_layer_soft_rounds(self):
  """The SoftRound layer should match round_ops.soft_round elementwise."""
  sharpness = 5.0
  inputs = tf.linspace(-5.0, 5.0, num=50)
  layer_outputs = soft_round.SoftRound(alpha=sharpness)(inputs)
  self.assertAllClose(
      layer_outputs, round_ops.soft_round(inputs, alpha=sharpness))
def transform(self, x):
  """Soft-rounds `x` using this object's `_alpha` sharpness parameter."""
  alpha = self._alpha
  return round_ops.soft_round(x, alpha)
def test_soft_inverse_is_actual_inverse(self):
  """soft_round_inverse should recover the inputs given to soft_round."""
  # Test points are away from half-integers, where the round-trip is
  # well defined.
  originals = tf.constant([-1.25, -0.75, 0.75, 1.25], dtype=tf.float32)
  rounded = round_ops.soft_round(originals, alpha=2.0)
  recovered = round_ops.soft_round_inverse(rounded, alpha=2.0)
  self.assertAllClose(originals, recovered)
def test_soft_round_large_alpha_is_round(self):
  """For very large alpha, soft rounding should approach hard rounding."""
  # Behavior exactly at half-integer values is unspecified, so sample
  # slightly inside each unit interval instead of touching its edges.
  for center in range(-5, 5):
    inputs = tf.linspace(center - 0.499, center + 0.499, 100)
    outputs = round_ops.soft_round(inputs, alpha=2000.0)
    self.assertAllClose(tf.round(inputs), outputs, atol=0.02)
def test_soft_round_small_alpha_is_identity(self):
  """As alpha approaches zero, soft rounding degenerates to the identity."""
  inputs = tf.linspace(-2., 2., 50)
  self.assertAllClose(inputs, round_ops.soft_round(inputs, alpha=1e-13))