Example #1
    # Checks softshrink against hand-computed values: first with the default
    # thresholds (the expected outputs correspond to lower=-0.5, upper=0.5),
    # then with lower=-1.0, upper=1.0.
    def test_softshrink(self, dtype):
        x = tf.constant([-2.0, -1.0, 0.0, 1.0, 2.0], dtype=dtype)
        expected_result = tf.constant([-1.5, -0.5, 0.0, 0.5, 1.5], dtype=dtype)
        self.assertAllCloseAccordingToType(softshrink(x), expected_result)

        expected_result = tf.constant([-1.0, 0.0, 0.0, 0.0, 1.0], dtype=dtype)
        self.assertAllCloseAccordingToType(
            softshrink(x, lower=-1.0, upper=1.0), expected_result)
Example #2
# The same value check as Example #1, written as a standalone (pytest-style)
# function that uses test_utils helpers instead of TestCase assertions.
def test_softshrink(dtype):
    x = tf.constant([-2.0, -1.0, 0.0, 1.0, 2.0], dtype=dtype)
    expected_result = tf.constant([-1.5, -0.5, 0.0, 0.5, 1.5], dtype=dtype)
    test_utils.assert_allclose_according_to_type(softshrink(x),
                                                 expected_result)

    expected_result = tf.constant([-1.0, 0.0, 0.0, 0.0, 1.0], dtype=dtype)
    test_utils.assert_allclose_according_to_type(
        softshrink(x, lower=-1.0, upper=1.0), expected_result)
Example #3
    # Traces softshrink once with a fully unknown input shape and checks that
    # the resulting concrete function matches direct calls for several ranks.
    def test_unknown_shape(self):
        fn = softshrink.get_concrete_function(
            tf.TensorSpec(shape=None, dtype=tf.float32))

        for shape in [(1, ), (1, 2), (1, 2, 3), (1, 2, 3, 4)]:
            x = tf.ones(shape=shape, dtype=tf.float32)
            self.assertAllClose(fn(x), softshrink(x))
Example #4
# Compares the native softshrink op against a Python reference implementation
# (_softshrink_py, defined elsewhere in the test module) on random inputs,
# checking both the forward values and the gradients.
def verify_funcs_are_equivalent(dtype):
    x_np = np.random.uniform(-10, 10, size=(4, 4)).astype(dtype)
    x = tf.convert_to_tensor(x_np)
    lower = np.random.uniform(-10, 10)
    upper = lower + np.random.uniform(0, 10)

    with tf.GradientTape(persistent=True) as t:
        t.watch(x)
        y_native = softshrink(x, lower, upper)
        y_py = _softshrink_py(x, lower, upper)

    test_utils.assert_allclose_according_to_type(y_native, y_py)

    grad_native = t.gradient(y_native, x)
    grad_py = t.gradient(y_py, x)

    test_utils.assert_allclose_according_to_type(grad_native, grad_py)
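Examples #4 and #5 rely on a reference implementation _softshrink_py that is not shown on this page (they also assume numpy is imported as np, tensorflow as tf, and that softshrink and test_utils come from the library under test). A minimal sketch of such a reference, assuming the usual softshrink definition (zero inside [lower, upper], shifted toward zero outside) and the default thresholds implied by Example #1, could look like this:

# Hypothetical reference implementation; not taken from this page.
# softshrink(x) = x - lower  if x < lower
#                 x - upper  if x > upper
#                 0          otherwise
import tensorflow as tf

def _softshrink_py(x, lower=-0.5, upper=0.5):
    x = tf.convert_to_tensor(x)
    return tf.where(x < lower, x - lower,
                    tf.where(x > upper, x - upper, tf.zeros_like(x)))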
Example #5
    # The same equivalence check as Example #4, written as a TestCase method
    # with an explicit absolute tolerance.
    def verify_funcs_are_equivalent(self, dtype):
        x_np = np.random.uniform(-10, 10, size=(4, 4)).astype(dtype)
        x = tf.convert_to_tensor(x_np)
        lower = np.random.uniform(-10, 10)
        upper = lower + np.random.uniform(0, 10)

        with tf.GradientTape(persistent=True) as t:
            t.watch(x)
            y_native = softshrink(x, lower, upper)
            y_py = _softshrink_py(x, lower, upper)

        self.assertAllCloseAccordingToType(y_native, y_py, atol=1e-4)

        grad_native = t.gradient(y_native, x)
        grad_py = t.gradient(y_py, x)

        self.assertAllCloseAccordingToType(grad_native, grad_py, atol=1e-4)
Example #6
    # Passing lower > upper is invalid and must raise an error from the op.
    def test_invalid(self):
        with self.assertRaisesOpError(
                "lower must be less than or equal to upper."):  # pylint: disable=bad-continuation
            y = softshrink(tf.ones(shape=(1, 2, 3)), lower=2.0, upper=-2.0)
            self.evaluate(y)