def test_diffs(self):
  x = tf.constant([1, 2, 3, 4, 5])
  dx = self.evaluate(math.diff(x, order=1, exclusive=False))
  np.testing.assert_array_equal(dx, [1, 1, 1, 1, 1])

  dx1 = self.evaluate(math.diff(x, order=1, exclusive=True))
  np.testing.assert_array_equal(dx1, [1, 1, 1, 1])

  dx2 = self.evaluate(math.diff(x, order=2, exclusive=False))
  np.testing.assert_array_equal(dx2, [1, 2, 2, 2, 2])
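
# A minimal NumPy sketch of the semantics exercised above (an illustration,
# not the library implementation): with exclusive=False the first `order`
# elements pass through unchanged, while exclusive=True drops them. The
# helper name `_reference_diff` is hypothetical.
def _reference_diff(x, order=1, exclusive=False):
  x = np.asarray(x)
  d = x[order:] - x[:-order]  # x[i] - x[i - order]
  return d if exclusive else np.concatenate([x[:order], d])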

def test_diffs_differentiable(self):
  """Tests that the diffs op is differentiable."""
  x = tf.constant(2.0)
  xv = tf.stack([x, x * x, x * x * x], axis=0)

  # Produces [x, x^2 - x, x^3 - x^2].
  dxv = self.evaluate(math.diff(xv))
  np.testing.assert_array_equal(dxv, [2., 2., 4.])

  grad = self.evaluate(tf.gradients(math.diff(xv), x)[0])
  # Note that tf.gradients sums the components of the Jacobian.
  # The sum of [1, 2x - 1, 3x^2 - 2x] at x = 2 is 12.
  self.assertEqual(grad, 12.0)
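
  # An eager-mode equivalent (a sketch, assuming TF2 eager execution; not
  # part of the original graph-mode test):
  #   with tf.GradientTape() as tape:
  #     tape.watch(x)
  #     y = math.diff(tf.stack([x, x * x, x * x * x], axis=0))
  #   tape.gradient(y, x)  # == 12.0; per-output gradients are summed.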

def test_batched_axis(self, exclusive, axis, dx_true):
  """Tests that batched diff works with the axis argument and exclusivity."""
  x = tf.constant([[1, 2, 3, 4], [10, 20, 30, 40]])
  dx = self.evaluate(
      math.diff(x, order=1, exclusive=exclusive, axis=axis))
  self.assertAllEqual(dx, dx_true)
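  # The (exclusive, axis, dx_true) triples are presumably supplied by a
  # parameterized-test decorator that is not shown in this excerpt. For the
  # x above, illustrative cases would be:
  #   exclusive=True,  axis=-1  ->  dx_true = [[1, 1, 1], [10, 10, 10]]
  #   exclusive=False, axis=0   ->  dx_true = [[1, 2, 3, 4], [9, 18, 27, 36]]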

def test_batch_diff(self):
  """Tests that the diffs op works on batched inputs."""
  x = tf.constant([[1, 2, 3, 4], [10, 20, 30, 40]])
  dx_true = np.array([[1, 1, 1], [10, 10, 10]])
  dx = self.evaluate(math.diff(x, order=1, exclusive=True))
  self.assertAllEqual(dx, dx_true)
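  # For this batched input the expected values coincide with NumPy's
  # first differences along the last axis, i.e. np.diff(x, axis=-1).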