def testGradOnUnsupportedType(self):
  """Verifies that no gradient is defined for an integer (uint8) input.

  resize_bicubic has no registered gradient for non-float dtypes, so the
  gradient computation should yield [None].
  """
  in_shape = [1, 4, 6, 1]
  out_shape = [1, 2, 3, 1]
  # uint8 is deliberately unsupported for gradients.
  x = np.arange(0, 24).reshape(in_shape).astype(np.uint8)
  # Use cached_session() instead of the deprecated test_session(), for
  # consistency with the other tests in this file.
  with self.cached_session():
    input_tensor = constant_op.constant(x, shape=in_shape)
    resize_out = image_ops.resize_bicubic(input_tensor, out_shape[1:3])
    # NOTE(review): argument order (ys=input_tensor, xs=[resize_out]) is
    # preserved from the original; either direction has no gradient path.
    grad = gradients_impl.gradients(input_tensor, [resize_out])
    self.assertEqual([None], grad)
def testGradOnUnsupportedType(self):
  """Gradient of resize_bicubic w.r.t. a uint8 input must be None."""
  input_shape = [1, 2 * 2, 6, 1]
  target_shape = [1, 2, 3, 1]
  # Build a small uint8 image; integer dtypes have no gradient support.
  pixels = np.arange(24).reshape(input_shape).astype(np.uint8)
  with self.cached_session():
    image = constant_op.constant(pixels, shape=input_shape)
    resized = image_ops.resize_bicubic(image, target_shape[1:3])
    gradients = gradients_impl.gradients(image, [resized])
    self.assertEqual([None], gradients)
def testGradOnUnsupportedType(self, use_tape):
  """Tape-based gradient of resize_bicubic on uint8 input must be None."""
  with test_util.AbstractGradientTape(use_tape=use_tape) as tape:
    input_shape = [1, 4, 6, 1]
    target_shape = [1, 2, 3, 1]
    # Integer images cannot be differentiated through resize_bicubic.
    pixels = np.arange(24).reshape(input_shape).astype(np.uint8)
    image = constant_op.constant(pixels, shape=input_shape)
    tape.watch(image)
    resized = image_ops.resize_bicubic(image, target_shape[1:3])
    with self.cached_session():
      gradients = tape.gradient(resized, [image])
      self.assertEqual([None], gradients)
def testGradFromResizeToSmallerInBothDims(self):
  """Numerically checks the gradient when downscaling in both dimensions.

  For both align_corners settings, the analytic gradient of
  resize_bicubic (4x6 -> 2x3) is compared against a numeric estimate via
  gradient_checker; the error must stay below 1e-3.
  """
  in_shape = [1, 4, 6, 1]
  out_shape = [1, 2, 3, 1]
  x = np.arange(0, 24).reshape(in_shape).astype(np.float32)
  for align_corners in [True, False]:
    # Use cached_session() instead of the deprecated test_session(), for
    # consistency with the other tests in this file.
    with self.cached_session():
      input_tensor = constant_op.constant(x, shape=in_shape)
      resize_out = image_ops.resize_bicubic(
          input_tensor, out_shape[1:3], align_corners=align_corners)
      err = gradient_checker.compute_gradient_error(
          input_tensor, in_shape, resize_out, out_shape, x_init_value=x)
      self.assertLess(err, 1e-3)
def testShapeIsCorrectAfterOp(self):
  """Static and runtime output shapes of resize_bicubic must match."""
  input_shape = [1, 2, 2, 1]
  expected_shape = [1, 4, 6, 1]
  pixels = np.arange(4).reshape(input_shape).astype(np.float32)
  for align_corners in [True, False]:
    image = constant_op.constant(pixels, shape=input_shape)
    resized = image_ops.resize_bicubic(
        image, expected_shape[1:3], align_corners=align_corners)
    with self.cached_session():
      # The statically inferred shape should already be correct...
      self.assertEqual(expected_shape, list(resized.get_shape()))
      # ...and so should the shape of the evaluated result.
      result = self.evaluate(resized)
      self.assertEqual(expected_shape, list(result.shape))
def testGradFromResizeToSmallerInBothDims(self):
  """Gradient check for downscaling 4x6 -> 2x3 with both corner modes."""
  input_shape = [1, 4, 6, 1]
  target_shape = [1, 2, 3, 1]
  pixels = np.arange(24).reshape(input_shape).astype(np.float32)
  for align_corners in [True, False]:
    image = constant_op.constant(pixels, shape=input_shape)
    resized = image_ops.resize_bicubic(
        image, target_shape[1:3], align_corners=align_corners)
    with self.cached_session():
      # Compare analytic vs. numeric gradients; tolerance 1e-3.
      max_error = gradient_checker.compute_gradient_error(
          image, input_shape, resized, target_shape, x_init_value=pixels)
      self.assertLess(max_error, 1e-3)
def testShapeIsCorrectAfterOp(self):
  """Checks resize_bicubic's static and evaluated output shapes agree.

  Upscales a 2x2 image to 4x6 for both align_corners settings and
  verifies both the statically inferred shape and the shape of the
  evaluated result.
  """
  in_shape = [1, 2, 2, 1]
  out_shape = [1, 4, 6, 1]
  x = np.arange(0, 4).reshape(in_shape).astype(np.float32)
  for align_corners in [True, False]:
    # Use cached_session()/self.evaluate() instead of the deprecated
    # test_session()/sess.run(), consistent with the sibling test.
    with self.cached_session():
      input_tensor = constant_op.constant(x, shape=in_shape)
      resize_out = image_ops.resize_bicubic(
          input_tensor, out_shape[1:3], align_corners=align_corners)
      self.assertEqual(out_shape, list(resize_out.get_shape()))
      resize_out = self.evaluate(resize_out)
      self.assertEqual(out_shape, list(resize_out.shape))
def func(input_tensor, align_corners=align_corners):
  """Resizes `input_tensor` bicubically to the enclosing test's out_shape.

  `align_corners` is bound at definition time via the default argument;
  `out_shape` is read from the enclosing scope.
  """
  target_size = out_shape[1:3]
  return image_ops.resize_bicubic(
      input_tensor, target_size, align_corners=align_corners)