def _RsqrtGradGrad(op, grad):
    """Backprop through RsqrtGrad, i.e. f(a, b) = -0.5 * b * conj(a)^3.

    Args:
        op: The RsqrtGrad op. op.inputs[0] is a = x^(-1/2) (the rsqrt output
            of the forward pass); op.inputs[1] is b, the backprop gradient
            that was fed into RsqrtGrad.
        grad: Gradient flowing into this op's output.

    Returns:
        A pair (grad_a, grad_b) of gradients w.r.t. the two inputs.
    """
    rsqrt_out, upstream = op.inputs[0], op.inputs[1]
    with ops.control_dependencies([grad]):
        conj_a = math_ops.conj(rsqrt_out)
        conj_g = math_ops.conj(grad)
        # df/da = -1.5 * b * conj(a)^2, applied to the conjugated incoming grad.
        da = -1.5 * conj_g * upstream * math_ops.square(conj_a)
        # df/db = -0.5 * conj(a)^3, which is exactly the RsqrtGrad kernel
        # evaluated at conj(a).
        db = gen_math_ops.rsqrt_grad(conj_a, grad)
        return da, db
def test_rsqrtgrad(self, shape):
    """Verify rsqrt_grad produces matching results with and without ngraph.

    Feeds identical random inputs of the given shape through the same graph
    under both execution modes and compares the outputs elementwise.
    """
    ph_y = tf.placeholder(tf.float32, shape)
    ph_dy = tf.placeholder(tf.float32, shape)
    y_val = np.random.rand(*shape)
    dy_val = np.random.rand(*shape)
    result = rsqrt_grad(ph_y, ph_dy)

    def run_test(sess):
        return sess.run(result, feed_dict={ph_y: y_val, ph_dy: dy_val})

    ngraph_out = self.with_ngraph(run_test)
    reference_out = self.without_ngraph(run_test)
    assert np.isclose(ngraph_out, reference_out).all()
def _RsqrtGrad(op, grad):
    """Gradient of rsqrt: returns -0.5 * grad * conj(y)^3.

    op.outputs[0] is y = x^(-1/2) from the forward pass; the fused
    RsqrtGrad kernel computes the full product in one op.
    """
    y = op.outputs[0]
    return gen_math_ops.rsqrt_grad(y, grad)
def rsqrt_forward(op, r_input):
    """Forward-mode (tangent) rule for rsqrt.

    With y = x^(-0.5) we have dy = -0.5 * x^(-1.5) * dx = -0.5 * y^3 * dx,
    which is exactly the product the RsqrtGrad kernel computes, so the
    tangent is obtained by reusing it on (y, dx).
    """
    with tf.name_scope('rsqrt_forward'):
        tangent = rsqrt_grad(op.outputs[0], r_input[0])
        return [tangent]