def test_add_mul_mix_3(backendopt):
    for datatype in backendopt:
        T.set_backend(datatype)

        x2 = ad.Variable(name="x2", shape=[3])
        x3 = ad.Variable(name="x3", shape=[3])
        z = x2 * x2 + x2 + x3 + 3
        y = ad.sum(z * z + x3)

        grad_x2, grad_x3 = ad.gradients(y, [x2, x3])

        executor = ad.Executor([y, grad_x2, grad_x3])
        x2_val = 2 * T.ones(3)
        x3_val = 3 * T.ones(3)
        y_val, grad_x2_val, grad_x3_val = executor.run(feed_dict={
            x2: x2_val,
            x3: x3_val
        })

        z_val = x2_val * x2_val + x2_val + x3_val + 3
        expected_yval = z_val * z_val + x3_val
        # dy/dx2 = 2 * z * dz/dx2 = 2 * z * (2 * x2 + 1)
        expected_grad_x2_val = 2 * \
            (x2_val * x2_val + x2_val + x3_val + 3) * (2 * x2_val + 1)
        # dy/dx3 = 2 * z * dz/dx3 + 1 = 2 * z + 1
        expected_grad_x3_val = 2 * (x2_val * x2_val + x2_val + x3_val + 3) + 1

        assert isinstance(y, ad.Node)
        assert T.array_equal(y_val, T.sum(expected_yval))
        assert T.array_equal(grad_x2_val, expected_grad_x2_val)
        assert T.array_equal(grad_x3_val, expected_grad_x3_val)
def test_add_mul_mix_2(backendopt):
    for datatype in backendopt:
        T.set_backend(datatype)

        x1 = ad.Variable(name="x1", shape=[3])
        x2 = ad.Variable(name="x2", shape=[3])
        x3 = ad.Variable(name="x3", shape=[3])
        x4 = ad.Variable(name="x4", shape=[3])
        y = ad.sum(x1 + x2 * x3 * x4)

        grad_x1, grad_x2, grad_x3, grad_x4 = ad.gradients(y, [x1, x2, x3, x4])

        executor = ad.Executor([y, grad_x1, grad_x2, grad_x3, grad_x4])
        x1_val = 1 * T.ones(3)
        x2_val = 2 * T.ones(3)
        x3_val = 3 * T.ones(3)
        x4_val = 4 * T.ones(3)
        y_val, grad_x1_val, grad_x2_val, grad_x3_val, grad_x4_val = executor.run(
            feed_dict={
                x1: x1_val,
                x2: x2_val,
                x3: x3_val,
                x4: x4_val
            })

        assert isinstance(y, ad.Node)
        assert T.array_equal(y_val, T.sum(x1_val + x2_val * x3_val * x4_val))
        assert T.array_equal(grad_x1_val, T.ones_like(x1_val))
        assert T.array_equal(grad_x2_val, x3_val * x4_val)
        assert T.array_equal(grad_x3_val, x2_val * x4_val)
        assert T.array_equal(grad_x4_val, x2_val * x3_val)
def conjugate_gradient(hess_fn, grads, error_tol, max_iters=250, x0=None):
    '''
    Solve hess_fn(x) = grads for x using the conjugate gradient method.

    hess_fn: callable mapping a list of tensors to the corresponding
        Hessian-vector products.
    grads: list of tensors forming the right-hand side.
    error_tol: stop once the squared residual norm falls below this value.

    Returns the solution x as a list of tensors.
    '''
    if x0 is None:
        x0 = [T.ones(grad.shape) for grad in grads]
    hvps = hess_fn(x0)
    # Residual r = H x0 - grads; initial search direction p = -r.
    r = group_minus(hvps, grads)
    p = group_negative(r)
    r_k_norm = group_dot(r, r)
    i = 0
    while True:
        Ap = hess_fn(p)
        alpha = r_k_norm / group_dot(p, Ap)
        x0 = group_add(x0, group_product(alpha, p))
        r = group_add(r, group_product(alpha, Ap))
        r_kplus1_norm = group_dot(r, r)
        beta = r_kplus1_norm / r_k_norm
        r_k_norm = r_kplus1_norm
        if float(r_kplus1_norm) < error_tol:
            break
        # Next search direction: p = beta * p - r.
        p = group_minus(group_product(beta, p), r)
        if i > max_iters:
            print('CG max iter reached.')
            break
        i += 1
    return x0
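# A minimal usage sketch for conjugate_gradient; this is illustrative only and is
# not part of the original test suite. It assumes the group_* helpers operate
# elementwise on lists of tensors and that a backend has already been selected
# via T.set_backend (e.g. "numpy"). With the hypothetical Hessian-vector product
# below (H = 2*I), solving hess_fn(x) = grads should give x close to grads / 2.
def _example_conjugate_gradient_usage():
    def hess_fn(xs):
        # Hypothetical HVP for a quadratic with Hessian 2*I.
        return [2. * x for x in xs]

    grads = [T.tensor([2., 4., 6.])]
    x = conjugate_gradient(hess_fn, grads, error_tol=1e-10)
    # x[0] is expected to be close to [1., 2., 3.]
    return x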
def test_mul_two_vars(backendopt):
    for datatype in backendopt:
        T.set_backend(datatype)

        x2 = ad.Variable(name="x2", shape=[3])
        x3 = ad.Variable(name="x3", shape=[3])
        y = ad.sum(x2 * x3)

        grad_x2, grad_x3 = ad.gradients(y, [x2, x3])

        executor = ad.Executor([y, grad_x2, grad_x3])
        x2_val = 2 * T.ones(3)
        x3_val = 3 * T.ones(3)
        y_val, grad_x2_val, grad_x3_val = executor.run(feed_dict={
            x2: x2_val,
            x3: x3_val
        })

        assert isinstance(y, ad.Node)
        assert T.array_equal(y_val, T.sum(x2_val * x3_val))
        assert T.array_equal(grad_x2_val, x3_val)
        assert T.array_equal(grad_x3_val, x2_val)
def test_negative(backendopt):
    for datatype in backendopt:
        T.set_backend(datatype)

        x2 = ad.Variable(name="x2", shape=[3])
        y = ad.sum(-x2)

        grad_x2, = ad.gradients(y, [x2])

        executor = ad.Executor([y, grad_x2])
        x2_val = 2 * T.ones(3)
        y_val, grad_x2_val = executor.run(feed_dict={x2: x2_val})

        assert isinstance(y, ad.Node)
        assert T.array_equal(y_val, T.sum(-x2_val))
        assert T.array_equal(grad_x2_val, -T.ones_like(x2_val))
def test_jacobian_summation_einsum_2(backendopt):
    for datatype in backendopt:
        T.set_backend(datatype)

        x = ad.Variable(name="x", shape=[2, 2])
        y = ad.Variable(name="y", shape=[2, 2])
        out = ad.einsum('ij,ab->ab', x, y)

        grad_x, = ad.jacobians(out, [x])

        executor = ad.Executor([out, grad_x])
        x_val = T.tensor([[1., 2.], [3., 4.]])
        y_val = T.tensor([[5., 6.], [7., 8.]])
        out_val, grad_x_val = executor.run(feed_dict={x: x_val, y: y_val})

        expected_out_val = T.einsum('ij,ab->ab', x_val, y_val)
        # out_{ab} = (sum_{ij} x_{ij}) * y_{ab}, so d out_{ab} / d x_{ij} = y_{ab}.
        expected_grad_x_val = T.einsum('ij,ab->abij', T.ones(x_val.shape),
                                       y_val)

        assert T.array_equal(out_val, expected_out_val)
        assert T.array_equal(grad_x_val, expected_grad_x_val)