def test_gaussian_kernel_dx_i_dx_j_component_equals_gaussian_kernel_dx_i_dx_j():
    """Each row of the full dx_i dx_j matrix must agree with the per-component helper."""
    D = 4
    x = np.random.randn(D)
    y = np.random.randn(D)
    sigma = 0.5

    full_matrix = gaussian_kernel_dx_i_dx_j(x, y, sigma)
    for component in range(D):
        row = gaussian_kernel_dx_i_dx_j_component(x, y, component, sigma)
        assert_allclose(full_matrix[component], row)
def grad(x, basis, sigma, alpha, beta):
    """Gradient at x: the alpha-scaled xi term plus the beta-weighted Hessian terms,
    accumulated over all basis points (vectorized over components)."""
    m, D = basis.shape
    assert_array_shape(x, ndim=1, dims={0: D})

    xi_part = 0
    beta_part = 0
    for idx, basis_point in enumerate(basis):
        third_derivs = gaussian_kernel_dx_i_dx_i_dx_j(x, basis_point, sigma)
        xi_part += np.sum(third_derivs, axis=0) / m

        hessian = gaussian_kernel_dx_i_dx_j(x, basis_point, sigma)
        beta_part += beta[idx, :].dot(hessian)

    return alpha * xi_part + beta_part
def test_gaussian_kernel_dx_i_dx_j_equals_SE_dx_i_dx_j():
    """Our dx_i dx_j implementation must match the SE reference under l = sqrt(sigma/2)."""
    D = 4
    x = np.random.randn(D)
    y = np.random.randn(D)
    sigma = 0.5

    ours = gaussian_kernel_dx_i_dx_j(x, y, sigma)
    lengthscale = np.sqrt(sigma / 2.0)
    reference = SE_dx_i_dx_j(x.reshape(-1, 1), y.reshape(-1, 1), l=lengthscale)
    assert_close(ours, reference)
def grad_naive(x, X, sigma, alpha, beta):
    """Reference implementation of grad with the component sum written as an
    explicit inner loop instead of vectorized numpy operations."""
    N, D = X.shape

    xi_part = 0
    beta_part = 0
    for a, point in enumerate(X):
        third_derivs = gaussian_kernel_dx_i_dx_i_dx_j(x, point, sigma)
        hessian = gaussian_kernel_dx_i_dx_j(x, point, sigma)
        for i in range(D):
            xi_part += third_derivs[i] / N
            beta_part += beta[a, i] * hessian[i]

    return alpha * xi_part + beta_part