def test_score_matching_sym_returns_min_1d_grid():
    N = 100
    D = 3
    m = 1
    omega = np.random.randn(D, m)
    u = np.random.uniform(0, 2 * np.pi, m)
    X = np.random.randn(N, D)

    C = compute_C_memory(X, omega, u)
    b = compute_b_memory(X, omega, u)

    lmbda = .001
    theta = score_matching_sym(X, lmbda, omega, u)
    J = objective(X, theta, lmbda, omega, u, b, C)

    # evaluate the objective on a 1d grid around the fitted theta
    thetas_test = np.linspace(theta[0] - 3, theta[0] + 3)
    Js = np.zeros(len(thetas_test))
    for i, theta_test in enumerate(thetas_test):
        Js[i] = objective(X, np.array([theta_test]), lmbda, omega, u, b, C)

#     plt.plot(thetas_test, Js)
#     plt.plot([theta, theta], [Js.min(), Js.max()])
#     plt.title(str(theta))
#     plt.show()

    # the fitted theta attains the grid minimum, up to the grid resolution
    assert_almost_equal(Js.min(), J, delta=thetas_test[1] - thetas_test[0])
    assert_almost_equal(thetas_test[Js.argmin()], theta[0],
                        delta=thetas_test[1] - thetas_test[0])
def test_objective_sym_equals_completely_manual_manually():
    N = 100
    D = 3
    m = 3
    omega = np.random.randn(D, m)
    u = np.random.uniform(0, 2 * np.pi, m)
    X = np.random.randn(N, D)
    lmbda = 1.
    theta = np.random.randn(m)

    J_manual = 0.
    for n in range(N):
        b_manual = np.zeros(m)
        C_manual = np.zeros((m, m))
        J_n_manual = 0.
        for d in range(D):
            b_term_manual = -np.sqrt(2. / m) * np.cos(np.dot(X[n], omega) + u) * (omega[d, :] ** 2)
            b_term = feature_map_derivative2_d(X[n], omega, u, d)
            assert_allclose(b_term_manual, b_term)
            b_manual -= b_term_manual
            J_manual += np.dot(b_term_manual, theta)
            J_n_manual += np.dot(b_term_manual, theta)

            c_vec_manual = -np.sqrt(2. / m) * np.sin(np.dot(X[n], omega) + u) * omega[d, :]
            c_vec = feature_map_derivative_d(X[n], omega, u, d)
            assert_allclose(c_vec_manual, c_vec)
            C_term = np.outer(c_vec_manual, c_vec_manual)
            C_manual += C_term

            # not regularised here, done afterwards
            J_manual += 0.5 * np.dot(theta, np.dot(C_term, theta))
            J_n_manual += 0.5 * np.dot(theta, np.dot(C_term, theta))

        # single observation as a (1, D) row; note D == m in this test
        b = compute_b_memory(X[n].reshape(1, D), omega, u)
        C = compute_C_memory(X[n].reshape(1, D), omega, u)
        assert_allclose(b_manual, b)
        assert_allclose(C_manual, C)

        # discard regularisation for these internal checks
        J_n = objective(X[n].reshape(1, D), theta, 0, omega, u)
        J_n_2 = 0.5 * np.dot(theta, np.dot(C, theta)) - np.dot(theta, b)
        assert_allclose(J_n_2, J_n, rtol=1e-4)
        assert_allclose(J_n_manual, J_n, rtol=1e-4)

    J_manual /= N
    J_manual += 0.5 * lmbda * np.dot(theta, theta)

    J = objective(X, theta, lmbda, omega, u)
    assert_close(J, J_manual, decimal=5)
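# For reference, the manual terms above come from the random Fourier
# feature map phi(x) = sqrt(2/m) * cos(x^T omega + u), whose first and
# second coordinate-wise derivatives are the sin/cos expressions checked
# in the loop. A minimal finite-difference sketch of the first
# derivative, assuming feature_map_single(x, omega, u) evaluates phi(x)
# as it is used in the demo code below:
def test_feature_map_derivative_d_finite_difference_sketch():
    D = 3
    m = 5
    omega = np.random.randn(D, m)
    u = np.random.uniform(0, 2 * np.pi, m)
    x = np.random.randn(D)

    eps = 1e-5
    for d in range(D):
        e_d = np.zeros(D)
        e_d[d] = eps
        # central differences approximate d phi(x) / d x_d
        grad_numerical = (feature_map_single(x + e_d, omega, u) -
                          feature_map_single(x - e_d, omega, u)) / (2 * eps)
        grad_analytic = feature_map_derivative_d(x, omega, u, d)
        assert_allclose(grad_numerical, grad_analytic, atol=1e-6)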
def update_plot(val=None):
    global omega, u
    print("Updating plot")

    lmbda = 2 ** s_lmbda.val
    sigma = 2 ** s_sigma.val

    # re-sample the random basis for the current kernel bandwidth
    gamma = 0.5 * (sigma ** 2)
    omega, u = sample_basis(D, m, gamma)

    b = compute_b(Z, omega, u)
    C = compute_C(Z, omega, u)
    theta = score_matching_sym(Z, lmbda, omega, u, b, C)
    J = objective(Z, theta, lmbda, omega, u, b, C)
    J_xval = np.mean(xvalidate(Z, lmbda, omega, u, n_folds=5, num_repetitions=3))

    logq_est = lambda x: np.dot(theta, feature_map_single(x, omega, u))
    dlogq_est = lambda x: np.dot(theta, feature_map_grad_single(x, omega, u))

    description = "N=%d, sigma: %.2f, lambda: %.2f, m=%d, J=%.2f, J_xval=%.2f" % \
        (N, sigma, lmbda, m, J, J_xval)

    if plot_pdf:
        D_grid = evaluate_density_grid(Xs, Ys, logq_est)
        description = "log-pdf: " + description
    else:
        D_grid = evaluate_density_grad_grid(Xs, Ys, dlogq_est)
        description = "norm-grad-log-pdf: " + description

    ax.clear()
    ax.plot(Z[:, 0], Z[:, 1], 'bx')
    plot_array(Xs, Ys, D_grid, ax, plot_contour=True)
    ax.set_title(description)
    fig.canvas.draw_idle()
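# Hedged sketch of the slider wiring that update_plot assumes; the axes
# positions, ranges, and initial values here are illustrative only and
# not taken from the original script:
from matplotlib.widgets import Slider

fig, ax = plt.subplots()
plt.subplots_adjust(bottom=0.25)
s_lmbda = Slider(plt.axes([0.2, 0.10, 0.6, 0.03]), "log2 lambda", -15, 5, valinit=-10)
s_sigma = Slider(plt.axes([0.2, 0.05, 0.6, 0.03]), "log2 sigma", -5, 5, valinit=0)
s_lmbda.on_changed(update_plot)
s_sigma.on_changed(update_plot)
update_plot()
plt.show()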
def test_objective_sym_given_b_C_equals_given_nothing():
    N = 100
    D = 3
    m = 10
    omega = np.random.randn(D, m)
    u = np.random.uniform(0, 2 * np.pi, m)
    X = np.random.randn(N, D)
    lmbda = 1.

    C = compute_C_memory(X, omega, u)
    b = compute_b_memory(X, omega, u)
    theta = np.random.randn(m)

    J = objective(X, theta, lmbda, omega, u, b, C)
    J2 = objective(X, theta, lmbda, omega, u)
    assert_close(J, J2)
def test_score_matching_sym_returns_min_random_search():
    N = 100
    D = 3
    m = 10
    omega = np.random.randn(D, m)
    u = np.random.uniform(0, 2 * np.pi, m)
    X = np.random.randn(N, D)

    C = compute_C_memory(X, omega, u)
    b = compute_b_memory(X, omega, u)

    lmbda = 1.
    theta = score_matching_sym(X, lmbda, omega, u)
    J = objective(X, theta, lmbda, omega, u, b, C)

    # random perturbations of theta at various scales must never
    # decrease the objective if theta is the minimiser
    for noise in [0.0001, 0.001, 0.1, 1, 10, 100]:
        for _ in range(10):
            theta_test = np.random.randn(m) * noise + theta
            J_test = objective(X, theta_test, lmbda, omega, u, b, C)
            assert_less_equal(J, J_test)
def test_objective_sym_equals_half_manual():
    N = 100
    D = 3
    m = 10
    omega = np.random.randn(D, m)
    u = np.random.uniform(0, 2 * np.pi, m)
    X = np.random.randn(N, D)
    lmbda = 1.
    theta = np.random.randn(m)

    J = objective(X, theta, lmbda, omega, u)
    J_manual = _objective_sym_half_manual(X, theta, lmbda, omega, u)
    assert_close(J_manual, J)
def test_objective_sym_given_b_C():
    N = 100
    D = 3
    m = 10
    omega = np.random.randn(D, m)
    u = np.random.uniform(0, 2 * np.pi, m)
    X = np.random.randn(N, D)
    lmbda = 1.

    C = compute_C_memory(X, omega, u)
    b = compute_b_memory(X, omega, u)
    theta = np.random.randn(m)

    # the objective is the regularised quadratic form
    # J(theta) = 0.5 * theta^T (C + lmbda * I) theta - theta^T b
    J = objective(X, theta, lmbda, omega, u, b, C)
    J_manual = 0.5 * np.dot(theta.T, np.dot(C + np.eye(m) * lmbda, theta)) - np.dot(theta, b)
    assert_close(J, J_manual)
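# Since J(theta) = 0.5 * theta^T (C + lmbda * I) theta - theta^T b is a
# regularised quadratic, its minimiser has the closed form
# theta* = (C + lmbda * I)^{-1} b, which is presumably what
# score_matching_sym computes. A minimal sketch of that consistency
# check (the closed-form solve is our assumption, not taken from the
# library):
def test_score_matching_sym_matches_closed_form_sketch():
    N = 100
    D = 3
    m = 10
    omega = np.random.randn(D, m)
    u = np.random.uniform(0, 2 * np.pi, m)
    X = np.random.randn(N, D)
    lmbda = 1.

    C = compute_C_memory(X, omega, u)
    b = compute_b_memory(X, omega, u)

    theta = score_matching_sym(X, lmbda, omega, u)
    theta_closed_form = np.linalg.solve(C + np.eye(m) * lmbda, b)
    assert_allclose(theta, theta_closed_form, rtol=1e-5)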
N = 500
Z = sample_gaussian(N, mu=np.zeros(D), Sigma=L, is_cholesky=True)

# fit density in RKHS from oracle samples
sigma = 0.5
gamma = 0.5 * (sigma ** 2)
lmbda = 0.0008
m = N
omega, u = sample_basis(D, m, gamma)
theta = score_matching_sym(Z, lmbda, omega, u)

logq_est = lambda x: log_pdf_estimate(feature_map_single(x, omega, u), theta)
dlogq_est = lambda x: log_pdf_estimate_grad(feature_map_grad_single(x, omega, u), theta)

print("J=%.2f" % objective(Z, theta, lmbda, omega, u))

# plot density estimate and gradient field
plt.figure(figsize=(8, 4))
Xs = np.linspace(-5, 5)
Ys = np.linspace(-5, 5)
Xs_grad = np.linspace(-25, 25, 40)
Ys_grad = np.linspace(-25, 25, 40)
G = evaluate_density_grid(Xs, Ys, logq_est)
G_norm, U, V, _, _ = evaluate_gradient_grid(Xs_grad, Ys_grad, dlogq_est)

plt.subplot(121)
plt.plot(Z[:, 0], Z[:, 1], 'bx')
plot_array(Xs, Ys, np.exp(G), plot_contour=True)

plt.subplot(122)
plot_array(Xs_grad, Ys_grad, G_norm, plot_contour=True)
plt.quiver(Xs_grad, Ys_grad, U, V, color='m')
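# The fit is an unnormalised log-density model log q(x) = theta^T phi(x)
# (as in update_plot above), so it can also be evaluated pointwise,
# e.g. at the origin:
x_test = np.zeros(D)
print("log q(0) = %.2f (unnormalised)" % logq_est(x_test))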