Example #1
def test_rff_feature_map_derivative_d_2n():
    X = np.array([[1.], [3.]])
    u = np.array([2.])
    omega = np.array([[2.]])
    d = 0
    phi_derivative = rff_feature_map_grad_d(X, omega, u, d)
    phi_derivative_manual = -np.sin(X * omega + u) * omega[:, d] * np.sqrt(2.)
    assert_close(phi_derivative, phi_derivative_manual)
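The manual expression above is the d-th partial derivative of the random Fourier cosine feature map phi(x) = sqrt(2/m) * cos(omega^T x + u); with m = 1 here, sqrt(2/m) = sqrt(2) and omega[:, d] coincides with omega[d, :]. Below is a minimal NumPy sketch of a gradient function of that assumed form, written only for illustration and not taken from the library under test:

import numpy as np

def rff_feature_map_grad_d_sketch(X, omega, u, d):
    # Sketch only: assumes phi(x) = sqrt(2/m) * cos(x . omega + u), omega of shape (D, m).
    # d-th partial derivative: -sqrt(2/m) * sin(x . omega + u) * omega[d, :]
    m = omega.shape[1]
    projections = np.dot(X, omega) + u  # shape (N, m)
    return -np.sqrt(2. / m) * np.sin(projections) * omega[d, :]

# Reproduces the manual expression from the test (m = 1, so sqrt(2 / m) = sqrt(2)):
X = np.array([[1.], [3.]])
u = np.array([2.])
omega = np.array([[2.]])
print(rff_feature_map_grad_d_sketch(X, omega, u, 0))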
Example #2
def test_compute_C_1d2n():
    X = np.array([[1.], [2.]])
    u = np.array([2.])
    omega = np.array([[2.]])
    d = 0
    C_manual = np.mean(rff_feature_map_grad_d(X, omega, u, d)**2)
    C = compute_C_memory(X, omega, u)
    assert_allclose(C_manual, C)
Example #3
def test_compute_C_1d1n():
    X = np.array([[1.]])
    u = np.array([2.])
    omega = np.array([[2.]])
    d = 0
    phi = rff_feature_map_grad_d(X, omega, u, d).flatten()
    C_manual = np.outer(phi, phi)
    C = compute_C_memory(X, omega, u)
    assert_allclose(C_manual, C)
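Taken together, the two tests above suggest that compute_C_memory averages, over the data points, the sum over input dimensions of the outer products of the feature-map gradients. A sketch of that quantity follows; it is inferred from the assertions and the assumed cosine feature map, not copied from the library:

import numpy as np

def compute_C_sketch(X, omega, u):
    # Sketch only: C = 1/N * sum_n sum_d outer(grad_d phi(x_n), grad_d phi(x_n)),
    # assuming phi(x) = sqrt(2/m) * cos(x . omega + u).
    N, D = X.shape
    m = omega.shape[1]
    C = np.zeros((m, m))
    for n in range(N):
        projection = np.dot(X[n], omega) + u  # shape (m,)
        for d in range(D):
            c_vec = -np.sqrt(2. / m) * np.sin(projection) * omega[d, :]
            C += np.outer(c_vec, c_vec)
    return C / N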
Example #4
def test_rff_feature_map_derivatives_loop_equals_map_derivative_d():
    N = 10
    D = 20
    m = 3
    X = np.random.randn(N, D)
    omega = np.random.randn(D, m)
    u = np.random.uniform(0, 2 * np.pi, m)
    
    derivatives = rff_feature_map_grad_loop(X, omega, u)
    
    for d in range(D):
        derivative = rff_feature_map_grad_d(X, omega, u, d)
        assert_allclose(derivatives[d], derivative)
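The assertion fixes the expected layout of the looped gradient: indexing the result by d gives the same (N, m) array as rff_feature_map_grad_d. A sketch of a function with that behaviour, inferred from the test rather than taken from the library:

import numpy as np

def rff_feature_map_grad_loop_sketch(X, omega, u):
    # Sketch only: stacks the per-dimension gradients into a (D, N, m) array so that
    # result[d] matches the per-dimension gradient of phi(x) = sqrt(2/m) * cos(x . omega + u).
    N, D = X.shape
    m = omega.shape[1]
    projections = np.dot(X, omega) + u  # shape (N, m)
    grads = np.zeros((D, N, m))
    for d in range(D):
        grads[d] = -np.sqrt(2. / m) * np.sin(projections) * omega[d, :]
    return grads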
Example #5
def test_rff_feature_map_grad_single_equals_feature_map_derivative_d():
    D = 2
    m = 3
    omega = np.random.randn(D, m)
    u = np.random.uniform(0, 2 * np.pi, m)
    x = np.random.randn(D)
    
    grad = rff_feature_map_grad_single(x, omega, u)
    
    grad_manual = np.zeros((D, m))
    for d in range(D):
        grad_manual[d, :] = rff_feature_map_grad_d(x, omega, u, d)
    
    assert_allclose(grad_manual, grad)
Example #6
def test_objective_sym_equals_completely_manual_manually():
    N = 100
    D = 3
    m = 3
    omega = np.random.randn(D, m)
    u = np.random.uniform(0, 2 * np.pi, m)
    X = np.random.randn(N, D)
    theta = np.random.randn(m)

    J_manual = 0.
    for n in range(N):
        b_manual = np.zeros(m)
        C_manual = np.zeros((m, m))
        J_n_manual = 0.
        for d in range(D):
            b_term_manual = -np.sqrt(2. / m) * np.cos(np.dot(X[n], omega) +
                                                      u) * (omega[d, :]**2)
            b_term = rff_feature_map_grad2_d(X[n], omega, u, d)
            assert_allclose(b_term_manual, b_term)
            b_manual -= b_term_manual
            J_manual += np.dot(b_term_manual, theta)
            J_n_manual += np.dot(b_term_manual, theta)

            c_vec_manual = -np.sqrt(2. / m) * np.sin(np.dot(X[n], omega) +
                                                     u) * omega[d, :]
            c_vec = rff_feature_map_grad_d(X[n], omega, u, d)
            assert_allclose(c_vec_manual, c_vec)
            C_term = np.outer(c_vec_manual, c_vec_manual)
            C_manual += C_term

            # not regularised here, done afterwards
            J_manual += 0.5 * np.dot(theta, np.dot(C_term, theta))
            J_n_manual += 0.5 * np.dot(theta, np.dot(C_term, theta))

        b = compute_b_memory(X[n].reshape(1, m), omega, u)
        C = compute_C_memory(X[n].reshape(1, m), omega, u)
        assert_allclose(b_manual, b)
        assert_allclose(C_manual, C)

        # discard regularisation for these internal checks
        J_n = objective(X[n].reshape(1, m), theta, omega, u)
        J_n_2 = 0.5 * np.dot(theta, np.dot(C, theta)) - np.dot(theta, b)
        assert_allclose(J_n_2, J_n, rtol=1e-4)
        assert_allclose(J_n_manual, J_n, rtol=1e-4)

    J_manual /= N
    J = objective(X, theta, omega, u)

    assert_close(J, J_manual, decimal=5)
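The internal check J_n_2 = 0.5 * theta^T C theta - theta^T b spells out what the unregularised objective is compared against: the average over data points of 0.5 * theta^T C_n theta - theta^T b_n, with b_n built from second derivatives and C_n from outer products of first derivatives of the feature map. A compact sketch of that quantity, mirroring the manual loop above under the assumed cosine feature map (an illustration, not the library implementation):

import numpy as np

def objective_sketch(X, theta, omega, u):
    # Sketch only: unregularised objective
    #   J(theta) = 1/N * sum_n [ 0.5 * theta^T C_n theta - theta^T b_n ],
    # with b_n = -sum_d grad2_d phi(x_n) and C_n = sum_d outer(grad_d phi(x_n), grad_d phi(x_n)),
    # assuming phi(x) = sqrt(2/m) * cos(x . omega + u).
    N, D = X.shape
    m = omega.shape[1]
    J = 0.
    for n in range(N):
        projection = np.dot(X[n], omega) + u  # shape (m,)
        for d in range(D):
            grad2_d = -np.sqrt(2. / m) * np.cos(projection) * omega[d, :] ** 2
            grad_d = -np.sqrt(2. / m) * np.sin(projection) * omega[d, :]
            J += np.dot(grad2_d, theta)  # per-dimension contribution to -theta . b_n
            J += 0.5 * np.dot(theta, np.dot(np.outer(grad_d, grad_d), theta))
    return J / N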
Example #7
def _objective_sym_half_manual(X, theta, omega, u):
    N = X.shape[0]
    D = X.shape[1]

    J_manual = 0.

    for n in range(N):
        for d in range(D):
            b_term = -rff_feature_map_grad2_d(X[n], omega, u, d)
            J_manual -= np.dot(b_term, theta)

            c_vec = rff_feature_map_grad_d(X[n], omega, u, d)
            C_term_manual = np.outer(c_vec, c_vec)
            J_manual += 0.5 * np.dot(theta, np.dot(C_term_manual, theta))

    J_manual /= N
    return J_manual