Пример #1
0
def test_update_L_C_single_equals_compute_L_C_when_initialised_correctly():
    """Sequential update_L_C_single calls must match the batch Cholesky.

    Starting from an "initial fit" of sqrt(lmbda)*I (weighted as m virtual
    observations), folding in N points one at a time should agree with the
    weighted average of that fake initial C and the batch C over all points.
    """
    sigma = 1.
    lmbda = 2.
    N = 200
    D = 2
    m = 10
    X = np.random.randn(N, D)

    # random Fourier feature basis shared by both computations
    omega, u = rff_sample_basis(D, m, sigma)

    # sequential path: start from sqrt(lmbda)*I and update one point at a time
    L_C_seq = np.eye(m) * np.sqrt(lmbda)
    n_seq = m
    for x in X:
        L_C_seq = update_L_C_single(x, L_C_seq, n_seq, omega, u)
        n_seq += 1

    # batch path: weighted average of the "fake" initial C and the data C
    L_C_init = np.eye(m) * np.sqrt(lmbda)
    n_init = m
    C_avg = (np.dot(L_C_init, L_C_init.T) * n_init +
             compute_C(X, omega, u) * N) / (n_init + N)
    L_C_ref = np.linalg.cholesky(C_avg)

    assert_allclose(L_C_seq, L_C_ref)
Пример #2
0
    def __init__(self, sigma, lmbda, m, D):
        """Store hyper-parameters, draw the feature basis, reset the solution.

        sigma -- kernel bandwidth; lmbda -- regularisation weight;
        m -- number of random features; D -- input dimensionality.
        """
        self.sigma, self.lmbda = sigma, lmbda
        self.m, self.D = m, D

        # random Fourier feature basis used by this estimator
        self.omega, self.u = rff_sample_basis(D, m, sigma)

        self._initialise_solution()
Пример #3
0
 def __init__(self, sigma, lmbda, m, D):
     """Record hyper-parameters and sample the random feature basis.

     sigma -- kernel bandwidth; lmbda -- regularisation weight;
     m -- number of random features; D -- input dimensionality.
     """
     self.sigma, self.lmbda = sigma, lmbda
     self.m, self.D = m, D

     # draw the shared random Fourier basis, then reset internal state
     self.omega, self.u = rff_sample_basis(D, m, sigma)
     self._initialise_solution()
Пример #4
0
def test_rff_feature_map_third_order_tensor_theano_execute():
    """Smoke test: each theano third-order-tensor component evaluates."""
    if not theano_available:
        raise SkipTest("Theano not available.")

    dim = 2
    num_features = 10
    bandwidth = 1.
    point = np.random.randn(dim)
    omega, u = rff_sample_basis(dim, num_features, bandwidth)

    # no value assertions -- only check that every component call succeeds
    for j in range(num_features):
        rff_feature_map_comp_third_order_tensor_theano(point, omega[:, j], u[j])
Пример #5
0
def test_rff_feature_map_third_order_tensor_theano_execute():
    """Execute the theano third-order-tensor feature map for every component."""
    if not theano_available:
        raise SkipTest("Theano not available.")

    D, m, sigma = 2, 10, 1.
    x = np.random.randn(D)
    omega, u = rff_sample_basis(D, m, sigma)

    # smoke test: simply make sure each per-component evaluation runs
    for idx in range(m):
        rff_feature_map_comp_third_order_tensor_theano(x, omega[:, idx], u[idx])
Пример #6
0
def test_rff_feature_map_comp_theano_result_equals_manual():
    """Every theano feature-map component must match the numpy reference."""
    if not theano_available:
        raise SkipTest("Theano not available.")

    D = 2
    m = 10
    sigma = 1.
    x = np.random.randn(D)
    omega, u = rff_sample_basis(D, m, sigma)

    reference = rff_feature_map_single(x, omega, u)
    scale = np.sqrt(m)
    for idx in range(m):
        # the reference is a Monte Carlo average, so rescale by sqrt(m)
        component = rff_feature_map_comp_theano(x, omega[:, idx], u[idx]) / scale
        assert_close(component, reference[idx])
Пример #7
0
def test_rff_feature_map_comp_theano_result_equals_manual():
    """Compare each theano feature-map component against the numpy map."""
    if not theano_available:
        raise SkipTest("Theano not available.")

    dim, num_comps, bandwidth = 2, 10, 1.
    point = np.random.randn(dim)
    omega, u = rff_sample_basis(dim, num_comps, bandwidth)

    expected = rff_feature_map_single(point, omega, u)
    for j in range(num_comps):
        # expected is a Monte Carlo average -- normalise theano output by sqrt(m)
        got = rff_feature_map_comp_theano(point, omega[:, j], u[j]) / np.sqrt(num_comps)
        assert_close(got, expected[j])
Пример #8
0
def test_rff_feature_map_grad_theano_result_equals_manual():
    """Per-component theano gradients must match the batch numpy gradient."""
    if not theano_available:
        raise SkipTest("Theano not available.")

    D = 2
    m = 10
    sigma = 1.
    x = np.random.randn(D)
    X = x[np.newaxis, :]
    omega, u = rff_sample_basis(D, m, sigma)

    # numpy reference: gradient at the single point, sliced out of the batch
    reference = rff_feature_map_grad(X, omega, u)[:, 0, :]

    scale = np.sqrt(m)
    for idx in range(m):
        # the reference is a Monte Carlo average, so rescale by sqrt(m)
        grad_comp = rff_feature_map_comp_grad_theano(x, omega[:, idx], u[idx]) / scale
        assert_close(grad_comp, reference[:, idx])
Пример #9
0
def test_rff_feature_map_grad_theano_result_equals_manual():
    """Check theano component gradients against the numpy batch gradient."""
    if not theano_available:
        raise SkipTest("Theano not available.")

    dim, num_comps, bandwidth = 2, 10, 1.
    point = np.random.randn(dim)
    batch = point[np.newaxis, :]
    omega, u = rff_sample_basis(dim, num_comps, bandwidth)

    # slice out the gradient for the single point in the batch
    expected = rff_feature_map_grad(batch, omega, u)[:, 0, :]

    for j in range(num_comps):
        # expected is a Monte Carlo average -- normalise theano output by sqrt(m)
        got = rff_feature_map_comp_grad_theano(point, omega[:, j],
                                               u[j]) / np.sqrt(num_comps)
        assert_close(got, expected[:, j])
Пример #10
0
def test_update_b_single_equals_compute_b_when_initialised_correctly():
    """Sequential update_b_single calls must agree with the batch average.

    Starting from a zero "initial fit" weighted as m virtual observations,
    folding in N points one at a time should equal the weighted average of
    that fake b and the batch b computed over all points at once.
    """
    sigma = 1.
    N = 200
    D = 2
    m = 10
    X = np.random.randn(N, D)

    # random Fourier feature basis shared by both computations
    omega, u = rff_sample_basis(D, m, sigma)

    # sequential path: zero initial b, updated one point at a time
    b_seq = np.zeros(m)
    n_seq = m
    for x in X:
        b_seq = update_b_single(x, b_seq, n_seq, omega, u)
        n_seq += 1

    # batch path: weighted average of the "fake" initial b and the data b
    b_init = np.zeros(m)
    n_init = m
    b_ref = (b_init * n_init + compute_b(X, omega, u) * N) / (n_init + N)

    assert_allclose(b_seq, b_ref)
Пример #11
0
    def set_parameters_from_dict(self, param_dict):
        """Apply *param_dict* via the base class, then refresh the basis.

        The random Fourier basis depends on D, m and sigma, so it must be
        re-drawn whenever those parameters may have changed.
        """
        EstimatorBase.set_parameters_from_dict(self, param_dict)
        self.omega, self.u = rff_sample_basis(self.D, self.m, self.sigma)
Пример #12
0
 def set_parameters_from_dict(self, param_dict):
     """Delegate parameter assignment, then re-sample the feature basis.

     Re-drawing omega/u keeps the basis consistent with any updated
     values of D, m or sigma.
     """
     EstimatorBase.set_parameters_from_dict(self, param_dict)
     self.omega, self.u = rff_sample_basis(self.D, self.m, self.sigma)