def test_weights_to_lmbdas_equals_log_version():
    """The log-space weight conversion must agree with the plain version."""
    num_weights = 30

    # plain-weight inputs: previous total mass of 1, then N-1 unit weights
    previous_mass = 1
    incoming = np.ones(num_weights - 1)
    expected = weights_to_lmbdas(previous_mass, incoming)

    # identical inputs pushed through log space
    result = log_weights_to_lmbdas(np.log(previous_mass), np.log(incoming))

    assert_allclose(expected, result)
def test_weights_to_lmbdas_equals_log_version():
    """Converting weights directly and via their logs should give the same lmbdas."""
    n = 30

    total_before = 1
    weights = np.ones(n - 1)

    direct = weights_to_lmbdas(total_before, weights)
    via_logs = log_weights_to_lmbdas(np.log(total_before), np.log(weights))

    assert_allclose(direct, via_logs)
# 예제 #3 (Example #3) — separator left over from the scraped source; commented out so the file parses
# 0
def update_b_L_C_weighted(X, b, L_C, log_sum_weights, log_weights, omega, u):
    """Weighted online update of the vector b and Cholesky factor L_C.

    Every datum in X is folded in as a convex combination
    (1 - lmbda) * old + lmbda * new, with the lmbdas derived from the
    supplied log-weights.  NOTE: L_R = L_C.T is a transposed *view*, so the
    in-place scaling and `cholupdate` calls below write straight into L_C's
    buffer — the statement order here is load-bearing.
    """
    m = 1 if np.isscalar(u) else len(u)
    num_data = X.shape[0]
    num_dims = X.shape[1]

    # turn (log) weights into per-datum convex-combination coefficients
    lmbdas = log_weights_to_lmbdas(log_sum_weights, log_weights)

    # random-feature map (cos part) and its companion (sin part) used for
    # the rank-one Cholesky updates
    projections = np.dot(X, omega) + u
    rff_scale = np.sqrt(2. / m)
    Phi = np.cos(projections) * rff_scale
    Phi2 = np.sin(projections) * rff_scale

    # the projections themselves are no longer needed
    del projections

    # operate on the upper-triangular factor internally
    L_R = L_C.T

    b_new_term = np.zeros(m)
    for idx in range(num_data):
        lmbda = lmbdas[idx]

        # downscale the factor exactly once per datum
        L_R *= np.sqrt(1 - lmbda)

        b_new_term[:] = 0
        for dim in range(num_dims):
            b_new_term += Phi[idx] * (omega[dim, :] ** 2)

            # one rank-one Cholesky update per dimension, with fixed lmbda
            cholupdate(L_R, Phi2[idx] * omega[dim, :] * np.sqrt(lmbda))

        # fold the accumulated new term into b once per datum
        b = (1 - lmbda) * b + lmbda * b_new_term

    # hand back the lower-triangular convention
    L_C = L_R.T

    return b, L_C
# 예제 #4 (Example #4) — separator left over from the scraped source; commented out so the file parses
# 0
def update_b_L_C_weighted(X, b, L_C, log_sum_weights, log_weights, omega, u):
    """Weighted online update of the vector b and Cholesky factor L_C.

    Each row of X is folded in as a convex combination
    (1 - lmbda) * old + lmbda * new, where the per-datum lmbdas are obtained
    from the given log-weights via ``log_weights_to_lmbdas``.

    NOTE(review): ``L_R = L_C.T`` below is a transposed numpy *view*, so the
    in-place ``*=`` and ``cholupdate`` calls mutate L_C's underlying buffer
    directly — presumably intentional; confirm ``cholupdate`` updates in place.
    """
    # feature-space dimensionality; u may be a scalar shift or one per feature
    m = 1 if np.isscalar(u) else len(u)
    N = X.shape[0]  # number of data
    D = X.shape[1]  # input dimensionality
    
    # transform weights into (1-\lmbda)*old_mean+ \lmbda*new_term style updates
    lmbdas = log_weights_to_lmbdas(log_sum_weights, log_weights)
    
    # first and (negative) second derivatives of rff feature map
    projection = np.dot(X, omega) + u
    Phi = np.cos(projection) * np.sqrt(2. / m)
    Phi2 = np.sin(projection) * np.sqrt(2. / m)
    
    # not needed any longer
    del projection
    
    # work on upper triangular cholesky internally
    L_R = L_C.T
    
    # reused accumulator for the per-datum contribution to b
    b_new_term = np.zeros(m)
    for i in range(N):
        # downscale L_C once for every datum
        L_R *= np.sqrt(1 - lmbdas[i])
        
        b_new_term[:] = 0
        for d in range(D):
            b_new_term += Phi[i] * (omega[d, :] ** 2)
            
            # L_C is updated D times for every datum, each with fixed lmbda
            C_new_term = Phi2[i] * omega[d, :] * np.sqrt(lmbdas[i])
            cholupdate(L_R, C_new_term)
        
        # b is updated once per datum
        b = (1 - lmbdas[i]) * b + lmbdas[i] * b_new_term
    
    # transform back to lower triangular version
    L_C = L_R.T
    
    return b, L_C