Example #1
File: train.py Project: NICTA/dora
def condition(X, y, kernelFn, hyper_opt_config_copys):
    # Condition a Gaussian process regressor on (X, y): build the noisy
    # kernel matrix, factorise it, and solve for the weight vector alpha.
    assert len(y.shape) == 1  # y must be a 1-d vector of shape (n,)
    h_kernel, noise_std = hyper_opt_config_copys
    kernel = lambda x1, x2: kernelFn(x1, x2, h_kernel)
    noise_vector = predict.noise_vector(X, noise_std)
    L = linalg.cholesky(X, kernel, noise_vector)
    alpha = predict.alpha(y, L)
    return types.RegressionParams(X, L, alpha, kernel, y, noise_std)
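For reference, the factorise-and-solve step that condition delegates to linalg.cholesky and predict.alpha is the standard GP conditioning computation. The sketch below redoes it with plain NumPy/SciPy under the assumption that dora's helpers build K(X, X) plus the diagonal noise, take its Cholesky factor, and solve (K + sigma_n^2 I) alpha = y; the RBF kernel and all names here are illustrative, not dora's API.

import numpy as np
from scipy.linalg import cholesky, cho_solve

def rbf(x1, x2, length_scale):
    # Hypothetical squared-exponential kernel standing in for kernelFn
    sq_dist = np.sum((x1[:, None, :] - x2[None, :, :]) ** 2, axis=-1)
    return np.exp(-0.5 * sq_dist / length_scale ** 2)

def condition_sketch(X, y, length_scale, noise_std):
    K = rbf(X, X, length_scale)                         # kernel matrix K(X, X)
    L = cholesky(K + noise_std ** 2 * np.eye(len(X)), lower=True)
    alpha = cho_solve((L, True), y)                     # (K + sigma_n^2 I)^-1 y
    return L, alpha

X = np.random.rand(20, 2)
y = np.sin(X[:, 0]) + 0.1 * np.random.randn(20)
L, alpha = condition_sketch(X, y, length_scale=0.5, noise_std=0.1)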
Example #2
File: train.py Project: NICTA/dora
def criterion(sigma, noise):
    # Objective for hyperparameter optimisation: closes over cov_fn, X, Y,
    # optCrition and verbose from the enclosing training routine.
    k = lambda x1, x2: cov_fn(x1, x2, sigma)
    X_noise = predict.noise_vector(X, noise)
    L = linalg.cholesky(X, k, X_noise)
    a = predict.alpha(Y, L)
    if optCrition == "logMarg":
        val = negative_log_marginal_likelihood(Y, L, a)
    elif optCrition == "crossVal":
        val = negative_log_prob_cross_val(Y, L, a)
    if verbose:
        print("[" + str(val) + "]  ", sigma, noise)
    return val
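The "logMarg" branch only needs the Cholesky factor L and the weights a computed above. A minimal sketch, assuming negative_log_marginal_likelihood implements the usual Gaussian-process formula -log p(y | X) = 1/2 y^T alpha + sum_i log L_ii + n/2 log 2*pi (the function below is illustrative, not dora's implementation):

import numpy as np

def nlml_sketch(y, L, alpha):
    # 0.5 * y^T alpha      : data-fit term
    # sum(log(diag(L)))    : 0.5 * log|K + sigma_n^2 I| via the Cholesky factor
    # 0.5 * n * log(2*pi)  : normalisation constant
    return (0.5 * y.dot(alpha)
            + np.sum(np.log(np.diag(L)))
            + 0.5 * len(y) * np.log(2.0 * np.pi))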
Example #3
File: train.py Project: NICTA/dora
def remove_data(regressor, remID, query=None):
    # Drop the observations indexed by remID and refresh the conditioned model
    assert isinstance(regressor, types.RegressionParams)
    assert not query or isinstance(query, types.QueryParams)

    regressor.X = np.delete(regressor.X, remID, axis=0)
    regressor.y = np.delete(regressor.y, remID, axis=0)
    # regressor.L = chol_down(regressor.L, remID)

    # Rebuild the Cholesky factor and weights for the reduced training set
    noise_vector = predict.noise_vector(regressor.X, regressor.noise_std)
    regressor.L = linalg.cholesky(regressor.X, regressor.kernel, noise_vector)
    regressor.alpha = predict.alpha(regressor.y, regressor.L)

    # Optionally update the query
    if query is not None:
        query.K_xxs = np.delete(query.K_xxs, remID, axis=0)
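The bookkeeping in remove_data is plain NumPy: np.delete with axis=0 drops the selected rows from X and the matching entries of y, after which L and alpha no longer match the data and must be rebuilt. A small standalone illustration (array shapes chosen arbitrarily):

import numpy as np

X = np.arange(12.0).reshape(6, 2)   # six 2-d training inputs
y = np.arange(6.0)                  # matching targets, shape (6,)

remID = [1, 4]                      # indices of the observations to drop
X = np.delete(X, remID, axis=0)     # -> shape (4, 2)
y = np.delete(y, remID, axis=0)     # -> shape (4,)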
Example #4
File: train.py Project: NICTA/dora
def criterion(sigma, noise):
    # Cross-validated variant of the objective: evaluate the chosen
    # criterion on each fold and sum the results. Closes over cov_fn,
    # folds, optCrition and verbose from the enclosing routine.
    k = lambda x1, x2: cov_fn(x1, x2, sigma)
    val = 0
    for f in range(folds.n_folds):
        Xf = folds.X[f]
        Yf = folds.flat_y[f]
        Xf_noise = predict.noise_vector(Xf, noise)
        Lf = linalg.cholesky(Xf, k, Xf_noise)
        af = predict.alpha(Yf, Lf)
        if optCrition == "logMarg":
            val += negative_log_marginal_likelihood(Yf, Lf, af)
        elif optCrition == "crossVal":
            val += negative_log_prob_cross_val(Yf, Lf, af)
    if verbose:
        print("[" + str(val) + "]  ", sigma, noise)
    return val
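This variant evaluates the objective per fold and sums the results, so it only needs folds to expose n_folds plus per-fold inputs and targets. The snippet does not show how folds is built; the container below is a hypothetical stand-in inferred from the attributes used above (folds.n_folds, folds.X[f], folds.flat_y[f]), not dora's actual folds object.

import numpy as np

class Folds:
    """Hypothetical fold container with the attributes criterion expects."""
    def __init__(self, X, y, n_folds=5, seed=0):
        rng = np.random.default_rng(seed)
        chunks = np.array_split(rng.permutation(len(y)), n_folds)
        self.n_folds = n_folds
        self.X = [X[c] for c in chunks]        # per-fold inputs
        self.flat_y = [y[c] for c in chunks]   # per-fold targets, each shape (n_f,)

folds = Folds(np.random.rand(30, 2), np.random.randn(30), n_folds=3)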