Example #1
import kernel  # user-supplied module with the kernel functions (sketched below)


def predict(relevance_vectors, X, mean, kernel_choice):
    """RVM regression prediction: y(x) = w_0 + sum_i w_i * K(x, x_i)."""
    prediction = []

    for xi in range(len(X)):
        # Weighted sum of kernel evaluations against every relevance vector.
        phi_x = 0
        for ri in range(len(relevance_vectors)):
            if kernel_choice == "gaussian":
                phi_x += mean[ri + 1] * kernel.gaussian(
                    X[xi], relevance_vectors[ri])
            elif kernel_choice == "linear":
                phi_x += mean[ri + 1] * kernel.linear_kernel(
                    X[xi], relevance_vectors[ri])
            elif kernel_choice == "polynomial":
                phi_x += mean[ri + 1] * kernel.polynomial_kernel(
                    X[xi], relevance_vectors[ri])
            elif kernel_choice == "linear_spline":
                phi_x += mean[ri + 1] * kernel.linear_spline(
                    X[xi], relevance_vectors[ri])
            elif kernel_choice == "rbf":
                phi_x += mean[ri + 1] * kernel.rbf(
                    X[xi], relevance_vectors[ri])

        # Add the bias weight w_0 exactly once per test point.
        phi_x += mean[0]
        prediction.append(phi_x)

    return prediction
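These examples all call into a kernel module that the listing never shows. Below is a minimal sketch of what it might contain, using the standard textbook definitions; the default parameters and the linear-spline form (from Tipping's RVM paper) are assumptions, not the original code:

import numpy as np


def linear_kernel(x, y):
    # Plain dot product.
    return np.dot(x, y)


def polynomial_kernel(X, Y, c=1, p=2):
    # (X Y^T + c)^p: works for a pair of vectors, and returns the full
    # Gram matrix when X and Y are row-stacked sample matrices.
    return (np.dot(X, np.transpose(Y)) + c) ** p


def gaussian(x, y, sigma=1.0):
    # exp(-||x - y||^2 / (2 sigma^2))
    return np.exp(-np.sum((np.asarray(x) - np.asarray(y)) ** 2)
                  / (2 * sigma ** 2))


def rbf(x, y, gamma=1.0):
    # exp(-gamma ||x - y||^2)
    return np.exp(-gamma * np.sum((np.asarray(x) - np.asarray(y)) ** 2))


def linear_spline(x, y):
    # Univariate linear-spline kernel (Tipping, 2001); scalar inputs assumed.
    m = min(x, y)
    return 1 + x * y + x * y * m - ((x + y) / 2) * m ** 2 + m ** 3 / 3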
Example #2
import numpy as np
import kernel


def design_matrix_classification(N, kernel_mode, X):
    # Build the N x N Gram matrix Phi with Phi[i, j] = K(X[i], X[j]).
    ret = np.empty((N, N))

    for i in range(N):
        for j in range(N):
            if kernel_mode == "polynomial":
                ret[i, j] = kernel.polynomial_kernel(X[i], X[j])
            elif kernel_mode == "linear_spline":
                ret[i, j] = kernel.linear_spline(X[i], X[j])
            elif kernel_mode == "gaussian":
                ret[i, j] = kernel.gaussian(X[i], X[j])
            elif kernel_mode == "rbf":
                ret[i, j] = kernel.rbf(X[i], X[j])

    return ret
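A quick usage sketch, assuming the kernel module sketched above (the data is made up for illustration):

import numpy as np

X = np.random.randn(10, 2)   # 10 two-dimensional training points
Phi = design_matrix_classification(len(X), "rbf", X)
print(Phi.shape)             # (10, 10)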
Example #3
import numpy as np
import kernel


def check_polynomial_kernel():
    ex_name = "Polynomial kernel"
    n, m, d = 3, 5, 7
    c = 1
    p = 2
    X = np.random.random((n, d))
    Y = np.random.random((m, d))
    try:
        # The degree argument is p; passing d here would test the wrong kernel.
        K = kernel.polynomial_kernel(X, Y, c, p)
    except NotImplementedError:
        log(red("FAIL"), ex_name, ": not implemented")
        return True
    for i in range(n):
        for j in range(m):
            exp = (X[i] @ Y[j] + c)**p
            got = K[i][j]
            if not equals(exp, got):
                log(
                    red("FAIL"), ex_name,
                    ": values at ({}, {}) do not match. Expected {}, got {}".
                    format(i, j, exp, got))
                return True
    log(green("PASS"), ex_name, "")
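The checker relies on log, red, green, and equals helpers that are not part of this listing. A minimal stand-in, assuming ANSI-colored terminal output and a tolerance-based float comparison (the tolerance value is a guess):

import numpy as np


def red(s):
    return "\033[91m" + s + "\033[0m"    # wrap in ANSI red


def green(s):
    return "\033[92m" + s + "\033[0m"    # wrap in ANSI green


def log(*args):
    print(" ".join(str(a) for a in args))


def equals(x, y):
    # Tolerant float comparison.
    return np.allclose(x, y, atol=1e-8)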
Example #4
import kernel


def Relevance_Vector_Classification_Prediction(Xtest, relevance_vectors,
                                               weightMaxPosteriori,
                                               kernel_choice):
    res = []
    for xi in range(len(Xtest)):
        # Reset the activation for every test point so the sum does not
        # leak across iterations.
        Psum = 0
        for ri in range(len(relevance_vectors)):
            if kernel_choice == "gaussian":
                Psum += weightMaxPosteriori[ri + 1] * kernel.gaussian(
                    Xtest[xi], relevance_vectors[ri])
            elif kernel_choice == "linear":
                Psum += weightMaxPosteriori[ri + 1] * kernel.linear_kernel(
                    Xtest[xi], relevance_vectors[ri])
            elif kernel_choice == "polynomial":
                Psum += weightMaxPosteriori[ri + 1] * kernel.polynomial_kernel(
                    Xtest[xi], relevance_vectors[ri])
            elif kernel_choice == "linear_spline":
                Psum += weightMaxPosteriori[ri + 1] * kernel.linear_spline(
                    Xtest[xi], relevance_vectors[ri])

        # Add the bias weight once, squash through the sigmoid, and
        # threshold the resulting probability at 0.5.
        Psum += weightMaxPosteriori[0]
        y = sigmoid_function(Psum)
        res.append(1 if y > 0.5 else 0)

    return res
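sigmoid_function is not shown either; the standard logistic sigmoid fits how it is used here:

import numpy as np


def sigmoid_function(x):
    # Logistic sigmoid: maps the RVM activation to a probability in (0, 1).
    return 1.0 / (1.0 + np.exp(-x))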
Example #5
            folder_cache, model, temp_parameter))
        plot_cost_function_over_time(cost)
    return theta
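This snippet picks up mid-way through run_kernel_softmax and then drives it with three kernels. The kernel functions themselves are not shown; below is a minimal reconstruction consistent with how they are called, computing full Gram matrices over row-stacked samples (the bodies are assumptions, not the original implementations):

import numpy as np


def linear_kernel(X, Y):
    # K[i, j] = X[i] . Y[j]
    return X @ Y.T


def polynomial_kernel(X, Y, c, p):
    # K[i, j] = (X[i] . Y[j] + c) ** p
    return (X @ Y.T + c) ** p


def rbf_kernel(X, Y, gamma):
    # K[i, j] = exp(-gamma * ||X[i] - Y[j]||^2), vectorized via the
    # expansion ||x - y||^2 = ||x||^2 + ||y||^2 - 2 x . y
    sq_dists = (np.sum(X ** 2, axis=1)[:, None]
                + np.sum(Y ** 2, axis=1)[None, :]
                - 2 * X @ Y.T)
    return np.exp(-gamma * sq_dists)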


temp_parameter = 1

theta = run_kernel_softmax("lineal_pca18", linear_kernel, train_pca, train_y,
                           temp_parameter)
test_error = compute_kernel_test_error(test_pca, test_y, linear_kernel, theta,
                                       train_pca, temp_parameter)
print('\nsoftmax_kernel lineal_pca18 \t\ttest_error:')
print('(t = {})  \t\t\t\t{:.3}'.format(temp_parameter, test_error))

c, p = 0.5, 2
kernel = lambda X, Y: polynomial_kernel(X, Y, c, p)
theta = run_kernel_softmax("polinomial_{}_{}_pca18".format(c, p), kernel,
                           train_pca, train_y, temp_parameter)
test_error = compute_kernel_test_error(test_pca, test_y, kernel, theta,
                                       train_pca, temp_parameter)
print('\nsoftmax_kernel polinomial_pca18 \ttest_error:')
print('(t = {}, c = {}, p = {})  \t\t{:.3}'.format(temp_parameter, c, p,
                                                   test_error))

gamma = 1
kernel = lambda X, Y: rbf_kernel(X, Y, gamma)
theta = run_kernel_softmax("rbf_{}_pca18".format(gamma), kernel, train_pca,
                           train_y, temp_parameter)
test_error = compute_kernel_test_error(test_pca, test_y, kernel, theta,
                                       train_pca, temp_parameter)
print('\nsoftmax_kernel rbf_pca18 \t\ttest_error:')
print('(t = {}, gamma = {})  \t\t\t{:.3}'.format(temp_parameter, gamma,
                                                 test_error))