Example #1
def predict(relevance_vectors, X, mean, kernel_choice):
    """RVM prediction: weighted sum of kernel evaluations against the
    relevance vectors, plus the bias weight mean[0]."""
    prediction = []

    for xi in range(len(X)):
        phi_x = 0
        for ri in range(len(relevance_vectors)):
            if kernel_choice == "gaussian":
                phi_x += mean[ri + 1] * kernel.gaussian(
                    X[xi], relevance_vectors[ri])
            elif kernel_choice == "linear":
                phi_x += mean[ri + 1] * kernel.linear_kernel(
                    X[xi], relevance_vectors[ri])
            elif kernel_choice == "polynomial":
                phi_x += mean[ri + 1] * kernel.polynomial_kernel(
                    X[xi], relevance_vectors[ri])
            elif kernel_choice == "linear_spline":
                phi_x += mean[ri + 1] * kernel.linear_spline(
                    X[xi], relevance_vectors[ri])
            elif kernel_choice == "rbf":
                phi_x += mean[ri + 1] * kernel.rbf(
                    X[xi], relevance_vectors[ri])

        # Add the bias weight once per test point.
        phi_x += mean[0]
        prediction.append(phi_x)

    return prediction
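A minimal usage sketch, assuming a local kernel module that exposes gaussian(x, y), and a weight vector mean of length len(relevance_vectors) + 1 with the bias at index 0 (all data below is hypothetical):

import numpy as np
import kernel  # assumed local module exposing gaussian(x, y), rbf(x, y), ...

relevance_vectors = np.array([[-1.0], [0.0], [1.0]])  # hypothetical RVs
mean = np.array([0.1, 0.5, -0.2, 0.3])                # [bias, w_1, w_2, w_3]
X_new = np.array([[0.5], [2.0]])

y_hat = predict(relevance_vectors, X_new, mean, "gaussian")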
Example #2
import sys

import numpy as np
from PIL import Image, ImageChops

import kernel  # local module providing gaussian(...)

# jacobi_step_with_kernel and rescale are helpers defined elsewhere
# in the same module.


def main():
    try:
        image = Image.open(sys.argv[1])
    except IOError:
        print("Could not open the input.\nUsage: tick_jpg inputfile")
        sys.exit()

    r, g, b = image.split()
    rr = np.real(np.array(r))
    gr = np.real(np.array(g))
    br = np.real(np.array(b))
    # too big    kern = kernel.gaussian(rr, 30.0)
    #    kern = kernel.gaussian(rr, 20.0)
    kern = kernel.gaussian(rr, 10.0)
    kern[0, 0] = 0.0
    rp = jacobi_step_with_kernel(rr, kern, 5)
    gp = jacobi_step_with_kernel(gr, kern, 5)
    bp = jacobi_step_with_kernel(br, kern, 5)
    rn = Image.fromarray(np.uint8(rescale(rp, 255.0)))
    gn = Image.fromarray(np.uint8(rescale(gp, 255.0)))
    bn = Image.fromarray(np.uint8(rescale(bp, 255.0)))
    inew = Image.merge("RGB", (rn, gn, bn))
    inew.save('after.jpg')
    ix = ImageChops.subtract(inew, image, 0.1)
    ix.save('difference.jpg')
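rescale is not shown in this snippet; a plausible sketch, assuming it linearly maps an array onto [0, top] so the result can be cast to uint8 (an assumption, not the author's code):

import numpy as np

def rescale(a, top):
    # Assumed behavior: shift to zero minimum, then scale the peak to `top`.
    a = np.asarray(a, dtype=np.float64)
    a = a - a.min()
    peak = a.max()
    return a * (top / peak) if peak > 0 else a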
Example #3
def test_gaussian_2():
	sigma = 2
	k = 3

	filter_kernel = kernel.gaussian(sigma, k)

	assert np.sum(filter_kernel) == 1
Example #4
def test_gaussian_1():
	sigma = 2
	k = 1

	filter_kernel = kernel.gaussian(sigma, k)

	assert filter_kernel.shape[0] == 1
	assert filter_kernel.shape[1] == 2 * k + 1
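A kernel.gaussian(sigma, k) consistent with both tests would return a 1 x (2k+1) row filter normalized to unit sum; a minimal sketch under that assumption:

import numpy as np

def gaussian(sigma, k):
    # 1 x (2k+1) Gaussian filter row, normalized so the entries sum to 1.
    xs = np.arange(-k, k + 1, dtype=np.float64)
    row = np.exp(-(xs ** 2) / (2.0 * sigma ** 2))
    row /= row.sum()
    return row.reshape(1, 2 * k + 1)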
Example #5
def main(num_samples=10, num_features=2, grid_size=20):
    samples = np.matrix(np.random.normal(size=num_samples * num_features)
                        .reshape(num_samples, num_features))

    labels = 2 * (samples.sum(axis=1) > 0) - 1.0
    sigma = 2
    trainer = SVMTrainer(kernel.gaussian(sigma))

    predictor = trainer.train(samples, labels)

    plot(predictor, samples, labels, grid_size)
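Here kernel.gaussian(sigma) takes a single argument and returns a kernel function for SVMTrainer, unlike the two-argument forms in the other examples. A factory of that shape might look like this (a sketch, assuming the usual Gaussian kernel formula):

import numpy as np

def gaussian(sigma):
    # Return k(x, y) = exp(-||x - y||^2 / (2 sigma^2)) as a callable.
    def k(x, y):
        d = np.asarray(x).ravel() - np.asarray(y).ravel()
        return np.exp(-np.dot(d, d) / (2.0 * sigma ** 2))
    return k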
Example #6
import sys

import numpy as np
import mrcfile

import kernel  # local module providing gaussian(...)

# jacobi_step_with_kernel is a helper defined elsewhere in the same module.


def main():
    try:
        original = mrcfile.open(sys.argv[1], mode='r')
        output = mrcfile.open(sys.argv[2], mode='w+')
    except IOError:
        print("Could not open the input.\nUsage: tick_mrc inputfile outputfile")
        sys.exit()


    # create a kernel (2d version)
    kern = kernel.gaussian(original.data[0], 15.)
    kern[0, 0] = 0.

    # create list of layers.
    layers = []
    for layer in original.data:
        layers.append(np.float32(jacobi_step_with_kernel(layer, kern, 2)))
    output.set_data(np.array(layers))

    #    output.set_data(original.data)
    #    output.set_data(np.float32( jacobi_step(original.data, 2)))
    # I cannot believe there isn't a method for this
    # but there isn't. FTW!
    output.header.nx = original.header.nx
    output.header.ny = original.header.ny
    output.header.nz = original.header.nz
    #    output.header.mode = original.header.mode
    output.header.mode = 2  # for float32
    output.header.nxstart = original.header.nxstart
    output.header.nystart = original.header.nystart
    output.header.nzstart = original.header.nzstart
    output.header.mx = original.header.mx
    output.header.my = original.header.my
    output.header.mz = original.header.mz
    output.header.cella = original.header.cella
    output.header.cellb = original.header.cellb
    output.header.mapc = original.header.mapc
    output.header.mapr = original.header.mapr
    output.header.maps = original.header.maps
    output.header.ispg = original.header.ispg
    output.header.nsymbt = original.header.nsymbt
    output.set_extended_header(original.extended_header)
    output.header.exttyp = original.header.exttyp
    output.header.nversion = original.header.nversion
    output.header.origin = original.header.origin
    output.header.map = original.header.map
    output.header.machst = original.header.machst
    output.header.rms = original.header.rms
    output.header.nlabl = original.header.nlabl
    output.header.label = original.header.label
    output.close()
Example #7
def design_matrix_classification(N, kernel_mode, X):
    """Build the N x N kernel (design) matrix for the chosen kernel."""
    ret = np.ndarray(shape=(N, N))

    for i in range(N):
        for j in range(N):
            if kernel_mode == "polynomial":
                ret[i, j] = kernel.polynomial_kernel(X[i], X[j])
            elif kernel_mode == "linear_spline":
                ret[i, j] = kernel.linear_spline(X[i], X[j])
            elif kernel_mode == "gaussian":
                ret[i, j] = kernel.gaussian(X[i], X[j])
            elif kernel_mode == "rbf":
                ret[i, j] = kernel.rbf(X[i], X[j])

    return ret
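For the gaussian/rbf cases the same matrix can be built without the double loop; a vectorized sketch, assuming 1-D samples and k(x, y) = exp(-(x - y)^2 / (2 sigma^2)):

import numpy as np

def design_matrix_gaussian(X, sigma=1.0):
    # Vectorized N x N Gaussian kernel matrix for 1-D samples.
    X = np.asarray(X, dtype=np.float64).ravel()
    diffs = X[:, None] - X[None, :]  # pairwise differences
    return np.exp(-(diffs ** 2) / (2.0 * sigma ** 2))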
Example #8
def Relevance_Vector_Classification_Prediction(Xtest, relevance_vectors, weightMaxPosteriori, kernel_choice):
    """RVM classification: squash the kernel expansion through a sigmoid
    and threshold at 0.5."""
    res = []
    for xi in range(len(Xtest)):
        # Reset the accumulator for each test point.
        Psum = 0
        for ri in range(len(relevance_vectors)):
            if kernel_choice == "gaussian":
                Psum += weightMaxPosteriori[ri + 1] * kernel.gaussian(Xtest[xi], relevance_vectors[ri])
            elif kernel_choice == "linear":
                Psum += weightMaxPosteriori[ri + 1] * kernel.linear_kernel(Xtest[xi], relevance_vectors[ri])
            elif kernel_choice == "polynomial":
                Psum += weightMaxPosteriori[ri + 1] * kernel.polynomial_kernel(Xtest[xi], relevance_vectors[ri])
            elif kernel_choice == "linear_spline":
                Psum += weightMaxPosteriori[ri + 1] * kernel.linear_spline(Xtest[xi], relevance_vectors[ri])

        # Add the bias weight once per test point.
        Psum += weightMaxPosteriori[0]
        y = sigmoid_function(Psum)
        res.append(1 if y > 0.5 else 0)

    return res
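sigmoid_function is not shown; it is presumably the standard logistic sigmoid:

import numpy as np

def sigmoid_function(a):
    # Logistic sigmoid: sigma(a) = 1 / (1 + exp(-a)).
    return 1.0 / (1.0 + np.exp(-a))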
Example #9
    X = np.append(np.ones((1, I_0 + I_1)), X, axis=0)
    w = np.append(np.zeros((I_0, 1)), np.ones((I_1, 1)), axis=0)
    var_prior = 6
    X_test = np.arange(-5, 5, 0.1)
    X_test = np.append(
        np.ones((1, X_test.size)),
        X_test.reshape(1, X_test.size),
        axis=0
    )

    p_lambdas = [0.3, 1, 5, 15]
    for index, p_lambda in enumerate(p_lambdas):
        initial_psi = np.zeros((X.shape[1], 1))
        predictions, psi = classification.fit_gaussian_process(
            X, w, var_prior, X_test, initial_psi,
            lambda x_i, x_j: kernel.gaussian(x_i, x_j, p_lambda)
        )

        plt.subplot(2, 2, index + 1)
        plt.plot(np.arange(-5, 5, 0.1), predictions)
        plt.scatter(class_0, np.zeros((1, I_0)), 50, c="r", edgecolors="k")
        plt.scatter(class_1, np.zeros((1, I_1)), 50, c="g", edgecolors="k")
        plt.axis([-5, 5, -0.1, 1.1])

    plt.figure("2D Gaussian process classification")
    granularity = 100
    a = -5
    b = 5
    domain = np.linspace(a, b, granularity)
    X, Y = np.meshgrid(domain, domain)
    x = X.reshape((1, X.size))
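The three-argument kernel.gaussian(x_i, x_j, p_lambda) used above is presumably a squared-exponential covariance whose length scale p_lambda controls the smoothness of the fit; a sketch under that assumption:

import numpy as np

def gaussian(x_i, x_j, lam):
    # Squared-exponential kernel: exp(-||x_i - x_j||^2 / (2 lam^2)).
    d = np.asarray(x_i, dtype=np.float64).ravel() - np.asarray(x_j, dtype=np.float64).ravel()
    return np.exp(-np.dot(d, d) / (2.0 * lam ** 2))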
Example #10
    # Prepare the training input
    X_train = np.append(np.ones((1, X_data.shape[0])),
                        X_data[:, 0].reshape((1, X_data.shape[0])),
                        axis=0)
    w = X_data[:, 1].reshape((X_data.shape[0], 1))
    var_prior = 6
    X_test = domain.reshape((1, granularity))
    X_test = np.append(np.ones((1, granularity)), X_test, axis=0)

    # Train six Gaussian process regression models, one per value of nu
    plt.figure("Fit Gaussian process regression")
    nus = [0.5, 0.7, 0.9, 1.5, 2, 3]
    for nu_index, nu in enumerate(nus):
        mu_test, var_test = regression.fit_gaussian_process(
            X_train, w, var_prior, X_test,
            lambda x_i, x_j: kernel.gaussian(x_i, x_j, nu))
        Z = np.zeros((granularity, granularity))
        for j in range(granularity):
            mu = mu_test[j, 0]
            var = var_test[j, 0]
            for i in range(granularity):
                ww = domain[i]
                Z[i, j] = gaussian(ww, mu, var)

        plt.subplot(3, 2, nu_index + 1)
        plt.pcolor(X, Y, Z)
        plt.scatter(X_data[:, 0], X_data[:, 1], edgecolors="w")
        plt.axis([-5, 5, -5, 5])

    plt.show()
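The scalar gaussian(ww, mu, var) used to fill Z is presumably the univariate normal density; a sketch:

import numpy as np

def gaussian(w, mu, var):
    # Univariate normal pdf N(w | mu, var).
    return np.exp(-((w - mu) ** 2) / (2.0 * var)) / np.sqrt(2.0 * np.pi * var)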
Example #11
'''Use case: MNIST'''

import mnist
import torch

import kernel
import eigenpro


n_class = 10
(x_train, y_train), (x_test, y_test) = mnist.load()
x_train, y_train, x_test, y_test = x_train.astype('float32'), \
    y_train.astype('float32'), x_test.astype('float32'), y_test.astype('float32')

use_cuda = torch.cuda.is_available()
device = torch.device("cuda" if use_cuda else "cpu")

kernel_fn = lambda x, y: kernel.gaussian(x, y, bandwidth=5)
model = eigenpro.FKR_EigenPro(kernel_fn, x_train, n_class, device=device)
_ = model.fit(x_train, y_train, x_test, y_test, epochs=[1, 2, 5], mem_gb=12)
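Here kernel.gaussian(x, y, bandwidth=5) evaluates a full kernel matrix between two batches of flattened images; a torch sketch of an API with that shape (the exact formula is an assumption):

import torch

def gaussian(samples, centers, bandwidth):
    # K[i, j] = exp(-||samples[i] - centers[j]||^2 / (2 * bandwidth^2)).
    sq_dists = torch.cdist(samples, centers) ** 2
    return torch.exp(-sq_dists / (2.0 * bandwidth ** 2))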