Code example #1
import numpy as np
from matplotlib import pyplot as plt
from sklearn.cluster import KMeans

from GP.covariance_functions import SquaredExponential
from GP.gaussian_process_regression import GPR


def run_methods(train_points, train_targets, test_points, test_targets,
                model_parameters, optimizer_options, file_name, ind_num, title, show=False):
    method = 'means'

    # Initialize the inducing inputs at the K-means cluster centers of the training data
    print('Finding means...')
    means = KMeans(n_clusters=ind_num, n_init=1, max_iter=20)
    means.fit(train_points.T)
    inputs = means.cluster_centers_.T
    print('...found')

    # Compare the two optimizers supported by the 'means' method
    for optimizer, color, opts in zip(['L-BFGS-B', 'Projected Newton'], ['-kx', '-mx'],
                                      optimizer_options):
        print('Optimizer', optimizer)
        model_covariance_obj = SquaredExponential(np.copy(model_parameters))
        new_gp = GPR(model_covariance_obj, method=method, optimizer=optimizer)
        res = new_gp.fit(train_points, train_targets, num_inputs=ind_num, optimizer_options=opts, inputs=inputs)
        name = optimizer
        metric = lambda w: new_gp.get_prediction_quality(w, train_points, train_targets, test_points, test_targets)
        x_lst, y_lst = res.plot_performance(metric, 'i', freq=1)
        plt.plot(x_lst, y_lst, color, label=name)

    plt.xlabel('Epoch')
    plt.ylabel('$R^2$-score on test data')
    plt.legend()
    plt.title(title)
    plt.savefig('../Plots/vi_variations/'+file_name + '.pgf')
    if show:
        plt.show()
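
A minimal sketch of how this variant might be driven, assuming x_tr, y_tr, x_test, y_test are generated as in the data-generation snippets below; the option dictionaries are modeled on lbfgsb_options from code example #5, and the exact keys Projected Newton accepts are an assumption.

# Hypothetical driver for the comparison above.
lbfgsb_options = {'maxiter': 50, 'disp': False}
pn_options = {'maxiter': 50, 'disp': False}  # assumed: same keys for Projected Newton
run_methods(x_tr, y_tr, x_test, y_test,
            np.array([1.5, 0.15, 0.1]),   # initial kernel hyperparameters
            [lbfgsb_options, pn_options], # one dict per optimizer in the zip
            'means_optimizers', 20, 'means: L-BFGS-B vs Projected Newton', show=True)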
Code example #2
File: visualization.py  Project: afcarl/CourseProject
def onclick(event):
    # Matplotlib event handler: each click appends a training point and redraws the
    # regression; relies on module-level state (data_points, data_targets, x_test, ...)
    plt.close('all')

    point_x, point_y = event.xdata, event.ydata
    data_points.append(point_x)
    data_targets.append(point_y)

    x_tr = np.array(data_points).reshape(-1)[None, :]  # training inputs of shape (1, n)
    y_tr = np.array(data_targets)
    new_gp = GPR(model_covariance_obj, method=method)
    # new_gp.fit(x_tr, y_tr, max_iter=max_iter)
    print(new_gp.covariance_obj.get_params())
    predicted_y_test, high, low = new_gp.predict(x_test, x_tr, y_tr)

    fig = plt.figure()
    gp_plot_reg_data(x_tr, y_tr, 'yo')
    gp_plot_reg_data(x_test, predicted_y_test, 'b')
    gp_plot_reg_data(x_test, means_y_test, '--b')
    gp_plot_reg_data(means_inducing_points, means_mean, 'bo', markersize=12)
    gp_plot_reg_data(x_test, means_low, '--g')
    gp_plot_reg_data(x_test, means_high, '--r')
    gp_plot_reg_data(x_test, low, 'g-')
    gp_plot_reg_data(x_test, high, 'r-')
    gp_plot_reg_data(x_test, y_test, 'y-')
    # Re-register this handler on the new figure so the next click works too
    fig.canvas.mpl_connect('button_press_event', onclick)
    plt.ylim(-2, 2)
    plt.xlim(0, 1)
    plt.show()
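
onclick re-registers itself on each redraw, so all that is needed to start the loop is a first figure wired to the handler. A sketch, assuming the module-level names come from the rest of visualization.py:

# Sketch: initial empty figure that starts the click-to-add-points loop.
data_points, data_targets = [], []  # module-level lists mutated by onclick
fig = plt.figure()
plt.xlim(0, 1)
plt.ylim(-2, 2)
fig.canvas.mpl_connect('button_press_event', onclick)
plt.show()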
Code example #3
import numpy as np
from matplotlib import pyplot as plt
from sklearn.cluster import KMeans

from GP.covariance_functions import SquaredExponential
from GP.gaussian_process_regression import GPR


def run_methods(train_points, train_targets, test_points, test_targets,
                model_parameters, optimizer_options, file_name, ind_num, title, show=False):

    # Initialize the inducing inputs at the K-means cluster centers of the training data
    print('Finding means...')
    means = KMeans(n_clusters=ind_num, n_init=1, max_iter=40)
    means.fit(train_points.T)
    inputs = means.cluster_centers_.T
    print('...found')

    # method = 'svi'
    # parametrization = 'natural'
    # # optimizer = 'L-BFGS-B'
    # color = '-yo'
    # opts = optimizer_options[0]
    # print('svi')
    # model_covariance_obj = SquaredExponential(np.copy(model_parameters))
    # new_gp = GPR(model_covariance_obj, method=method, parametrization=parametrization)
    # res = new_gp.fit(train_points, train_targets, num_inputs=ind_num, optimizer_options=opts, inputs=inputs)
    # name = 'svi-natural'
    # metric = lambda w: new_gp.get_prediction_quality(w, test_points, test_targets)
    # x_lst, y_lst = res.plot_performance(metric, 'i', freq=1)
    # plt.plot(x_lst, y_lst, color, label=name)

    print('vi-means')
    method = 'means'
    opt_options = optimizer_options[1]

    model_covariance_obj = SquaredExponential(np.copy(model_parameters))
    new_gp = GPR(model_covariance_obj, method=method)
    res = new_gp.fit(train_points, train_targets, num_inputs=ind_num, optimizer_options=opt_options, inputs=inputs)
    name = 'vi-means'
    metric = lambda w: new_gp.get_prediction_quality(w, train_points, train_targets, test_points, test_targets)
    x_lst, y_lst = res.plot_performance(metric, 'i', freq=1)
    plt.plot(x_lst, y_lst, '-kx', label=name)
    print(x_lst[-1])

    plt.xlabel('Epoch')
    plt.ylabel('$R^2$-score on test data')
    plt.legend()
    plt.title(title)
    # plt.savefig('../Plots/vi_vs_svi/'+file_name + '.pgf')
    if show:
        plt.show()
Code example #4
import numpy as np
from matplotlib import pyplot as plt
from sklearn.cluster import KMeans

from GP.covariance_functions import SquaredExponential
from GP.gaussian_process_regression import GPR


def run_methods(train_points, train_targets, test_points, test_targets,
                model_parameters, optimizer_options, file_name, ind_num, title, show=False):

    method = 'svi'
    parametrization = 'cholesky'

    # Initialize the inducing inputs at the K-means cluster centers of the training data
    means = KMeans(n_clusters=ind_num, n_init=3, max_iter=100, random_state=241)
    means.fit(train_points.T)
    inputs = means.cluster_centers_.T

    # for optimizer, color, opts in zip(['SAG', 'FG', 'L-BFGS-B'], ['-ro', '-bo', '-go'],
    #                                   optimizer_options[:-1]):
    #     print('Optimizer', optimizer)
    #     model_covariance_obj = SquaredExponential(np.copy(model_parameters))
    #     new_gp = GPR(model_covariance_obj, method=method, parametrization=parametrization, optimizer=optimizer)
    #     res = new_gp.fit(train_points, train_targets, num_inputs=ind_num, optimizer_options=opts, inputs=inputs)
    #     name = 'svi-' + optimizer
    #     metric = lambda w: new_gp.get_prediction_quality(w, test_points, test_targets)
    #     x_lst, y_lst = res.plot_performance(metric, 'i', freq=5)
    #     plt.plot(x_lst, y_lst, color, label=name)

    parametrization = 'natural'
    print('Natural parametrization')

    opt_options = optimizer_options[-1]

    model_covariance_obj = SquaredExponential(np.copy(model_parameters))
    new_gp = GPR(model_covariance_obj, method=method, parametrization=parametrization)
    res = new_gp.fit(train_points, train_targets, num_inputs=ind_num, optimizer_options=opt_options, inputs=inputs)
    name = 'svi-natural'
    # Note: for 'svi' the prediction-quality metric takes only the test set
    metric = lambda w: new_gp.get_prediction_quality(w, test_points, test_targets)
    x_lst, y_lst = res.plot_performance(metric, 'i', freq=5)
    print(y_lst)
    plt.plot(x_lst, y_lst, '-yo', label=name)

    plt.xlabel('Epoch')
    plt.ylabel('$R^2$-score on test data')
    plt.legend()
    plt.title(title)
    # plt.savefig('../Plots/svi_variations/'+file_name + '.pgf')
    if show:
        plt.show()
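
run_methods indexes optimizer_options by position: the commented-out loop would consume optimizer_options[:-1] for SAG, FG and L-BFGS-B, and the natural-parametrization run takes the last entry. A sketch of a compatible list; apart from sag_options (code example #6), the key sets are assumptions.

sag_options = {'maxiter': 300, 'batch_size': 100, 'print_freq': 10}
fg_options = {'maxiter': 300}                          # assumed keys
lbfgsb_options = {'maxiter': 300, 'disp': False}
natural_options = {'maxiter': 300, 'batch_size': 100}  # assumed keys
optimizer_options = [sag_options, fg_options, lbfgsb_options, natural_options]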
Code example #5
File: visualization.py  Project: afcarl/CourseProject
import numpy as np
from matplotlib import pyplot as plt

from GP.covariance_functions import SquaredExponential
from GP.gaussian_process_regression import GPR
from GP.plotting import gp_plot_reg_data

data_params = np.array([1.1, 0.3, 0.1])
data_covariance_obj = SquaredExponential(data_params)
# model_params = np.array([10.6, 5.2, 0.1])
model_params = np.array([1.5, 0.15, 0.1])
model_covariance_obj = SquaredExponential(model_params)
gp = GPR(data_covariance_obj)
num = 700
test_num = 100
dim = 1
seed = 22
method = 'brute'  # possible methods: 'brute', 'vi', 'means', 'svi'
parametrization = 'natural'  # possible parametrizations for svi method: cholesky, natural
ind_inputs_num = 5
max_iter = 100
lbfgsb_options = {'maxiter': max_iter, 'disp': False}

np.random.seed(seed)
x_tr = np.random.rand(dim, num)  # training inputs of shape (dim, num)
if dim == 1:
    x_test = np.linspace(0, 1, test_num)
    x_test = x_test.reshape(1, test_num)
else:
    x_test = np.random.rand(dim, test_num)
y_tr, y_test = gp.generate_data(x_tr, x_test, seed=seed)
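
With method = 'brute' selected above, the script can proceed as in the method-dispatch snippet at the end of this section: fit the full GP and plot the predictive mean with its confidence bands. A sketch using only calls that appear elsewhere in these examples:

new_gp = GPR(model_covariance_obj)
new_gp.fit(x_tr, y_tr, max_iter=max_iter)
predicted_y_test, high, low = new_gp.predict(x_test, x_tr, y_tr)
gp_plot_reg_data(x_tr, y_tr, 'yo')               # training points
gp_plot_reg_data(x_test, predicted_y_test, 'b')  # predictive mean
gp_plot_reg_data(x_test, high, 'r-')             # upper confidence band
gp_plot_reg_data(x_test, low, 'g-')              # lower confidence band
plt.show()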
Code example #6
import numpy as np

from GP.gaussian_process_regression import GPR
from GP.plotting import plot_reg_data, plot_predictive
from GP.covariance_functions import SquaredExponential, Matern, GammaExponential
from matplotlib.mlab import griddata  # note: removed in matplotlib >= 3.1; unused in this snippet

data_params = np.array([1.0, 0.15, 0.1])
data_covariance_obj = SquaredExponential(data_params)

# model_params = np.array([1.0, 1., 0.1])
# model_covariance_obj = SquaredExponential(model_params)
# model_params = np.array([1.0, 0.1, 0.5, 0.1])
# model_covariance_obj = GammaExponential(model_params)
model_params = np.array([0.3, 0.2, .1])
model_covariance_obj = SquaredExponential(model_params)

gp = GPR(data_covariance_obj)
num = 50
test_num = 100
dim = 1
seed = 10
ind_inputs_num = 5
max_iter = 200
batch_size = 50

method = 'brute'  # possible methods: 'brute', 'vi', 'means', 'svi'
parametrization = 'cholesky'  # possible parametrizations for svi method: cholesky, natural
optimizer = 'L-BFGS-B'
# possible optimizers: 'AdaDelta', 'FG', 'L-BFGS-B' for cholesky-svi;
# 'L-BFGS-B' and 'Projected Newton' for 'means' and 'vi'

sag_options = {'maxiter': max_iter, 'batch_size': 20, 'print_freq': 10}
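
sag_options would then be handed to fit through optimizer_options once method is switched to 'svi'. A sketch, assuming x_tr and y_tr are generated as in the neighbouring snippets and that 'SAG' is accepted as a cholesky-svi optimizer (the comment above lists 'AdaDelta', 'FG' and 'L-BFGS-B'; 'SAG' appears in code example #4):

model_covariance_obj = SquaredExponential(model_params)
new_gp = GPR(model_covariance_obj, method='svi', parametrization='cholesky',
             optimizer='SAG')  # optimizer choice is an assumption
new_gp.fit(x_tr, y_tr, num_inputs=ind_inputs_num, optimizer_options=sag_options)
predicted_y_test, high, low = new_gp.predict(x_test)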
Code example #7
import numpy as np

from experiments_svi_variations import run_methods
from GP.covariance_functions import SquaredExponential
from GP.gaussian_process_regression import GPR

data_params = np.array([1.1, 0.3, 0.1])
data_covariance_obj = SquaredExponential(data_params)
model_params = np.array([2.0, 1.0, 1.0])
model_covariance_obj = SquaredExponential(model_params)
gp = GPR(data_covariance_obj)
num = 500
test_num = 500
dim = 2
seed = 21
ind_inputs_num = 100
max_iter = 300
batch_size = 100
title = 'generated dataset, n = 500, d = 2, m=100'
file_name = 'small_generated'

# Generating data points
np.random.seed(seed)
x_tr = np.random.rand(dim, num)
if dim == 1:
    x_test = np.linspace(0, 1, test_num)
    x_test = x_test.reshape(1, test_num)
else:
    x_test = np.random.rand(dim, test_num)
y_tr, y_test = gp.generate_data(x_tr, x_test, seed=seed)
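
The snippet stops just before the call into experiments_svi_variations. A plausible continuation, with the optimizer_options list assumed (only its last entry is read by the natural-parametrization run in code example #4):

optimizer_options = [{'maxiter': max_iter, 'batch_size': batch_size, 'print_freq': 10}]  # assumed
run_methods(x_tr, y_tr, x_test, y_test, model_params,
            optimizer_options, file_name, ind_inputs_num, title, show=True)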
Code example #8
import numpy as np
from matplotlib import pyplot as plt
from sklearn.cluster import KMeans
from sklearn.metrics import r2_score

from GP.covariance_functions import SquaredExponential
from GP.gaussian_process_regression import GPR


def run_methods(train_points, train_targets, test_points, test_targets,
                model_parameters, m_list, file_name, title, show=False, full=True, vi=True):

    method = 'means'
    optimizer = 'L-BFGS-B'
    max_iter = 50
    options = {'maxiter': max_iter, 'disp': False, 'mydisp': True}

    means_r2 = []
    vi_r2 = []

    # Evaluate each method for a growing number m of inducing inputs
    for m in m_list:
        print('m:', m)
        print('Finding means...')
        means = KMeans(n_clusters=m, n_init=1, max_iter=20)
        means.fit(train_points.T)
        inputs = means.cluster_centers_.T
        print('...found')

        model_covariance_obj = SquaredExponential(np.copy(model_parameters))
        new_gp = GPR(model_covariance_obj, method='means', optimizer=optimizer)
        res = new_gp.fit(train_points, train_targets, num_inputs=m, optimizer_options=options, inputs=inputs)
        predicted_y_test, _, _ = new_gp.predict(test_points)
        means_r2.append(r2_score(test_targets, predicted_y_test))

        if vi:
            model_covariance_obj = SquaredExponential(np.copy(model_parameters))
            new_gp = GPR(model_covariance_obj, method='vi', optimizer=optimizer)
            res = new_gp.fit(train_points, train_targets, num_inputs=m, optimizer_options=options, inputs=inputs)
            predicted_y_test, _, _ = new_gp.predict(test_points)
            vi_r2.append(r2_score(test_targets, predicted_y_test))

    if full:
        # Full (non-sparse) GP regression as a baseline
        model_covariance_obj = SquaredExponential(np.copy(model_parameters))
        new_gp = GPR(model_covariance_obj, method='brute')
        res = new_gp.fit(train_points, train_targets, max_iter=max_iter)
        predicted_y_test, _, _ = new_gp.predict(test_points, train_points, train_targets)
        brute_r2 = r2_score(test_targets, predicted_y_test)

    plt.plot(range(len(m_list)), means_r2, '-kx', label='vi-means')
    if vi:
        plt.plot(range(len(m_list)), vi_r2, '-rx', label='vi')
    if full:
        plt.plot(range(len(m_list)), len(m_list) * [brute_r2], '--g', label='full GP')

    plt.xticks(range(len(m_list)), m_list)
    plt.xlabel('m')
    plt.ylabel('$R^2$-score on test data')
    # plt.ylim(0.5, 1)
    plt.legend(loc=4)
    plt.title(title)
    plt.savefig('../Plots/inducing_inputs/'+file_name + '.pgf')
    if show:
        plt.show()
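
A sketch of how this sweep might be invoked; the m_list values, file name and title are assumptions, and x_tr, y_tr, x_test, y_test are generated as in the other snippets:

m_list = [5, 10, 20, 50, 100]  # assumed grid of inducing-input counts
run_methods(x_tr, y_tr, x_test, y_test, np.array([2.0, 1.0, 1.0]),
            m_list, 'inducing_inputs', 'R^2 vs number of inducing inputs', show=True)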
Code example #9
import time

import numpy as np

from GP.covariance_functions import SquaredExponential
from GP.gaussian_process_regression import GPR

# x_tr, y_tr and x_test are assumed to be generated as in the snippets above

# model_params = np.array([1., 0.5, 1.])
# model_covariance_obj = SquaredExponential(model_params)
model_params = np.array([0.6, 0.3, 0.1])
model_covariance_obj = SquaredExponential(model_params)
num = 200
test_num = 100
dim = 1
seed = 21
method = 'means'  # possible methods: 'brute', 'vi', 'means', 'svi'
parametrization = 'natural'  # possible parametrizations for svi method: cholesky, natural
ind_inputs_num = 30
max_iter = 100

if method == 'brute':
    new_gp = GPR(model_covariance_obj)
    new_gp.fit(x_tr, y_tr, max_iter=max_iter)
    predicted_y_test, high, low = new_gp.predict(x_test, x_tr, y_tr)

elif method == 'means' or method == 'vi':
    model_covariance_obj = SquaredExponential(model_params)
    new_gp = GPR(model_covariance_obj, method=method)
    start = time.time()
    new_gp.fit(x_tr, y_tr, num_inputs=ind_inputs_num, max_iter=max_iter)
    print(time.time() - start)
    inducing_points, mean, cov = new_gp.inducing_inputs  # inducing points with their variational mean and covariance
    predicted_y_test, high, low = new_gp.predict(x_test)

elif method == 'svi':
    model_covariance_obj = SquaredExponential(model_params)
    new_gp = GPR(model_covariance_obj, method=method, parametrization=parametrization)
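
The listing is cut off inside the 'svi' branch. Judging by the fit calls in code examples #3 and #4, it presumably continues roughly as follows (the exact keyword set is an assumption):

    # assumed continuation of the 'svi' branch
    new_gp.fit(x_tr, y_tr, num_inputs=ind_inputs_num, max_iter=max_iter)
    inducing_points, mean, cov = new_gp.inducing_inputs
    predicted_y_test, high, low = new_gp.predict(x_test)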