num_pseudo_params = 10#50 
    n_trials = 2

    dimension_set = [[1,1],[1,1,1],[1,1,1,1]]#[[1,1],[1,1,1],[1,1,1,1]]
    n_data_set = [75,150,300]

    npr.seed(0)
    rs = npr.RandomState(0)

    results = []

    for i in xrange(n_trials):
        
        print("Trial {}".format(i))
        for n_data in n_data_set:
            X, y = build_step_function_dataset(D=1, n_data=n_data)
            X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
            for dimensions in dimension_set:
                n_layers = len(dimensions)-1 
                start_time = time.time()

                total_num_params, log_likelihood, sample_mean_cov_from_deep_gp, predict_layer_funcs, squared_error, create_deep_map = \
                    build_deep_gp(dimensions, rbf_covariance, num_pseudo_params, random)

                init_params = .1 * npr.randn(total_num_params)
                deep_map = create_deep_map(init_params)

                init_params = initialize(deep_map,X,num_pseudo_params)
                print("Optimizing covariance parameters...")
                objective = lambda params: -log_likelihood(params,X,y,n_samples)
# Example #2
import autograd.numpy as np
import autograd.numpy.random as npr
from autograd import value_and_grad,grad
from scipy.optimize import minimize

from deep_gaussian_process_6 import rbf_covariance, pack_gp_params, pack_layer_params, pack_deep_params, build_deep_gp, initialize,build_step_function_dataset

if __name__ == '__main__':
    # Deep-GP demo on a 1-D step-function dataset, comparing a one-layer and
    # a two-layer architecture side by side.
    random = 1 
    n_samples = 10 
    n_samples_to_plot = 10 
    
    n_data = 20 
    input_dimension = 1
    num_pseudo_params = 10 
    X, y = build_step_function_dataset(D=input_dimension, n_data=n_data)

    dimension_set = [[1,1],[1,1,1]] # Architecture of the GP. Last layer should always be 1 
    
    # NOTE(review): `plt` is used but matplotlib is not imported in this
    # fragment's visible import block — presumably
    # `import matplotlib.pyplot as plt` lived in the missing part; confirm.
    fig = plt.figure(figsize=(20,8), facecolor='white')
    ax_one_layer = fig.add_subplot(121, frameon=False)
    ax_two_layer = fig.add_subplot(122, frameon=False)
    plt.show(block=False) 

    for dimensions in dimension_set:
        n_layers = len(dimensions)-1 

        # build_deep_gp returns the total parameter count plus closures for the
        # log-likelihood, sampling, prediction, error, and parameter packing.
        total_num_params, log_likelihood, sample_mean_cov_from_deep_gp, predict_layer_funcs, squared_error, create_deep_map = \
            build_deep_gp(dimensions, rbf_covariance, num_pseudo_params, random)

        # NOTE(review): truncated fragment — the body of `callback` (and the
        # rest of this script) is missing from this view.
        def callback(params):

if __name__ == '__main__':
    # Three-layer deep-GP experiment on a 1-D step function with a held-out
    # test split. NOTE(review): this fragment appears to continue past the end
    # of the visible source (it stops right before the optimization step).
    random = 1

    n_samples = 10
    n_samples_to_test = 100
    num_pseudo_params = 10

    # Four 1-D layers => 3 GP layers (last entry is the output dimension).
    dimensions = [1, 1, 1, 1]
    n_data = 60

    npr.seed(1)  #Randomness comes from KMeans
    rs = npr.RandomState(1)

    X, y = build_step_function_dataset(D=1, n_data=n_data)
    # NOTE(review): `train_test_split` (scikit-learn) is not imported in this
    # file's visible import block — confirm the missing import.
    X_train, X_test, y_train, y_test = train_test_split(X,
                                                        y,
                                                        test_size=0.2,
                                                        random_state=42)

    n_layers = len(dimensions) - 1

    # build_deep_gp returns the total parameter count plus closures for the
    # log-likelihood, sampling, prediction, error, and parameter packing.
    total_num_params, log_likelihood, sample_mean_cov_from_deep_gp, predict_layer_funcs, squared_error, create_deep_map = \
            build_deep_gp(dimensions, rbf_covariance, num_pseudo_params, random)

    # Small random init, then refine (e.g. pseudo-input placement) from the data.
    init_params = .1 * rs.randn(total_num_params)
    deep_map = create_deep_map(init_params)

    init_params = initialize(deep_map, X, num_pseudo_params)
    print("Optimizing covariance parameters...")