Example No. 1
def modelselection_grid_search_simple(traindat=traindat,
                                      label_traindat=label_traindat):
    from shogun.Evaluation import CrossValidation, CrossValidationResult
    from shogun.Evaluation import ContingencyTableEvaluation, ACCURACY
    from shogun.Evaluation import StratifiedCrossValidationSplitting
    from shogun.ModelSelection import GridSearchModelSelection
    from shogun.ModelSelection import ModelSelectionParameters, R_EXP
    from shogun.ModelSelection import ParameterCombination
    from shogun.Features import Labels
    from shogun.Features import RealFeatures
    from shogun.Classifier import LibLinear, L2R_L2LOSS_SVC

    # build parameter tree to select C1 and C2
    param_tree_root = ModelSelectionParameters()
    c1 = ModelSelectionParameters("C1")
    param_tree_root.append_child(c1)
    c1.build_values(-2.0, 2.0, R_EXP)

    c2 = ModelSelectionParameters("C2")
    param_tree_root.append_child(c2)
    c2.build_values(-2.0, 2.0, R_EXP)

    # training data
    features = RealFeatures(traindat)
    labels = Labels(label_traindat)

    # classifier
    classifier = LibLinear(L2R_L2LOSS_SVC)

    # splitting strategy for cross-validation
    splitting_strategy = StratifiedCrossValidationSplitting(labels, 10)

    # evaluation method
    evaluation_criterium = ContingencyTableEvaluation(ACCURACY)

    # cross-validation instance
    cross_validation = CrossValidation(classifier, features, labels,
                                       splitting_strategy,
                                       evaluation_criterium)

    # model selection instance
    model_selection = GridSearchModelSelection(param_tree_root,
                                               cross_validation)

    # perform model selection with selected methods
    print "performing model selection of"
    param_tree_root.print_tree()
    best_parameters = model_selection.select_model()

    # print best parameters
    print "best parameters:"
    best_parameters.print_tree()

    # apply them and print result
    best_parameters.apply_to_machine(classifier)
    result = cross_validation.evaluate()
    result.print_result()
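
The function expects Shogun's column-major data layout: traindat holds one example per column and label_traindat the corresponding +/-1 labels. A minimal driver sketch (the blob data and seed are illustrative assumptions; in a real script the data would be defined before the def so the default arguments resolve):

# Hypothetical driver: two 2-D Gaussian blobs with labels in {-1, +1};
# RealFeatures expects a dim x num_vectors array (one example per column).
from numpy import concatenate, ones
from numpy.random import randn, seed

seed(17)
num = 30
traindat = concatenate((randn(2, num) - 1, randn(2, num) + 1), axis=1)
label_traindat = concatenate((-ones(num), ones(num)))

modelselection_grid_search_simple(traindat, label_traindat)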
Example No. 4
def modelselection_parameter_tree_modular():
    from shogun.ModelSelection import ParameterCombination
    from shogun.ModelSelection import ModelSelectionParameters, R_EXP, R_LINEAR
    from shogun.ModelSelection import DynamicParameterCombinationArray
    from shogun.Kernel import PowerKernel
    from shogun.Kernel import GaussianKernel
    from shogun.Kernel import DistantSegmentsKernel
    from shogun.Distance import MinkowskiMetric

    root = ModelSelectionParameters()

    combinations = root.get_combinations()
    combinations.get_num_elements()

    c = ModelSelectionParameters('C')
    root.append_child(c)
    c.build_values(1, 11, R_EXP)

    power_kernel = PowerKernel()
    param_power_kernel = ModelSelectionParameters('kernel', power_kernel)
    root.append_child(param_power_kernel)

    param_power_kernel_degree = ModelSelectionParameters('degree')
    param_power_kernel_degree.build_values(1, 1, R_EXP)
    param_power_kernel.append_child(param_power_kernel_degree)

    metric1 = MinkowskiMetric(10)
    param_power_kernel_metric1 = ModelSelectionParameters('distance', metric1)

    param_power_kernel.append_child(param_power_kernel_metric1)

    param_power_kernel_metric1_k = ModelSelectionParameters('k')
    param_power_kernel_metric1_k.build_values(1, 12, R_LINEAR)
    param_power_kernel_metric1.append_child(param_power_kernel_metric1_k)

    gaussian_kernel = GaussianKernel()
    param_gaussian_kernel = ModelSelectionParameters('kernel', gaussian_kernel)

    root.append_child(param_gaussian_kernel)

    param_gaussian_kernel_width = ModelSelectionParameters('width')
    param_gaussian_kernel_width.build_values(1, 2, R_EXP)
    param_gaussian_kernel.append_child(param_gaussian_kernel_width)

    ds_kernel = DistantSegmentsKernel()
    param_ds_kernel = ModelSelectionParameters('kernel', ds_kernel)

    root.append_child(param_ds_kernel)

    param_ds_kernel_delta = ModelSelectionParameters('delta')
    param_ds_kernel_delta.build_values(1, 2, R_EXP)
    param_ds_kernel.append_child(param_ds_kernel_delta)

    param_ds_kernel_theta = ModelSelectionParameters('theta')
    param_ds_kernel_theta.build_values(1, 2, R_EXP)
    param_ds_kernel.append_child(param_ds_kernel_theta)

    root.print_tree()
    combinations = root.get_combinations()
    for i in range(combinations.get_num_elements()):
        combinations.get_element(i).print_tree()

    return
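
build_values(lower, upper, R_EXP) fills a node with exponentially spaced candidate values, while R_LINEAR gives a plain linear range. A hedged illustration in NumPy (not the Shogun API; it assumes R_EXP uses base 2 with step 1, consistent with the C values in Example No. 1):

# Assumed equivalents of the value grids built above (base 2, step 1).
import numpy as np

exp_grid = 2.0 ** np.arange(-2.0, 3.0)  # build_values(-2.0, 2.0, R_EXP) -> [0.25, 0.5, 1, 2, 4]
lin_grid = np.arange(1.0, 13.0)         # build_values(1, 12, R_LINEAR) -> 1, 2, ..., 12
print exp_grid
print lin_grid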
def run_demo():
    LG.basicConfig(level=LG.INFO)
    random.seed(572)

    #1. create toy data
    [x, y] = create_toy_data()

    feat_train = RealFeatures(transpose(x))
    labels = RegressionLabels(y)

    n_dimensions = 1

    #2. locations of equally spaced predictions
    X = SP.linspace(0, 10, 10)[:, SP.newaxis]

    # new interface with likelihood parameters decoupled from the covariance function
    likelihood = GaussianLikelihood()
    covar_parms = SP.log([2])
    hyperparams = {'covar': covar_parms, 'lik': SP.log([1])}

    #construct covariance function
    SECF = GaussianKernel(feat_train, feat_train, 2)
    covar = SECF
    zmean = ZeroMean()
    inf = ExactInferenceMethod(SECF, feat_train, zmean, labels, likelihood)

    gp = GaussianProcessRegression(inf, feat_train, labels)

    root = ModelSelectionParameters()
    c1 = ModelSelectionParameters("inference_method", inf)
    root.append_child(c1)

    c2 = ModelSelectionParameters("scale")
    c1.append_child(c2)
    c2.build_values(0.01, 4.0, R_LINEAR)
    c3 = ModelSelectionParameters("likelihood_model", likelihood)
    c1.append_child(c3)

    c4 = ModelSelectionParameters("sigma")
    c3.append_child(c4)
    c4.build_values(0.001, 4.0, R_LINEAR)
    c5 = ModelSelectionParameters("kernel", SECF)
    c1.append_child(c5)

    c6 = ModelSelectionParameters("width")
    c5.append_child(c6)
    c6.build_values(0.001, 4.0, R_LINEAR)

    crit = GradientCriterion()

    grad = GradientEvaluation(gp, feat_train, labels, crit)

    grad.set_function(inf)

    gp.print_modsel_params()

    root.print_tree()

    grad_search = GradientModelSelection(root, grad)

    grad.set_autolock(0)

    best_combination = grad_search.select_model(1)

    gp.set_return_type(GaussianProcessRegression.GP_RETURN_COV)

    St = gp.apply_regression(feat_train)

    St = St.get_labels()

    gp.set_return_type(GaussianProcessRegression.GP_RETURN_MEANS)

    M = gp.apply_regression()

    M = M.get_labels()

    #create plots
    plot_sausage(transpose(x), transpose(M), transpose(SP.sqrt(St)))
    plot_training_data(x, y)
    PL.show()
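
run_demo relies on project-specific helpers (create_toy_data, plot_sausage, plot_training_data) and module-level imports that are not shown. A hedged stand-in for the data helper, with shapes matching its usage (inputs in the rows of x, a one-dimensional target vector y); the plotting helpers are likewise project-specific and not sketched here:

# Hypothetical stand-in for the create_toy_data() helper the demo assumes.
import numpy as np

def create_toy_data():
    x = np.linspace(0, 10, 80)[:, np.newaxis]        # (80, 1) inputs
    y = np.sin(x[:, 0]) + 0.5 * np.random.randn(80)  # (80,) noisy sine targets
    return [x, y]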
Example No. 7
def modelselection_parameter_tree_modular(dummy):
    from shogun.ModelSelection import ParameterCombination
    from shogun.ModelSelection import ModelSelectionParameters, R_EXP, R_LINEAR
    from shogun.ModelSelection import DynamicParameterCombinationArray
    from shogun.Kernel import PowerKernel
    from shogun.Kernel import GaussianKernel
    from shogun.Kernel import DistantSegmentsKernel
    from shogun.Distance import MinkowskiMetric

    root = ModelSelectionParameters()

    combinations = root.get_combinations()
    combinations.get_num_elements()

    c = ModelSelectionParameters('C')
    root.append_child(c)
    c.build_values(1, 11, R_EXP)

    power_kernel = PowerKernel()

    # print all parameters available for model selection
    # don't worry if yours is not included, but write to the mailing list
    power_kernel.print_modsel_params()

    param_power_kernel = ModelSelectionParameters('kernel', power_kernel)
    root.append_child(param_power_kernel)

    param_power_kernel_degree = ModelSelectionParameters('degree')
    param_power_kernel_degree.build_values(1, 1, R_EXP)
    param_power_kernel.append_child(param_power_kernel_degree)

    metric1 = MinkowskiMetric(10)

    # print all parameters available for model selection
    # don't worry if yours is not included, but write to the mailing list
    metric1.print_modsel_params()

    param_power_kernel_metric1 = ModelSelectionParameters('distance', metric1)

    param_power_kernel.append_child(param_power_kernel_metric1)

    param_power_kernel_metric1_k = ModelSelectionParameters('k')
    param_power_kernel_metric1_k.build_values(1, 12, R_LINEAR)
    param_power_kernel_metric1.append_child(param_power_kernel_metric1_k)

    gaussian_kernel = GaussianKernel()

    # print all parameters available for model selection
    # don't worry if yours is not included, but write to the mailing list
    gaussian_kernel.print_modsel_params()

    param_gaussian_kernel = ModelSelectionParameters('kernel', gaussian_kernel)

    root.append_child(param_gaussian_kernel)

    param_gaussian_kernel_width = ModelSelectionParameters('width')
    param_gaussian_kernel_width.build_values(1, 2, R_EXP)
    param_gaussian_kernel.append_child(param_gaussian_kernel_width)

    ds_kernel = DistantSegmentsKernel()

    # print all parameters available for model selection
    # don't worry if yours is not included, but write to the mailing list
    ds_kernel.print_modsel_params()

    param_ds_kernel = ModelSelectionParameters('kernel', ds_kernel)

    root.append_child(param_ds_kernel)

    param_ds_kernel_delta = ModelSelectionParameters('delta')
    param_ds_kernel_delta.build_values(1, 2, R_EXP)
    param_ds_kernel.append_child(param_ds_kernel_delta)

    param_ds_kernel_theta = ModelSelectionParameters('theta')
    param_ds_kernel_theta.build_values(1, 2, R_EXP)
    param_ds_kernel.append_child(param_ds_kernel_theta)

    root.print_tree()
    combinations = root.get_combinations()
    for i in range(combinations.get_num_elements()):
        combinations.get_element(i).print_tree()

    return
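
The dummy argument is never used by the function body, so the walk-through can be invoked directly:

if __name__ == '__main__':
    # the positional argument is ignored by the function body
    modelselection_parameter_tree_modular(0)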
def run_demo():
    LG.basicConfig(level=LG.INFO)
    random.seed(572)

    x = np.linspace(0.0, 1.0, 80)
    y = np.sin(3.0 * np.pi * x)
    x = x[:, np.newaxis]

    feat_train = RealFeatures(transpose(x))
    labels = RegressionLabels(y)
    n_dimensions = 1

    # new interface with likelihood parameters decoupled from the covariance function
    likelihood = GaussianLikelihood()
    covar_parms = SP.log([2])
    hyperparams = {'covar': covar_parms, 'lik': SP.log([1])}

    # construct covariance function
    SECF = GaussianKernel(feat_train, feat_train, 2)
    covar = SECF
    zmean = ZeroMean()
    inf = ExactInferenceMethod(SECF, feat_train, zmean, labels, likelihood)

    gp = GaussianProcessRegression(inf, feat_train, labels)

    root = ModelSelectionParameters()
    c1 = ModelSelectionParameters("inference_method", inf)
    root.append_child(c1)
    c2 = ModelSelectionParameters("scale")
    c1.append_child(c2)
    c2.build_values(0.01, 4.0, R_LINEAR)
    c3 = ModelSelectionParameters("likelihood_model", likelihood)
    c1.append_child(c3)
    c4 = ModelSelectionParameters("sigma")
    c3.append_child(c4)
    c4.build_values(0.001, 4.0, R_LINEAR)
    c5 = ModelSelectionParameters("kernel", SECF)
    c1.append_child(c5)
    c6 = ModelSelectionParameters("width")
    c5.append_child(c6)
    c6.build_values(0.001, 4.0, R_LINEAR)

    crit = GradientCriterion()

    grad = GradientEvaluation(gp, feat_train, labels, crit)
    grad.set_function(inf)

    gp.print_modsel_params()

    root.print_tree()

    grad_search = GradientModelSelection(root, grad)
    grad.set_autolock(0)

    best_combination = grad_search.select_model(1)

    x_test = np.linspace(0.0, 1.0, 100)
    x_test = x_test[:, np.newaxis]
    feat_test = RealFeatures(transpose(x_test))

    gp.set_return_type(GaussianProcessRegression.GP_RETURN_COV)

    St = gp.apply_regression(feat_test)
    St = St.get_labels()

    gp.set_return_type(GaussianProcessRegression.GP_RETURN_MEANS)

    M = gp.apply_regression()
    M = M.get_labels()

    # create plots
    pylab.figure()
    # pylab.plot(x, y, 'rx')
    pylab.plot(x_test, M, 'ro')
    pylab.show()
def evaluation_cross_validation_classification(fm_train=traindat, fm_test=testdat,
                                               label_train=label_traindat,
                                               width=2.1, C=1, epsilon=1e-5,
                                               tube_epsilon=1e-2):
    # (despite the name, this example does regression with LibSVR)
    from shogun.Evaluation import CrossValidation, CrossValidationResult
    from shogun.Evaluation import MeanSquaredError
    from shogun.Evaluation import CrossValidationSplitting
    from shogun.Features import Labels
    from shogun.Features import RealFeatures
    from shogun.Kernel import GaussianKernel
    from shogun.Regression import LibSVR
    from shogun.ModelSelection import GridSearchModelSelection
    from shogun.ModelSelection import ModelSelectionParameters, R_EXP
    from shogun.ModelSelection import ParameterCombination

    # training data
    features_train = RealFeatures(fm_train)
    labels = Labels(label_train)

    # kernel
    kernel = GaussianKernel(features_train, features_train, width)

    # print all parameters available for model selection
    # don't worry if yours is not included, but write to the mailing list
    kernel.print_modsel_params()

    # predictor
    predictor = LibSVR(C, tube_epsilon, kernel, labels)
    predictor.set_epsilon(epsilon)

    # splitting strategy for 5-fold cross-validation (for classification it is
    # better to use StratifiedCrossValidationSplitting; the plain
    # CrossValidationSplitting used here suits regression)
    splitting_strategy = CrossValidationSplitting(labels, 5)

    # evaluation method
    evaluation_criterium = MeanSquaredError()

    # cross-validation instance
    cross_validation = CrossValidation(predictor, features_train, labels,
                                       splitting_strategy, evaluation_criterium)

    # (optional) repeat x-val 10 times
    cross_validation.set_num_runs(10)

    # (optional) request 95% confidence intervals for results (not actually
    # needed for this toy example)
    cross_validation.set_conf_int_alpha(0.05)

    # print all parameters available for model selection
    # don't worry if yours is not included, but write to the mailing list
    predictor.print_modsel_params()

    # build parameter tree to select C1 and C2
    param_tree_root = ModelSelectionParameters()
    c1 = ModelSelectionParameters("C1")
    param_tree_root.append_child(c1)
    c1.build_values(-2.0, 2.0, R_EXP)

    c2 = ModelSelectionParameters("C2")
    param_tree_root.append_child(c2)
    c2.build_values(-2.0, 2.0, R_EXP)

    # model selection instance
    model_selection = GridSearchModelSelection(param_tree_root,
                                               cross_validation)

    # perform model selection with selected methods
    print "parameter tree"
    param_tree_root.print_tree()

    print "starting model selection"
    # print the current parameter combination; if there are no parameters,
    # nothing is printed
    print_state = True
    # lock data beforehand, since model selection will not change the kernel
    # matrix (use with care); this avoids recomputing the kernel matrix in
    # every iteration of the model search
    predictor.data_lock(labels, features_train)
    best_parameters = model_selection.select_model(print_state)

    # print best parameters
    print "best parameters:"
    best_parameters.print_tree()

    # apply them and print result
    best_parameters.apply_to_machine(predictor)
    result = cross_validation.evaluate()
    print "mean:", result.mean
    if result.has_conf_int:
        print "[", result.conf_int_low, ",", result.conf_int_up, \
            "] with alpha=", result.conf_int_alpha
Example No. 10
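This snippet is a fragment: it assumes features, labels, kernel, and a C1/C2 param_tree_root were built beforehand. A hedged preamble modelled on Example No. 1 (the import locations and the kernel width are assumptions):

from shogun.Features import RealFeatures, Labels
from shogun.Kernel import GaussianKernel
from shogun.Classifier import LibSVM
from shogun.Evaluation import CrossValidation, ContingencyTableEvaluation
from shogun.Evaluation import StratifiedCrossValidationSplitting, ACCURACY
from shogun.ModelSelection import GridSearchModelSelection
from shogun.ModelSelection import ModelSelectionParameters, R_EXP

# traindat/label_traindat: a 2 x n array and +/-1 labels, as in Example No. 1
features = RealFeatures(traindat)
labels = Labels(label_traindat)
kernel = GaussianKernel(features, features, 2.1)

param_tree_root = ModelSelectionParameters()
c1 = ModelSelectionParameters("C1")
param_tree_root.append_child(c1)
c1.build_values(-2.0, 2.0, R_EXP)
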
# classifier
classifier = LibSVM()
classifier.set_kernel(kernel)
classifier.set_labels(labels)

# splitting strategy for cross-validation
splitting_strategy = StratifiedCrossValidationSplitting(labels, 10)
# evaluation method
evaluation_criterium = ContingencyTableEvaluation(ACCURACY)
# cross-validation instance
cross_validation = CrossValidation(classifier, features, labels,
                                   splitting_strategy, evaluation_criterium)
# model selection instance
model_selection = GridSearchModelSelection(param_tree_root, cross_validation)
# perform model selection with selected methods
print "performing model selection of"
param_tree_root.print_tree()
print "before select model"
best_parameters = model_selection.select_model(True)
print "after select model"
# print best parameters
print "best parameters:"
best_parameters.print_tree()

# apply them and print result
best_parameters.apply_to_machine(classifier)

results = cross_validation.evaluate()
results.print_result()
print results.conf_int_low
print results.conf_int_up
print results.conf_int_alpha
def regression_gaussian_process_modelselection(n=100, n_test=100,
                                               x_range=6, x_range_test=10,
                                               noise_var=0.5, width=1, seed=1):

    from shogun.Features import RealFeatures, RegressionLabels
    from shogun.Kernel import GaussianKernel
    from shogun.ModelSelection import GradientModelSelection, ModelSelectionParameters, R_LINEAR
    from shogun.Regression import GaussianLikelihood, ZeroMean, \
        ExactInferenceMethod, GaussianProcessRegression, GradientCriterion, \
        GradientEvaluation

    # Reproducible results
    random.seed(seed)

    # Easy regression data: one-dimensional noisy sine wave
    X_train = random.rand(1, n) * x_range
    X_test = array([[float(i) / n_test * x_range_test for i in range(n_test)]])
    Y_test = sin(X_test)
    Y_train = sin(X_train) + random.randn(n) * noise_var

    # Shogun representation
    labels = RegressionLabels(Y_train[0])
    feats_train = RealFeatures(X_train)
    feats_test = RealFeatures(X_test)

    # GP specification (Shogun's Gaussian kernel takes 2*width^2 as its
    # width parameter, hence the conversion)
    shogun_width = width * width * 2
    kernel = GaussianKernel(10, shogun_width)
    kernel.init(feats_train, feats_train)
    zmean = ZeroMean()
    likelihood = GaussianLikelihood()
    inf = ExactInferenceMethod(kernel, feats_train, zmean, labels, likelihood)
    gp = GaussianProcessRegression(inf, feats_train, labels)

    # Parameter tree for model selection
    root = ModelSelectionParameters()
    c1 = ModelSelectionParameters("inference_method", inf)
    root.append_child(c1)

    c2 = ModelSelectionParameters("scale")
    c1.append_child(c2)
    c2.build_values(0.01, 4.0, R_LINEAR)

    c3 = ModelSelectionParameters("likelihood_model", likelihood)
    c1.append_child(c3)

    c4 = ModelSelectionParameters("sigma")
    c3.append_child(c4)
    c4.build_values(0.001, 4.0, R_LINEAR)

    c5 = ModelSelectionParameters("kernel", kernel)
    c1.append_child(c5)

    c6 = ModelSelectionParameters("width")
    c5.append_child(c6)
    c6.build_values(0.001, 4.0, R_LINEAR)

    # Criterion for gradient search
    crit = GradientCriterion()

    # Evaluate our inference method for its derivatives
    grad = GradientEvaluation(gp, feats_train, labels, crit)

    grad.set_function(inf)

    gp.print_modsel_params()

    root.print_tree()

    # Handles all of the above structures in memory
    grad_search = GradientModelSelection(root, grad)

    # Set autolocking to false to get rid of warnings
    grad.set_autolock(False)

    # Search for best parameters
    best_combination = grad_search.select_model(True)

    # Output all results and information
    best_combination.print_tree()
    best_combination.apply_to_machine(gp)

    result = grad.evaluate()
    result.print_result()

    # inference
    gp.set_return_type(GaussianProcessRegression.GP_RETURN_COV)
    covariance = gp.apply_regression(feats_test)
    covariance = covariance.get_labels()

    gp.set_return_type(GaussianProcessRegression.GP_RETURN_MEANS)
    mean = gp.apply_regression(feats_test)
    mean = mean.get_labels()

    # some things we can do
    alpha = inf.get_alpha()
    diagonal = inf.get_diagonal_vector()
    cholesky = inf.get_cholesky()

    # plot results
    plot(X_train[0], Y_train[0], 'x')  # training observations
    plot(X_test[0], Y_test[0], '-')    # ground truth of test
    plot(X_test[0], mean, '-')         # mean predictions of test

    legend(["training", "ground truth", "mean predictions"])

    show()

    return gp, alpha, labels, diagonal, covariance, mean, cholesky
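
The function body uses random, array, sin, plot, legend, and show as bare names; a sketch of the imports and invocation it appears to assume:

# Assumed imports matching the bare names used in the function body.
from numpy import random, array, sin
from pylab import plot, legend, show

if __name__ == '__main__':
    regression_gaussian_process_modelselection()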