def variational_classifier_modular(kl_inference,
                                   train_fname=traindat,
                                   test_fname=testdat,
                                   label_fname=label_binary_traindat,
                                   kernel_log_sigma=0,
                                   kernel_log_scale=0,
                                   noise_factor=1e-5,
                                   min_coeff_kernel=1e-2,
                                   max_attempt=0):
    from math import exp
    # Shogun classes used below; modshogun's GP machinery requires Eigen3
    from modshogun import (CSVFile, RealFeatures, BinaryLabels, GaussianKernel,
                           ConstMean, LogitDVGLikelihood, ErrorRateMeasure,
                           GaussianProcessClassification)
    features_train = RealFeatures(CSVFile(train_fname))
    labels_train = BinaryLabels(CSVFile(label_fname))

    likelihood = LogitDVGLikelihood()
    error_eval = ErrorRateMeasure()
    mean_func = ConstMean()
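    # Shogun's GaussianKernel takes a cache size (10 below) and a width
    # defined as 2*sigma^2, hence the 2*exp(2*kernel_log_sigma) factor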
    kernel_sigma = 2 * exp(2 * kernel_log_sigma)
    kernel_func = GaussianKernel(10, kernel_sigma)

    inf = kl_inference(kernel_func, features_train, mean_func, labels_train,
                       likelihood)
    # these tuning knobs exist only on some KL inference classes;
    # skip them when the chosen class does not expose them
    try:
        inf.set_noise_factor(noise_factor)
        inf.set_min_coeff_kernel(min_coeff_kernel)
        inf.set_max_attempt(max_attempt)
    except AttributeError:
        pass
    inf.set_scale(exp(kernel_log_scale))
    gp = GaussianProcessClassification(inf)
    gp.train()
    pred_labels_train = gp.apply_binary(features_train)
    error_train = error_eval.evaluate(pred_labels_train, labels_train)
    #print "\nInference name:%s"%inf.get_name(),
    #print "marginal likelihood:%.10f"%inf.get_negative_log_marginal_likelihood(),
    #print "Training error %.4f"%error_train
    return pred_labels_train, gp, pred_labels_train.get_labels()
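
A minimal usage sketch, not part of the original listing: KLCholeskyInferenceMethod is one of the Shogun KL inference classes this helper accepts (KLCovarianceInferenceMethod and KLApproxDiagonalInferenceMethod are alternatives), and it assumes traindat and label_binary_traindat point at existing CSV files of real-valued features and +/-1 labels.

def run_variational_example():
    # hypothetical driver; swap in any KL inference class exported by modshogun
    from modshogun import KLCholeskyInferenceMethod
    predictions, gp, raw = variational_classifier_modular(
        KLCholeskyInferenceMethod, kernel_log_sigma=1.0, kernel_log_scale=1.0)
    print('predicted labels: %s' % raw)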
def gaussian_process_binary_classification_laplace(X_train,
                                                   y_train,
                                                   n_test=50):

    # import all necessary modules from Shogun (some of them require Eigen3)
    try:
        from modshogun import RealFeatures, BinaryLabels, GaussianKernel, \
            LogitLikelihood, ProbitLikelihood, ZeroMean, LaplacianInferenceMethod, \
            EPInferenceMethod, GaussianProcessClassification
    except ImportError:
        print('Eigen3 needed for Gaussian Processes')
        return

    # numeric and plotting helpers used below (missing from the original listing)
    from itertools import product
    from numpy import linspace, asarray, argwhere, reshape
    from matplotlib.pyplot import figure, title, plot, contour, pcolor, colorbar, show

    # convert training data into Shogun representation
    train_features = RealFeatures(X_train)
    train_labels = BinaryLabels(y_train)

    # generate all pairs in 2d range of testing data
    x1 = linspace(X_train[0, :].min() - 1, X_train[0, :].max() + 1, n_test)
    x2 = linspace(X_train[1, :].min() - 1, X_train[1, :].max() + 1, n_test)
    X_test = asarray(list(product(x1, x2))).T

    # convert testing features into Shogun representation
    test_features = RealFeatures(X_test)

    # create Gaussian kernel with width = 2.0
    kernel = GaussianKernel(10, 2.0)

    # create zero mean function
    mean = ZeroMean()

    # you can easily switch between probit and logit likelihood models
    # by uncommenting/commenting the following lines:

    # create probit likelihood model
    # lik = ProbitLikelihood()

    # create logit likelihood model
    lik = LogitLikelihood()

    # you can easily switch between Laplace and EP approximation by
    # uncommenting/commenting the following lines:

    # specify Laplace approximation inference method
    # inf = LaplacianInferenceMethod(kernel, train_features, mean, train_labels, lik)

    # specify EP approximation inference method
    inf = EPInferenceMethod(kernel, train_features, mean, train_labels, lik)

    # create and train the GP classifier with the chosen inference method (EP here)
    gp = GaussianProcessClassification(inf)
    gp.train()

    # get probabilities p(y*=1|x*) for each testing feature x*
    p_test = gp.get_probabilities(test_features)

    # create figure
    figure()
    title('Training examples, predictive probability and decision boundary')

    # plot training data
    plot(X_train[0, argwhere(y_train == 1)], X_train[1, argwhere(y_train == 1)], 'ro')
    plot(X_train[0, argwhere(y_train == -1)], X_train[1, argwhere(y_train == -1)], 'bo')

    # product(x1, x2) varies x2 fastest, so the reshaped matrix is indexed
    # [x1, x2]; transpose it for matplotlib, whose rows follow the y-axis
    p_grid = reshape(p_test, (n_test, n_test)).T

    # plot decision boundary
    contour(x1, x2, p_grid, levels=[0.5], colors='black')

    # plot probabilities
    pcolor(x1, x2, p_grid)

    # show color bar
    colorbar()

    # show figure
    show()
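
A minimal sketch of driving the function above with synthetic data, not part of the original listing: two Gaussian blobs in 2-d, with features stored one example per column as Shogun's RealFeatures expects and labels in {-1, +1}.

def run_laplace_example():
    from numpy import random, hstack, ones
    # 2 x 40 feature matrix: one blob per class, one example per column
    X_pos = random.randn(2, 20) + 2.0
    X_neg = random.randn(2, 20) - 2.0
    X_train = hstack((X_pos, X_neg))
    y_train = hstack((ones(20), -ones(20)))
    gaussian_process_binary_classification_laplace(X_train, y_train, n_test=50)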
def gaussian_process_binary_classification_laplace(X_train, y_train, n_test=50):

    # import all necessary modules from Shogun (some of them require Eigen3);
    # newer Shogun releases renamed LaplacianInferenceMethod to
    # SingleLaplacianInferenceMethod, which this variant of the example uses
    try:
        from modshogun import RealFeatures, BinaryLabels, GaussianKernel, \
            LogitLikelihood, ProbitLikelihood, ZeroMean, SingleLaplacianInferenceMethod, \
            EPInferenceMethod, GaussianProcessClassification
    except ImportError:
        print('Eigen3 needed for Gaussian Processes')
        return

    # numeric and plotting helpers used below (missing from the original listing)
    from itertools import product
    from numpy import linspace, asarray, argwhere, reshape
    from matplotlib.pyplot import figure, title, plot, contour, pcolor, colorbar, show

    # convert training data into Shogun representation
    train_features = RealFeatures(X_train)
    train_labels = BinaryLabels(y_train)

    # generate all pairs in 2d range of testing data
    x1 = linspace(X_train[0,:].min()-1, X_train[0,:].max()+1, n_test)
    x2 = linspace(X_train[1,:].min()-1, X_train[1,:].max()+1, n_test)
    X_test = asarray(list(product(x1, x2))).T

    # convert testing features into Shogun representation
    test_features = RealFeatures(X_test)

    # create Gaussian kernel with width = 2.0
    kernel = GaussianKernel(10, 2.0)

    # create zero mean function
    mean = ZeroMean()

    # you can easily switch between probit and logit likelihood models
    # by uncommenting/commenting the following lines:

    # create probit likelihood model
    # lik = ProbitLikelihood()

    # create logit likelihood model
    lik = LogitLikelihood()

    # you can easily switch between Laplace and EP approximation by
    # uncommenting/commenting the following lines:

    # specify Laplace approximation inference method
    # inf = SingleLaplacianInferenceMethod(kernel, train_features, mean, train_labels, lik)

    # specify EP approximation inference method
    inf = EPInferenceMethod(kernel, train_features, mean, train_labels, lik)

    # create and train the GP classifier with the chosen inference method (EP here)
    gp = GaussianProcessClassification(inf)
    gp.train()

    # get probabilities p(y*=1|x*) for each testing feature x*
    p_test = gp.get_probabilities(test_features)

    # create figure
    figure()
    title('Training examples, predictive probability and decision boundary')

    # plot training data
    plot(X_train[0, argwhere(y_train == 1)], X_train[1, argwhere(y_train == 1)], 'ro')
    plot(X_train[0, argwhere(y_train == -1)], X_train[1, argwhere(y_train == -1)], 'bo')

    # product(x1, x2) varies x2 fastest, so the reshaped matrix is indexed
    # [x1, x2]; transpose it for matplotlib, whose rows follow the y-axis
    p_grid = reshape(p_test, (n_test, n_test)).T

    # plot decision boundary
    contour(x1, x2, p_grid, levels=[0.5], colors='black')

    # plot probabilities
    pcolor(x1, x2, p_grid)

    # show color bar
    colorbar()

    # show figure
    show()
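
Both listings note that Laplace and EP inference are drop-in replacements for one another. A minimal sketch of comparing their predictive probabilities p(y*=1|x*) on the same data, assuming the SingleLaplacianInferenceMethod spelling from the listing above:

def compare_laplace_vs_ep(X_train, y_train, X_test):
    from modshogun import (RealFeatures, BinaryLabels, GaussianKernel, ZeroMean,
                           LogitLikelihood, SingleLaplacianInferenceMethod,
                           EPInferenceMethod, GaussianProcessClassification)
    feats, labs = RealFeatures(X_train), BinaryLabels(y_train)
    test_feats = RealFeatures(X_test)
    probs = {}
    for name, method in (('laplace', SingleLaplacianInferenceMethod),
                         ('ep', EPInferenceMethod)):
        inf = method(GaussianKernel(10, 2.0), feats, ZeroMean(), labs,
                     LogitLikelihood())
        gp = GaussianProcessClassification(inf)
        gp.train()
        # predictive probability of the positive class for each test column
        probs[name] = gp.get_probabilities(test_feats)
    return probs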