#!/usr/bin/env python
# -*- coding: latin-1 -*-

# Train several independent SVMs, each on a freshly drawn 2-class dataset.
from pylab import figure, pcolor, scatter, contour, colorbar, show, subplot, connect, axis
from numpy import concatenate
from numpy.random import randn
from shogun.Features import *
from shogun.Classifier import *
from shogun.Kernel import *

import util

util.set_title('Multiple SVMS')

num_svms = 6
width = 0.5

# Per-SVM storage for data, features, labels and kernels.
svmList = [None] * num_svms
trainfeatList = [None] * num_svms
traindatList = [None] * num_svms
trainlabList = [None] * num_svms
trainlabsList = [None] * num_svms
kernelList = [None] * num_svms

for i in range(num_svms):
    # Draw a fresh positive/negative sample for every SVM.
    pos = util.get_realdata(True)
    neg = util.get_realdata(False)
    traindatList[i] = concatenate((pos, neg), axis=1)
    trainfeatList[i] = util.get_realfeatures(pos, neg)
    trainlabsList[i] = util.get_labels(True)
    trainlabList[i] = util.get_labels()
    kernelList[i] = GaussianKernel(trainfeatList[i], trainfeatList[i], width)
# PRC example: train an SVM and an LDA classifier on 2d toy data,
# plot the data points, then plot each classifier's precision-recall curve.
from pylab import plot, grid, title, subplot, xlabel, ylabel, text, subplots_adjust, fill_between, mean, connect, show
from shogun.Kernel import GaussianKernel
from shogun.Classifier import LibSVM, LDA
from shogun.Evaluation import PRCEvaluation

import util

util.set_title('PRC example')
util.DISTANCE = 0.5
subplots_adjust(hspace=0.3)

pos = util.get_realdata(True)
neg = util.get_realdata(False)
features = util.get_realfeatures(pos, neg)
labels = util.get_labels()

# classifiers
gk = GaussianKernel(features, features, 1.0)
svm = LibSVM(1000.0, gk, labels)
svm.train()
lda = LDA(1, features, labels)
lda.train()

## plot points
subplot(211)
plot(pos[0, :], pos[1, :], "r.")
plot(neg[0, :], neg[1, :], "b.")
grid(True)
title('Data', size=10)

# plot PRC for SVM
subplot(223)
# Support vector regression (LibSVR, Gaussian kernel) on noisy sine data.
from pylab import figure, pcolor, scatter, contour, colorbar, show, subplot, plot, legend, connect
from shogun.Features import *
from shogun.Regression import *
from shogun.Kernel import *

import util

util.set_title("SVR on Sinus")

X, Y = util.get_sinedata()
C = 10
width = 0.5
epsilon = 0.01

feat = RealFeatures(X)
lab = RegressionLabels(Y.flatten())
gk = GaussianKernel(feat, feat, width)

# svr = SVRLight(C, epsilon, gk, lab)
svr = LibSVR(C, epsilon, gk, lab)
svr.train()

# training data vs. fitted outputs
plot(X, Y, ".", label="train data")
plot(X[0], svr.apply().get_labels(), hold=True, label="train output")

# evaluate on a dense grid of test points
XE, YE = util.compute_output_plot_isolines_sine(svr, gk, feat, regression=True)
plot(XE[0], YE, hold=True, label="test output")

connect("key_press_event", util.quit)
show()
# ROC example: train an SVM and an LDA classifier on 2d toy data,
# plot the data points, then plot each classifier's ROC curve.
from pylab import plot, grid, title, subplot, xlabel, ylabel, text, subplots_adjust, fill_between, mean, connect, show
from shogun import GaussianKernel
from shogun import LibSVM, LDA
from shogun import ROCEvaluation

import util

util.set_title('ROC example')
util.DISTANCE = 0.5
subplots_adjust(hspace=0.3)

pos = util.get_realdata(True)
neg = util.get_realdata(False)
features = util.get_realfeatures(pos, neg)
labels = util.get_labels()

# classifiers
gk = GaussianKernel(features, features, 1.0)
svm = LibSVM(1000.0, gk, labels)
svm.train()
lda = LDA(1, features, labels)
lda.train()

## plot points
subplot(211)
plot(pos[0, :], pos[1, :], "r.")
plot(neg[0, :], neg[1, :], "b.")
grid(True)
title('Data', size=10)

# plot ROC for SVM
subplot(223)
# Based on svm.py example from Shogun
# Train a Gaussian-kernel LibSVM on 2d toy data and scatter-plot the examples.
import pylab
import numpy
import util
import latex_plot_inits

from shogun.Features import *
from shogun.Classifier import *
from shogun.Kernel import *

util.set_title('SVM')
util.NUM_EXAMPLES = 200
width = 5

# positive examples
pos = util.get_realdata(True)
pylab.plot(pos[0, :], pos[1, :], "rs")

# negative examples
neg = util.get_realdata(False)
pylab.plot(neg[0, :], neg[1, :], "bo")

# train svm
labels = util.get_labels()
train = util.get_realfeatures(pos, neg)
gk = GaussianKernel(train, train, width)
svm = LibSVM(10.0, gk, labels)
svm.train()
# Kernel ridge regression on noisy sine data (modshogun interface).
from pylab import figure, pcolor, scatter, contour, colorbar, show, subplot, plot, legend, connect
from modshogun import *

import util

util.set_title('KernelRidgeRegression on Sine')

X, Y = util.get_sinedata()
width = 1

feat = RealFeatures(X)
lab = RegressionLabels(Y.flatten())
gk = GaussianKernel(feat, feat, width)

krr = KernelRidgeRegression()
krr.set_labels(lab)
krr.set_kernel(gk)
krr.set_tau(1e-6)
krr.train()

# training data vs. fitted outputs
plot(X, Y, '.', label='train data')
plot(X[0], krr.apply().get_labels(), hold=True, label='train output')

# evaluate on a dense grid of test points; mark example 200 with a '+'
XE, YE = util.compute_output_plot_isolines_sine(krr, gk, feat, regression=True)
YE200 = krr.apply_one(200)

plot(XE[0], YE, hold=True, label='test output')
plot([XE[0, 200]], [YE200], '+', hold=True)
#print YE[200], YE200

connect('key_press_event', util.quit)
show()
# Train a Gaussian-kernel LibSVM on 2d toy data and draw its decision isolines.
from pylab import figure, pcolor, scatter, contour, colorbar, show, subplot, plot, connect, axis
from numpy.random import randn
from shogun.Features import *
from shogun.Classifier import *
from shogun.Kernel import *

import util

util.set_title('SVM')
util.NUM_EXAMPLES = 200
width = 5

# positive examples
pos = util.get_realdata(True)
plot(pos[0, :], pos[1, :], "r.")

# negative examples
neg = util.get_realdata(False)
plot(neg[0, :], neg[1, :], "b.")

# train svm
labels = util.get_labels()
train = util.get_realfeatures(pos, neg)
gk = GaussianKernel(train, train, width)
svm = LibSVM(10.0, gk, labels)
svm.train()

# decision surface on a grid
x, y, z = util.compute_output_plot_isolines(svm, gk, train)
pcolor(x, y, z, shading='interp')
contour(x, y, z, linewidths=1, colors='black', hold=True)
axis('tight')
# SMEM (split-and-merge EM) for a 2d GMM: sample from a known
# 3-component mixture to build a training set.
from pylab import figure, scatter, contour, show, legend, connect
from numpy import array, append, arange, reshape, empty, exp
from shogun.Distribution import Gaussian, GMM
from shogun.Features import RealFeatures

import util

util.set_title('SMEM for 2d GMM example')

#set the parameters
max_iter = 100
max_cand = 5
min_cov = 1e-9
max_em_iter = 1000
min_change = 1e-9
cov_type = 0

#setup the real GMM
real_gmm = GMM(3)

real_gmm.set_nth_mean(array([2.0, 2.0]), 0)
real_gmm.set_nth_mean(array([-2.0, -2.0]), 1)
real_gmm.set_nth_mean(array([2.0, -2.0]), 2)

real_gmm.set_nth_cov(array([[1.0, 0.2], [0.2, 0.5]]), 0)
real_gmm.set_nth_cov(array([[0.2, 0.1], [0.1, 0.5]]), 1)
real_gmm.set_nth_cov(array([[0.3, -0.2], [-0.2, 0.8]]), 2)

real_gmm.set_coef(array([0.3, 0.4, 0.3]))

#generate training set from real GMM
generated = array([real_gmm.sample()])
# SVR regression via the static `sg` interface on a sampled sine wave.
from sg import sg
from pylab import plot, show, connect
from numpy import array, transpose, sin, double

import util

util.set_title('SVR Regression')

sg('new_regression', 'LIBSVR')

# 100 evenly spaced sample points, sine values as regression targets
features = array([range(0, 100)], dtype=double)
features.resize(1, 100)
labels = sin(features)[0]

sg('set_features', "TRAIN", features)
sg('set_labels', "TRAIN", labels)
sg('set_kernel', 'GAUSSIAN', 'REAL', 20, 10.)
sg('c', 1.)
sg('train_regression')
[bias, alphas] = sg('get_svm')

sg('set_features', "TEST", features)
out = sg('classify')

# true targets in blue, predictions in red
plot(features[0], labels, 'b-')
plot(features[0], labels, 'bo')
plot(features[0], out, 'r-')
plot(features[0], out, 'ro')

connect('key_press_event', util.quit)
show()
# Kernel ridge regression (KRR) on 2d toy data.
from pylab import figure, pcolor, scatter, contour, colorbar, show, subplot, plot, connect
from numpy import array, meshgrid, reshape, linspace, min, max
from numpy import concatenate, transpose, ravel
from shogun.Features import *
from shogun.Regression import *
from shogun.Kernel import *

import util

util.set_title('KRR')

width = 20

# positive examples
pos = util.get_realdata(True)
plot(pos[0, :], pos[1, :], "r.")

# negative examples
neg = util.get_realdata(False)
plot(neg[0, :], neg[1, :], "b.")

# train svm
labels = util.get_labels()
train = util.get_realfeatures(pos, neg)
gk = GaussianKernel(train, train, width)
krr = KRR()
krr.set_labels(labels)
krr.set_kernel(gk)
krr.set_tau(1e-3)
krr.train()

# compute output plot iso-lines
# SVM classification via the static `sg` interface; evaluate the trained
# classifier on a 50x50 test grid for later plotting.
from sg import sg
from pylab import pcolor, scatter, contour, colorbar, show, imshow, connect
from numpy import min, max, where

import util

util.set_title('SVM Classification')
#sg('loglevel', 'ALL')

traindata = util.get_traindata()
labels = util.get_labels()
width = 1.
size_cache = 10

sg('set_features', 'TRAIN', traindata)
sg('set_labels', 'TRAIN', labels)
sg('set_kernel', 'GAUSSIAN', 'REAL', size_cache, width)
sg('new_classifier', 'LIBSVM')
sg('c', 100.)
sg('train_classifier')
[bias, alphas] = sg('get_svm')
#print bias
#print alphas
#print "objective: %f" % sg('get_svm_objective')

x, y = util.get_meshgrid(traindata)
testdata = util.get_testdata(x, y)
sg('set_features', 'TEST', testdata)
z = sg('classify')
z.resize((50, 50))
# LDA on 2d toy data with decision isolines (modshogun interface).
# Fix vs. original: `from modshogun import *` was duplicated; the redundant
# import is removed.
from pylab import figure, pcolor, scatter, contour, colorbar, show, subplot, plot, connect
from modshogun import *

import util

util.set_title('LDA')
util.DISTANCE = 0.5
gamma = 0.1

# positive examples
pos = util.get_realdata(True)
plot(pos[0, :], pos[1, :], "r.")

# negative examples
neg = util.get_realdata(False)
plot(neg[0, :], neg[1, :], "b.")

# train lda
labels = util.get_labels()
features = util.get_realfeatures(pos, neg)
lda = LDA(gamma, features, labels)
lda.train()

# compute output plot iso-lines
x, y, z = util.compute_output_plot_isolines(lda)
c = pcolor(x, y, z, shading='interp')
contour(x, y, z, linewidths=1, colors='black', hold=True)
colorbar(c)
# KRR on noisy sine data: fit, then plot train/test outputs.
from pylab import figure, pcolor, scatter, contour, colorbar, show, subplot, plot, legend, connect
from shogun.Features import *
from shogun.Regression import *
from shogun.Kernel import *

import util

util.set_title('KRR on Sine')

X, Y = util.get_sinedata()
width = 1

feat = RealFeatures(X)
lab = Labels(Y.flatten())
gk = GaussianKernel(feat, feat, width)

krr = KRR()
krr.set_labels(lab)
krr.set_kernel(gk)
krr.set_tau(1e-6)
krr.train()

# training data vs. fitted outputs
plot(X, Y, '.', label='train data')
plot(X[0], krr.classify().get_labels(), hold=True, label='train output')

# evaluate on a dense grid of test points; mark example 200 with a '+'
XE, YE = util.compute_output_plot_isolines_sine(krr, gk, feat)
YE200 = krr.classify_example(200)

plot(XE[0], YE, hold=True, label='test output')
plot([XE[0, 200]], [YE200], '+', hold=True)
#print YE[200], YE200

connect('key_press_event', util.quit)
# EM for a 1d GMM: draw 200 samples from a known 3-component mixture.
from pylab import figure, show, connect, hist, plot, legend
from numpy import array, append, arange, empty, exp
from shogun.Distribution import Gaussian, GMM
from shogun.Features import RealFeatures

import util

util.set_title("EM for 1d GMM example")

# set the parameters
min_cov = 1e-9
max_iter = 1000
min_change = 1e-9

# setup the real GMM
real_gmm = GMM(3)

real_gmm.set_nth_mean(array([-2.0]), 0)
real_gmm.set_nth_mean(array([0.0]), 1)
real_gmm.set_nth_mean(array([2.0]), 2)

real_gmm.set_nth_cov(array([[0.3]]), 0)
real_gmm.set_nth_cov(array([[0.1]]), 1)
real_gmm.set_nth_cov(array([[0.2]]), 2)

real_gmm.set_coef(array([0.3, 0.5, 0.2]))

# generate training set from real GMM
generated = array([real_gmm.sample()])
for i in range(199):
    generated = append(generated, array([real_gmm.sample()]), axis=1)
# SVM classification via the static `sg` interface; classify a 50x50 test
# grid so the decision surface can be plotted.
from sg import sg
from pylab import pcolor, scatter, contour, colorbar, show, imshow, connect
from numpy import min, max, where

import util

util.set_title('SVM Classification')
#sg('loglevel', 'ALL')

traindata = util.get_traindata()
labels = util.get_labels()
width = 1.
size_cache = 10

sg('set_features', 'TRAIN', traindata)
sg('set_labels', 'TRAIN', labels)
sg('set_kernel', 'GAUSSIAN', 'REAL', size_cache, width)
sg('new_classifier', 'LIBSVM')
sg('c', 100.)
sg('train_classifier')
[bias, alphas] = sg('get_svm')
#print bias
#print alphas
#print "objective: %f" % sg('get_svm_objective')

x, y = util.get_meshgrid(traindata)
testdata = util.get_testdata(x, y)
sg('set_features', 'TEST', testdata)
z = sg('classify')
z.resize((50, 50))
# LibSVR with a Gaussian kernel on noisy sine data.
from pylab import figure, pcolor, scatter, contour, colorbar, show, subplot, plot, legend, connect
from shogun.Features import *
from shogun.Regression import *
from shogun.Kernel import *

import util

util.set_title('SVR on Sinus')

X, Y = util.get_sinedata()
C = 10
width = 0.5
epsilon = 0.01

feat = RealFeatures(X)
lab = Labels(Y.flatten())
gk = GaussianKernel(feat, feat, width)

#svr = SVRLight(C, epsilon, gk, lab)
svr = LibSVR(C, epsilon, gk, lab)
svr.train()

# training data vs. fitted outputs
plot(X, Y, '.', label='train data')
plot(X[0], svr.apply().get_labels(), hold=True, label='train output')

# evaluate on a dense grid of test points
XE, YE = util.compute_output_plot_isolines_sine(svr, gk, feat)
plot(XE[0], YE, hold=True, label='test output')

connect('key_press_event', util.quit)
show()
# ROC example (unified `shogun` import): train SVM and LDA classifiers,
# plot the data, then plot each classifier's ROC curve.
from pylab import plot, grid, title, subplot, xlabel, ylabel, text, subplots_adjust, fill_between, mean, connect, show
from shogun import GaussianKernel
from shogun import LibSVM, LDA
from shogun import ROCEvaluation

import util

util.set_title('ROC example')
util.DISTANCE = 0.5
subplots_adjust(hspace=0.3)

pos = util.get_realdata(True)
neg = util.get_realdata(False)
features = util.get_realfeatures(pos, neg)
labels = util.get_labels()

# classifiers
gk = GaussianKernel(features, features, 1.0)
svm = LibSVM(1000.0, gk, labels)
svm.train()
lda = LDA(1, features, labels)
lda.train()

## plot points
subplot(211)
plot(pos[0, :], pos[1, :], "r.")
plot(neg[0, :], neg[1, :], "b.")
grid(True)
title('Data', size=10)

# plot ROC for SVM
subplot(223)
# KernelRidgeRegression on 2d toy data.
# Fix vs. original: `from modshogun import *` was repeated three times;
# the redundant imports are removed.
from pylab import figure, pcolor, scatter, contour, colorbar, show, subplot, plot, connect
from numpy import array, meshgrid, reshape, linspace, min, max
from numpy import concatenate, transpose, ravel
from modshogun import *

import util

util.set_title('KernelRidgeRegression')

width = 20

# positive examples
pos = util.get_realdata(True)
plot(pos[0, :], pos[1, :], "r.")

# negative examples
neg = util.get_realdata(False)
plot(neg[0, :], neg[1, :], "b.")

# train svm
labels = util.get_labels(type='regression')
train = util.get_realfeatures(pos, neg)
gk = GaussianKernel(train, train, width)
krr = KernelRidgeRegression()
krr.set_labels(labels)
krr.set_kernel(gk)
krr.set_tau(1e-3)
krr.train()

# compute output plot iso-lines
# LDA on 2d toy data with decision isolines (modshogun interface).
from pylab import figure, pcolor, scatter, contour, colorbar, show, subplot, plot, connect
from modshogun import *

import util

util.set_title('LDA')
util.DISTANCE = 0.5
gamma = 0.1

# positive examples
pos = util.get_realdata(True)
plot(pos[0, :], pos[1, :], "r.")

# negative examples
neg = util.get_realdata(False)
plot(neg[0, :], neg[1, :], "b.")

# train lda
labels = util.get_labels()
features = util.get_realfeatures(pos, neg)
lda = LDA(gamma, features, labels)
lda.train()

# compute output plot iso-lines
x, y, z = util.compute_output_plot_isolines(lda)
c = pcolor(x, y, z)
contour(x, y, z, linewidths=1, colors='black', hold=True)
colorbar(c)
connect('key_press_event', util.quit)
# LibSVR with a Gaussian kernel on noisy sine data.
from pylab import figure, pcolor, scatter, contour, colorbar, show, subplot, plot, legend, connect
from shogun.Features import *
from shogun.Regression import *
from shogun.Kernel import *

import util

util.set_title('SVR on Sinus')

X, Y = util.get_sinedata()
C = 10
width = 0.5
epsilon = 0.01

feat = RealFeatures(X)
lab = Labels(Y.flatten())
gk = GaussianKernel(feat, feat, width)

#svr = SVRLight(C, epsilon, gk, lab)
svr = LibSVR(C, epsilon, gk, lab)
svr.train()

# training data vs. fitted outputs
plot(X, Y, '.', label='train data')
plot(X[0], svr.apply().get_labels(), hold=True, label='train output')

# evaluate on a dense grid of test points
XE, YE = util.compute_output_plot_isolines_sine(svr, gk, feat)
plot(XE[0], YE, hold=True, label='test output')

connect('key_press_event', util.quit)
show()
#!/usr/bin/env python
# -*- coding: latin-1 -*-

# Train several independent SVMs, each on a freshly drawn 2-class dataset
# (modshogun interface).
from pylab import figure, pcolor, scatter, contour, colorbar, show, subplot, connect, axis
from numpy import concatenate
from numpy.random import randn
from modshogun import *

import util

util.set_title('Multiple SVMS')

num_svms = 6
width = 0.5

# Per-SVM storage for data, features, labels, kernels and classifiers.
svmList = [None] * num_svms
trainfeatList = [None] * num_svms
traindatList = [None] * num_svms
trainlabList = [None] * num_svms
trainlabsList = [None] * num_svms
kernelList = [None] * num_svms

for i in range(num_svms):
    # Draw a fresh positive/negative sample for every SVM.
    pos = util.get_realdata(True)
    neg = util.get_realdata(False)
    traindatList[i] = concatenate((pos, neg), axis=1)
    trainfeatList[i] = util.get_realfeatures(pos, neg)
    trainlabsList[i] = util.get_labels(True)
    trainlabList[i] = util.get_labels()
    kernelList[i] = GaussianKernel(trainfeatList[i], trainfeatList[i], width)
    svmList[i] = LibSVM(10, kernelList[i], trainlabList[i])
# EM for a 1d GMM: sample 200 points from a known 3-component mixture,
# then fit a fresh 3-component GMM to the samples.
from pylab import figure, show, connect, hist, plot, legend
from numpy import array, append, arange, empty
from shogun.Distribution import Gaussian, GMM
from shogun.Features import RealFeatures

import util

util.set_title('EM for 1d GMM example')

min_cov = 1e-9
max_iter = 1000
min_change = 1e-9

real_gmm = GMM(3)

real_gmm.set_nth_mean(array([-2.0]), 0)
real_gmm.set_nth_mean(array([0.0]), 1)
real_gmm.set_nth_mean(array([2.0]), 2)

real_gmm.set_nth_cov(array([[0.3]]), 0)
real_gmm.set_nth_cov(array([[0.1]]), 1)
real_gmm.set_nth_cov(array([[0.2]]), 2)

real_gmm.set_coef(array([0.3, 0.5, 0.2]))

generated = array([real_gmm.sample()])
for i in range(199):
    generated = append(generated, array([real_gmm.sample()]), axis=1)

feat_train = RealFeatures(generated)
est_gmm = GMM(3)
est_gmm.train(feat_train)
# Linear SVM (SVMLin) on sparse features vs. a linear-kernel SVM on 2d toy data.
# Fixes vs. original: `from modshogun import *` was repeated three times (now
# imported once), and the Python-2-only `print` statement uses the function
# form, which behaves identically on Python 2 and is valid Python 3.
from pylab import figure, pcolor, scatter, contour, colorbar, show, subplot, plot, axis, connect
from modshogun import *

import util

util.set_title('SVM Linear 1')
util.NUM_EXAMPLES = 4000
C = 1000

# positive examples
pos = util.get_realdata(True)

# negative examples
neg = util.get_realdata(False)

# train svm lin
labels = util.get_labels()
dense = util.get_realfeatures(pos, neg)
train = SparseRealFeatures()
train.obtain_from_simple(dense)
svm = SVMLin(C, train, labels)
svm.train()

lk = LinearKernel(dense, dense)
try:
    svmlight = LibSVM(C, lk, labels)
except NameError:
    # Classifier class not compiled in; bail out gracefully.
    print('No SVMLight support available')
    import sys
    sys.exit(1)
svmlight.train()
# KRR on noisy sine data: fit, then plot train/test outputs.
from pylab import figure, pcolor, scatter, contour, colorbar, show, subplot, plot, legend, connect
from shogun.Features import *
from shogun.Regression import *
from shogun.Kernel import *

import util

util.set_title('KRR on Sine')

X, Y = util.get_sinedata()
width = 1

feat = RealFeatures(X)
lab = Labels(Y.flatten())
gk = GaussianKernel(feat, feat, width)

krr = KRR()
krr.set_labels(lab)
krr.set_kernel(gk)
krr.set_tau(1e-6)
krr.train()

# training data vs. fitted outputs
plot(X, Y, '.', label='train data')
plot(X[0], krr.classify().get_labels(), hold=True, label='train output')

# evaluate on a dense grid of test points; mark example 200 with a '+'
XE, YE = util.compute_output_plot_isolines_sine(krr, gk, feat)
YE200 = krr.classify_example(200)

plot(XE[0], YE, hold=True, label='test output')
plot([XE[0, 200]], [YE200], '+', hold=True)
#print YE[200], YE200
# SMEM for a 2d GMM: draw 200 samples from a known 3-component mixture.
from pylab import figure, scatter, contour, show, legend, connect
from numpy import array, append, arange, reshape, empty
from shogun.Distribution import Gaussian, GMM
from shogun.Features import RealFeatures

import util

util.set_title('SMEM for 2d GMM example')

max_iter = 100
max_cand = 5
min_cov = 1e-9
max_em_iter = 1000
min_change = 1e-9
cov_type = 0

real_gmm = GMM(3)

real_gmm.set_nth_mean(array([2.0, 2.0]), 0)
real_gmm.set_nth_mean(array([-2.0, -2.0]), 1)
real_gmm.set_nth_mean(array([2.0, -2.0]), 2)

real_gmm.set_nth_cov(array([[1.0, 0.2], [0.2, 0.5]]), 0)
real_gmm.set_nth_cov(array([[0.2, 0.1], [0.1, 0.5]]), 1)
real_gmm.set_nth_cov(array([[0.3, -0.2], [-0.2, 0.8]]), 2)

real_gmm.set_coef(array([0.3, 0.4, 0.3]))

generated = array([real_gmm.sample()])
for i in range(199):
    generated = append(generated, array([real_gmm.sample()]), axis=0)
# KernelRidgeRegression on 2d toy data (unified `shogun` import).
from pylab import figure, pcolor, scatter, contour, colorbar, show, subplot, plot, connect
from numpy import array, meshgrid, reshape, linspace, min, max
from numpy import concatenate, transpose, ravel
from shogun import *

import util

util.set_title('KernelRidgeRegression')

width = 20

# positive examples
pos = util.get_realdata(True)
plot(pos[0, :], pos[1, :], "r.")

# negative examples
neg = util.get_realdata(False)
plot(neg[0, :], neg[1, :], "b.")

# train krr
labels = util.get_labels(type='regression')
train = util.get_realfeatures(pos, neg)
gk = GaussianKernel(train, train, width)
krr = KernelRidgeRegression()
krr.set_labels(labels)
krr.set_kernel(gk)
krr.set_tau(1e-3)
krr.train()

# compute output plot iso-lines
x, y, z = util.compute_output_plot_isolines(krr, gk, train, regression=True)
# EM for a 1d GMM (modshogun interface): draw 200 samples from a known
# 3-component mixture.
from pylab import figure, show, connect, hist, plot, legend
from numpy import array, append, arange, empty, exp
from modshogun import Gaussian, GMM
from modshogun import RealFeatures

import util

util.set_title('EM for 1d GMM example')

#set the parameters
min_cov = 1e-9
max_iter = 1000
min_change = 1e-9

#setup the real GMM
real_gmm = GMM(3)

real_gmm.set_nth_mean(array([-2.0]), 0)
real_gmm.set_nth_mean(array([0.0]), 1)
real_gmm.set_nth_mean(array([2.0]), 2)

real_gmm.set_nth_cov(array([[0.3]]), 0)
real_gmm.set_nth_cov(array([[0.1]]), 1)
real_gmm.set_nth_cov(array([[0.2]]), 2)

real_gmm.set_coef(array([0.3, 0.5, 0.2]))

#generate training set from real GMM
generated = array([real_gmm.sample()])
for i in range(199):
    generated = append(generated, array([real_gmm.sample()]), axis=1)
# Kernel ridge regression (KRR) on 2d toy data.
from pylab import figure, pcolor, scatter, contour, colorbar, show, subplot, plot, connect
from numpy import array, meshgrid, reshape, linspace, min, max
from numpy import concatenate, transpose, ravel
from shogun.Features import *
from shogun.Regression import *
from shogun.Kernel import *

import util

util.set_title('KRR')

width = 20

# positive examples
pos = util.get_realdata(True)
plot(pos[0, :], pos[1, :], "r.")

# negative examples
neg = util.get_realdata(False)
plot(neg[0, :], neg[1, :], "b.")

# train svm
labels = util.get_labels()
train = util.get_realfeatures(pos, neg)
gk = GaussianKernel(train, train, width)
krr = KRR()
krr.set_labels(labels)
krr.set_kernel(gk)
krr.set_tau(1e-3)
krr.train()

# compute output plot iso-lines