Example #1
def libsvm ():
	print 'LibSVM'

	from numpy import mean, sign
	from shogun.Features import RealFeatures, Labels
	from shogun.Kernel import GaussianKernel
	from shogun.Evaluation import PerformanceMeasures
	from shogun.Classifier import LibSVM

	# fm_train_real, fm_test_real, label_train_twoclass and testlab are
	# module-level data (see the toy-data sketch after this example)
	feats_train=RealFeatures(fm_train_real)
	feats_test=RealFeatures(fm_test_real)

	width=2.1
	kernel=GaussianKernel(feats_train, feats_train, width)

	C=1
	epsilon=1e-5
	labels=Labels(label_train_twoclass)

	svm=LibSVM(C, kernel, labels)
	svm.set_epsilon(epsilon)
	svm.train()

	#kernel.init(feats_train, feats_test)
	output = svm.classify(feats_test)#.get_labels()
	#output_vector = output.get_labels()
	out=svm.classify().get_labels()
	testerr=mean(sign(out)!=testlab)
	print testerr
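The snippet above expects fm_train_real, fm_test_real, label_train_twoclass and testlab to already exist at module level. A minimal toy-data preamble that would satisfy those names; the shapes and values are illustrative assumptions, not part of the original example:

from numpy import concatenate, ones
from numpy.random import randn, seed

seed(17)
num, dim = 30, 2

# two Gaussian blobs per split; RealFeatures expects one column per example
fm_train_real = concatenate((randn(dim, num) - 1, randn(dim, num) + 1), axis=1)
fm_test_real = concatenate((randn(dim, num) - 1, randn(dim, num) + 1), axis=1)

# +/-1 labels for the training split and the held-out test split
label_train_twoclass = concatenate((-ones(num), ones(num)))
testlab = concatenate((-ones(num), ones(num)))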
Example #2
def classifier_libsvm_modular(fm_train_real=traindat,
                              fm_test_real=testdat,
                              label_train_twoclass=label_traindat,
                              width=2.1,
                              C=1,
                              epsilon=1e-5):
    from shogun.Features import RealFeatures, Labels
    from shogun.Kernel import GaussianKernel
    from shogun.Classifier import LibSVM

    feats_train = RealFeatures(fm_train_real)
    feats_test = RealFeatures(fm_test_real)

    kernel = GaussianKernel(feats_train, feats_train, width)
    labels = Labels(label_train_twoclass)

    svm = LibSVM(C, kernel, labels)
    svm.set_epsilon(epsilon)
    svm.train()

    kernel.init(feats_train, feats_test)
    labels = svm.classify().get_labels()
    supportvectors = sv_idx = svm.get_support_vectors()
    alphas = svm.get_alphas()
    predictions = svm.classify()
    return predictions, svm, predictions.get_labels()
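Example #2 wraps the same LibSVM flow in a parameterized function. A hypothetical call, reusing the toy arrays from the preamble sketch above in place of traindat, testdat and label_traindat:

predictions, svm, out = classifier_libsvm_modular(fm_train_real,
                                                  fm_test_real,
                                                  label_train_twoclass)
print out[:5]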
Example #3
def libsvm():
    print 'LibSVM'

    from numpy import mean, sign
    from shogun.Features import RealFeatures, Labels
    from shogun.Kernel import GaussianKernel
    from shogun.Evaluation import PerformanceMeasures
    from shogun.Classifier import LibSVM

    feats_train = RealFeatures(fm_train_real)
    feats_test = RealFeatures(fm_test_real)

    width = 2.1
    kernel = GaussianKernel(feats_train, feats_train, width)

    C = 1
    epsilon = 1e-5
    labels = Labels(label_train_twoclass)

    svm = LibSVM(C, kernel, labels)
    svm.set_epsilon(epsilon)
    svm.train()

    #kernel.init(feats_train, feats_test)
    output = svm.classify(feats_test)  #.get_labels()
    #output_vector = output.get_labels()
    out = svm.classify().get_labels()
    testerr = mean(sign(out) != testlab)
    print testerr
def kernel_combined_custom_poly_modular(fm_train_real=traindat,
                                        fm_test_real=testdat,
                                        fm_label_twoclass=label_traindat):
    from shogun.Features import CombinedFeatures, RealFeatures, Labels
    from shogun.Kernel import CombinedKernel, PolyKernel, CustomKernel
    from shogun.Classifier import LibSVM

    kernel = CombinedKernel()
    feats_train = CombinedFeatures()

    tfeats = RealFeatures(fm_train_real)
    tkernel = PolyKernel(10, 3)
    tkernel.init(tfeats, tfeats)
    K = tkernel.get_kernel_matrix()
    kernel.append_kernel(CustomKernel(K))

    subkfeats_train = RealFeatures(fm_train_real)
    feats_train.append_feature_obj(subkfeats_train)
    subkernel = PolyKernel(10, 2)
    kernel.append_kernel(subkernel)

    kernel.init(feats_train, feats_train)

    labels = Labels(fm_label_twoclass)
    svm = LibSVM(1.0, kernel, labels)
    svm.train()

    # for prediction, rebuild the combined kernel: a train-vs-test custom
    # matrix plus the poly subkernel re-initialised on the test features
    kernel = CombinedKernel()
    feats_pred = CombinedFeatures()

    pfeats = RealFeatures(fm_test_real)
    tkernel = PolyKernel(10, 3)
    tkernel.init(tfeats, pfeats)
    K = tkernel.get_kernel_matrix()
    kernel.append_kernel(CustomKernel(K))

    subkfeats_test = RealFeatures(fm_test_real)
    feats_pred.append_feature_obj(subkfeats_test)
    subkernel = PolyKernel(10, 2)
    kernel.append_kernel(subkernel)
    kernel.init(feats_train, feats_pred)

    svm.set_kernel(kernel)
    svm.classify()
    km_train = kernel.get_kernel_matrix()
    return km_train, kernel
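Note that the returned matrix is named km_train but is computed after kernel.init(feats_train, feats_pred), so it is really the train-versus-test kernel matrix. Under the assumption that CombinedKernel uses its default unit subkernel weights, that matrix should simply be the element-wise sum of a degree-3 and a degree-2 PolyKernel matrix between the two splits. A hedged sanity-check sketch of that reading, reusing the toy arrays from the earlier preamble:

from numpy import allclose
from shogun.Features import RealFeatures
from shogun.Kernel import PolyKernel

km_pred, kernel = kernel_combined_custom_poly_modular(fm_train_real,
                                                      fm_test_real,
                                                      label_train_twoclass)

feats_tr = RealFeatures(fm_train_real)
feats_te = RealFeatures(fm_test_real)
k3 = PolyKernel(10, 3)
k3.init(feats_tr, feats_te)
k2 = PolyKernel(10, 2)
k2.init(feats_tr, feats_te)

# True if the combined kernel is the plain sum of its two poly blocks
print allclose(km_pred, k3.get_kernel_matrix() + k2.get_kernel_matrix())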
def classifier_custom_kernel_modular(C=1, dim=7):
    from shogun.Features import RealFeatures, Labels
    from shogun.Kernel import CustomKernel
    from shogun.Classifier import LibSVM

    from numpy import diag, ones, sign
    from numpy.random import rand, seed

    seed((C, dim))

    lab = sign(2 * rand(dim) - 1)
    data = rand(dim, dim)
    symdata = data * data.T + diag(ones(dim))

    kernel = CustomKernel()
    kernel.set_full_kernel_matrix_from_full(data)
    labels = Labels(lab)
    svm = LibSVM(C, kernel, labels)
    svm.train()
    predictions = svm.classify()
    out = svm.classify().get_labels()
    return svm, out
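CustomKernel takes the full precomputed Gram matrix directly, so the toy data never passes through RealFeatures at all (that import is unused), and it is the raw data matrix rather than the symmetrised symdata that gets installed as the kernel. A trivial call with the default sizes:

svm, out = classifier_custom_kernel_modular(C=1, dim=7)
print out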
def libsvm ():
	print 'LibSVM'

	from shogun.Features import RealFeatures, Labels
	from shogun.Kernel import GaussianKernel
	from shogun.Classifier import LibSVM

	feats_train=RealFeatures(fm_train_real)
	feats_test=RealFeatures(fm_test_real)
	width=2.1
	kernel=GaussianKernel(feats_train, feats_train, width)

	C=1
	epsilon=1e-5
	labels=Labels(label_train_twoclass)

	svm=LibSVM(C, kernel, labels)
	svm.set_epsilon(epsilon)
	svm.train()

	kernel.init(feats_train, feats_test)
	svm.classify().get_labels()
	sv_idx=svm.get_support_vectors()
	alphas=svm.get_alphas()
Example #10
def bench_shogun(X, y, T, valid):
#
#       .. Shogun ..
#
    from shogun.Classifier import LibSVM
    from shogun.Features import RealFeatures, Labels
    from shogun.Kernel import GaussianKernel
    start = datetime.now()
    feat = RealFeatures(X.T)
    feat_test = RealFeatures(T.T)
    labels = Labels(y.astype(np.float64))
    kernel = GaussianKernel(feat, feat, sigma)
    shogun_svm = LibSVM(1., kernel, labels)
    shogun_svm.train()
    dec_func = shogun_svm.classify(feat_test).get_labels()
    score = np.mean(np.sign(dec_func) == valid)
    return score, datetime.now() - start
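bench_shogun is a fragment of a benchmark script: np, datetime and the Gaussian width sigma are module-level names there. A hedged driver sketch that supplies those names and some toy data; the sigma value and the array shapes are assumptions:

import numpy as np
from datetime import datetime

sigma = 1.0  # assumed kernel width; the benchmark defines this elsewhere

np.random.seed(0)
X = np.random.randn(100, 5)           # training samples, one row per sample
y = np.sign(np.random.randn(100))     # +/-1 training labels
T = np.random.randn(40, 5)            # test samples
valid = np.sign(np.random.randn(40))  # "ground truth" for the test split

score, elapsed = bench_shogun(X, y, T, valid)
print score, elapsed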
Example #12
File: prc.py Project: frx/shogun
svm = LibSVM(1000.0, gk, labels)
svm.train()
lda=LDA(1,features,labels)
lda.train()

## plot points
subplot(211)
plot(pos[0,:], pos[1,:], "r.")
plot(neg[0,:], neg[1,:], "b.")
grid(True)
title('Data',size=10)

# plot PRC for SVM
subplot(223)
PRC_evaluation=PRCEvaluation()
PRC_evaluation.evaluate(svm.classify(),labels)
PRC = PRC_evaluation.get_PRC()
plot(PRC[:,0], PRC[:,1])
fill_between(PRC[:,0],PRC[:,1],0,alpha=0.1)
text(0.55,mean(PRC[:,1])/3,'auPRC = %.5f' % PRC_evaluation.get_auPRC())
grid(True)
xlabel('Precision')
ylabel('Recall')
title('LibSVM (Gaussian kernel, C=%.3f) PRC curve' % svm.get_C1(),size=10)

# plot PRC for LDA
subplot(224)
PRC_evaluation.evaluate(lda.classify(),labels)
PRC = PRC_evaluation.get_PRC()
plot(PRC[:,0], PRC[:,1])
fill_between(PRC[:,0],PRC[:,1],0,alpha=0.1)
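The prc.py fragment starts after the data and the classifiers' inputs have been prepared: pos, neg, features, labels and the Gaussian kernel gk are defined earlier in that file. A hedged sketch of the kind of setup it expects; the blob positions, kernel width and sample counts are illustrative:

from numpy import concatenate, ones
from numpy.random import randn, seed
from pylab import subplot, plot, grid, title, text, fill_between, xlabel, ylabel, mean
from shogun.Features import RealFeatures, Labels
from shogun.Kernel import GaussianKernel
from shogun.Classifier import LibSVM, LDA
from shogun.Evaluation import PRCEvaluation

seed(42)
pos = randn(2, 100) + 1   # positive-class points, one column per point
neg = randn(2, 100) - 1   # negative-class points
features = RealFeatures(concatenate((pos, neg), axis=1))
labels = Labels(concatenate((ones(100), -ones(100))))
gk = GaussianKernel(features, features, 1.0)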
Example #13
epsilon = 1e-5
tube_epsilon = 1e-2
svm = LibSVM()
svm.set_C(C, C)
svm.set_epsilon(epsilon)
svm.set_tube_epsilon(tube_epsilon)

for i in xrange(3):
    data_train = random.rand(num_feats, num_vec)
    data_test = random.rand(num_feats, num_vec)
    feats_train = RealFeatures(data_train)
    feats_test = RealFeatures(data_test)
    labels = Labels(random.rand(num_vec).round() * 2 - 1)

    svm.set_kernel(LinearKernel(size_cache, scale))
    svm.set_labels(labels)

    kernel = svm.get_kernel()
    print "kernel cache size: %s" % (kernel.get_cache_size())

    kernel.init(feats_test, feats_test)
    svm.train()

    kernel.init(feats_train, feats_test)
    print svm.classify().get_labels()

    #kernel.remove_lhs_and_rhs()

    #import pdb
    #pdb.set_trace()
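The kernel-cache loop above is also a fragment; the constants and classes it uses come from earlier in its source file. A hedged preamble sketch, with all values chosen arbitrarily:

from numpy import random
from shogun.Features import RealFeatures, Labels
from shogun.Kernel import LinearKernel
from shogun.Classifier import LibSVM

num_feats = 10    # dimensionality of each random vector
num_vec = 50      # number of vectors per matrix
size_cache = 10   # kernel cache size
scale = 1.0       # scale argument for LinearKernel
C = 1.0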

Example #15
        inner_sum = inner_sum + alpha * kv.kernel(sv_id, idx)

    inner.append(inner_sum)

    #general case
    linterm_manual[idx] = B * tmp_lab[idx] * inner_sum - 1.0

################
# compare pre-svms

assert (presvm_liblinear.get_bias() == 0.0)
assert (presvm_libsvm.get_bias() == 0.0)

tmp_out = presvm_liblinear.classify(feat).get_labels()
tmp_out2 = presvm_libsvm.classify(feat).get_labels()

# compare outputs
for i in xrange(N):

    try:
        assert (abs(inner[i] - tmp_out[i]) <= 0.001)
        assert (abs(inner[i] - tmp_out2[i]) <= 0.001)
    except Exception, message:
        print "difference in outputs: (%.4f, %.4f, %.4f)" % (tmp_out[i],
                                                             tmp_out2[i])

###############
# compare to LibSVM

dasvm_manual_libsvm = LibSVM(1.0, wdk, lab)
        
from numpy import *
from numpy.random import rand
from shogun.Features import RealFeatures, Labels
from shogun.Kernel import CustomKernel
from shogun.Classifier import LibSVM

C=1
dim=7

lab=sign(2*rand(dim) - 1)
data=rand(dim, dim)
symdata=data*data.T

kernel=CustomKernel()
kernel.set_full_kernel_matrix_from_full(data)
labels=Labels(lab)
svm=LibSVM(C, kernel, labels)
svm.train()
out=svm.classify().get_labels()