Example #1
############################################################################
# TODO                                                                     #
# You will change this line to vary C.                                     #
############################################################################

C = 1

############################################################################

svm = LinearSVM_twoclass()
svm.theta = np.zeros((XX.shape[1], ))
svm.train(XX, yy, learning_rate=1e-4, C=C, num_iters=50000, verbose=True)

# classify the training data

y_pred = svm.predict(XX)

print "Accuracy on training data = ", metrics.accuracy_score(yy, y_pred)

# visualize the decision boundary

utils.plot_decision_boundary(scaleX, y, svm, 'x1', 'x2', ['neg', 'pos'])
plt.savefig('fig2.pdf')

############################################################################
#  Part  3: Training SVM with a kernel                                     #
#  We train an SVM with an RBF kernel on the data set and plot the         #
#  learned decision boundary                                               #
############################################################################

# test your Gaussian kernel implementation
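# A minimal sanity check, as a sketch: for two fixed vectors the kernel value
# should match the closed form exp(-||x1 - x2||^2 / (2 * sigma^2)). The
# utils.gaussian_kernel(x1, x2, sigma) signature is assumed from its use in
# the grid-search examples below.

x1 = np.array([1.0, 2.0, 1.0])
x2 = np.array([0.0, 4.0, -1.0])
sigma = 2.0
expected = np.exp(-np.sum((x1 - x2) ** 2) / (2 * sigma ** 2))
print "utils.gaussian_kernel value = ", utils.gaussian_kernel(x1, x2, sigma)
print "expected closed-form value  = ", expected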
Example #2
best_acc = 0
for sigma_val in sigma_vals:
    K = np.array([
        utils.gaussian_kernel(x1, x2, sigma_val)
        for x1 in X_train
        for x2 in X_train
    ]).reshape(X_train.shape[0], X_train.shape[0])
    scaler = preprocessing.StandardScaler().fit(K)
    scaleK = scaler.transform(K)
    KK = np.hstack([np.ones((scaleK.shape[0], 1)), scaleK])
    Kval = np.array([
        utils.gaussian_kernel(x1, x2, sigma_val)
        for x1 in X_val
        for x2 in X_train
    ]).reshape(X_val.shape[0], X_train.shape[0])
    scaleKval = scaler.transform(Kval)
    KKval = np.hstack([np.ones((scaleKval.shape[0], 1)), scaleKval])
    for Cval in Cvals:
        for learning_rate in learning_rates:
            for iteration in iterations:
                svm = LinearSVM_twoclass()
                svm.theta = np.zeros((KK.shape[1], ))
                svm.train(KK,
                          y_train,
                          learning_rate=learning_rate,
                          C=Cval,
                          num_iters=iteration,
                          verbose=True)
                y_pred = svm.predict(KKval)
                acc = np.mean(y_pred == y_val)
                if acc > best_acc:
                    best_acc = acc
                    best_C = Cval
                    best_sigma = sigma_val
                    best_iteration = iteration
                    best_learning_rate = learning_rate

print "For gussian kernel: "
print "the best acc is ", best_acc
print "the best c is ", best_C
print "the best sigma is ", best_sigma
print "the best learning rate is ", best_learning_rate
print "the best iteration is ", best_iteration
Example #3
from sklearn.metrics.pairwise import rbf_kernel

svm = LinearSVM_twoclass()
best_acc = 0
for sigma in sigma_vals:
    K = rbf_kernel(X, X, gamma=1.0 / (2 * sigma ** 2))
    scaler = preprocessing.StandardScaler().fit(K)
    scaleK = scaler.transform(K)
    KK = np.vstack([np.ones((scaleK.shape[0],)), scaleK]).T

    Kval = rbf_kernel(Xval, X, gamma=1.0 / (2 * sigma ** 2))
    scaleKval = scaler.transform(Kval)
    KKval = np.vstack([np.ones((scaleKval.shape[0],)), scaleKval.T]).T
    print "Done!"
    for C in Cvals:
        print "sigma=", sigma, ", C=", C
        sys.stdout.flush()
        svm.theta = np.zeros((KK.shape[1],))
        svm.train(KK, yy, learning_rate=lr, C=C, num_iters=it)

        y_train_pred = svm.predict(KK)
        acc = metrics.accuracy_score(yy, y_train_pred)
        print "C=", C, "sigma=", sigma, "TrainAccuracy:", acc

        y_pred = svm.predict(KKval)
        acc = metrics.accuracy_score(yyval, y_pred)
        print "C=", C, "sigma=", sigma, "ValAccuracy:", acc
        if acc > best_acc:
            best_acc = acc
            best_C = C
            best_sigma = sigma

print "Best C is ", best_C, ", best sigma is ", best_sigma
sys.stdout.flush()

svm = LinearSVM_twoclass()
Example #4
############################################################################
# TODO                                                                     #
# You will change this line to vary C.                                     #
############################################################################

C = 100.

############################################################################

svm = LinearSVM_twoclass()
svm.theta = np.zeros((XX.shape[1],))
svm.train(XX, yy, learning_rate=1e-4, C=C, num_iters=50000, verbose=True)

# classify the training data

y_pred = svm.predict(XX)

print "Accuracy on training data = ", metrics.accuracy_score(yy,y_pred)

# visualize the decision boundary

utils.plot_decision_boundary(scaleX, y, svm, 'x1', 'x2', ['neg', 'pos'])
plt.savefig('fig2.pdf')

############################################################################
#  Part  3: Training SVM with a kernel                                     #
#  We train an SVM with an RBF kernel on the data set and plot the         #
#  learned decision boundary                                               #
############################################################################

# test your Gaussian kernel implementation
Example #5
best_C = 0.1
best_lr = 1e-1
best_it = 10000

svm = LinearSVM_twoclass()
svm.theta = np.zeros((X.shape[1], ))
svm.train(X, yy, learning_rate=best_lr, C=best_C, num_iters=best_it, verbose=True)

#############################################################################
#  end of your code                                                         #
#############################################################################

#############################################################################
# what is the accuracy of the best model on the training data itself?       #
#############################################################################
# 2 lines of code expected

y_pred = svm.predict(X)
print "Accuracy of model on training data is: ", metrics.accuracy_score(yy, y_pred)


#############################################################################
# what is the accuracy of the best model on the test data?                  #
#############################################################################
# 2 lines of code expected


yy_test = np.ones(y_test.shape)
yy_test[y_test == 0] = -1
test_pred = svm.predict(X_test)
print "Accuracy of model on test data is: ", metrics.accuracy_score(yy_test, test_pred)

Example #6
best_acc = 0
for sigma_val in sigma_vals:
    K = np.array([
        utils.gaussian_kernel(x1, x2, sigma_val)
        for x1 in X_train
        for x2 in X_train
    ]).reshape(X_train.shape[0], X_train.shape[0])
    scaler = preprocessing.StandardScaler().fit(K)
    scaleK = scaler.transform(K)
    KK = np.hstack([np.ones((scaleK.shape[0], 1)), scaleK])
    Kval = np.array([
        utils.gaussian_kernel(x1, x2, sigma_val)
        for x1 in X_val
        for x2 in X_train
    ]).reshape(X_val.shape[0], X_train.shape[0])
    scaleKval = scaler.transform(Kval)
    KKval = np.hstack([np.ones((scaleKval.shape[0], 1)), scaleKval])
    for Cval in Cvals:
        for learning_rate in learning_rates:
            for iteration in iterations:
                svm = LinearSVM_twoclass()
                svm.theta = np.zeros((KK.shape[1],))
                svm.train(KK,
                          y_train,
                          learning_rate=learning_rate,
                          C=Cval,
                          num_iters=iteration,
                          verbose=True)
                y_pred = svm.predict(KKval)
                acc = np.mean(y_pred == y_val)
                if acc > best_acc:
                    best_acc = acc
                    best_C = Cval
                    best_sigma = sigma_val
                    best_iteration = iteration
                    best_learning_rate = learning_rate

print "For gussian kernel: "
print "the best acc is ", best_acc
print "the best c is ",  best_C
print "the best sigma is ", best_sigma
print "the best learning rate is ", best_learning_rate
print "the best iteration is ", best_iteration
Example #7
X_train, X_val, y_train, y_val = train_test_split(X, yy, test_size=0.2)

iters = [5 * i for i in range(1601)]
trace = {}
for sigma in [10]:
    trace[sigma] = []
    K = metrics.pairwise.rbf_kernel(X_train, X_train, gamma=1.0 / sigma ** 2)
    scaler = preprocessing.StandardScaler().fit(K)
    scaleK = scaler.transform(K)
    KK = np.vstack([np.ones((scaleK.shape[0],)), scaleK]).T
    Kval = metrics.pairwise.rbf_kernel(X_val, X_train, gamma=1.0 / sigma ** 2)
    scaleKval = scaler.transform(Kval)
    KKval = np.vstack([np.ones((scaleKval.shape[0],)), scaleKval.T]).T
    svm = LinearSVM_twoclass()
    svm.theta = np.zeros((KK.shape[1],))
    trace[sigma].append(metrics.accuracy_score(y_val, svm.predict(KKval)))
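    # Train in short 5-iteration bursts; svm.theta is assumed to carry over
    # between calls, so each call resumes from the previous solution and the
    # per-burst accuracies trace out a learning curve.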
    for i in range(0, 1600):
        svm.train(KK, y_train, learning_rate=0.01, C=2, num_iters=5, verbose=True)
        a = metrics.accuracy_score(y_train, svm.predict(KK))
        b = metrics.accuracy_score(y_val, svm.predict(KKval))
        trace[sigma].append(b)
        print ("%.2f,%d,%f,%f\d" % (sigma, i, a, b))
    plt.plot(iters, trace[sigma], label="sigma=%.2f" % sigma)
plt.xlabel("iterations")
plt.ylabel("Accuracy on validation set")
plt.legend(bbox_to_anchor=(0.9, 0.9))
plt.savefig("kernal_sigma.pdf")


# svm.train(X,yy,learning_rate=0.1,C=0.1,num_iters=2000,verbose=True)
Example #8
C = 100.0

############################################################################

svm = LinearSVM_twoclass()
svm.theta = np.zeros((XX.shape[1],))
svm.train(XX, yy, learning_rate=1e-4, C=C, num_iters=50000, verbose=False)

# classify the training data

y_pred = svm.predict(XX)

print "Accuracy on training data = ", metrics.accuracy_score(yy,y_pred)

# visualize the decision boundary

utils.plot_decision_boundary(scaleX, y, svm, 'x1', 'x2', ['neg', 'pos'])
plt.savefig('fig2.pdf')

############################################################################
#  Part  3: Training SVM with a kernel                                     #
#  We train an SVM with an RBF kernel on the data set and plot the         #
#  learned decision boundary                                               #
############################################################################

# test your Gaussian kernel implementation