Example #1
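These snippets are shown without their imports. They appear to rely on roughly the following; numpy, random, matplotlib and sklearn.preprocessing are evident from the calls, while the module names of the project-local helpers (svm, pu) and the bodies of the labeling functions func/func2 are assumptions inferred from how they are used:

import random

import numpy as np
import matplotlib.pyplot as plt
from sklearn import preprocessing

import svm                 # project-local SVM trainer (assumed module name)
import plot_utils as pu    # project-local plotting helpers (assumed module name)

# Hypothetical stand-ins for the labeling helpers: func draws a linearly
# separable boundary, func2 one that a linear SVM cannot separate.
def func(x):
    return 1.0 if x[0] + x[1] > 0 else -1.0

def func2(x):
    return 1.0 if x[0] ** 2 + x[1] ** 2 > 25 else -1.0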
def main():
    m = 350
    random.seed(2)
    X = np.empty([m, 2])
    # m distinct integers per feature, scaled to [-10, 10)
    X[:, 0] = np.array(random.sample(range(-10000, 10000), m)) / 1000.0
    X[:, 1] = np.array(random.sample(range(-10000, 10000), m)) / 1000.0

    #not separable
    y = np.empty([m, 1])
    for i in range(X.shape[0]):
        y[i] = func2(X[i, :])

    #plot data and decision surface
    ax = pu.plot_data(X, y)
    pu.plot_surface(X, y, X[:, 0], X[:, 1], disc_func=func, ax=ax)
    plt.show()

    #train svm
    #change c to hard/soft margins
    w, w0, support_vectors_idx = svm.train(X, y, c=99999, eps=0.1)

    #plot result
    predicted_labels = svm.classify_all(X, w, w0)
    print("Accuracy: {}".format(svm.getAccuracy(y, predicted_labels)))

    ax = pu.plot_data(X, y, support_vectors_idx)
    pu.plot_surfaceSVM(X[:, 0], X[:, 1], w, w0, ax=ax)
    plt.show()
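As a sanity check of the soft-margin run above, the same kind of configuration can be reproduced with scikit-learn's SVC. This is only a sketch on stand-in data: the circular labeling rule below is an assumption, not the original func2.

import numpy as np
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score

rng = np.random.default_rng(2)
X_demo = rng.uniform(-10, 10, size=(350, 2))
# assumed stand-in for func2: a circular boundary, not linearly separable
y_demo = np.where(X_demo[:, 0] ** 2 + X_demo[:, 1] ** 2 > 25, 1.0, -1.0)

# a very large C approximates the hard-margin behaviour of c=99999 above
clf = SVC(kernel='linear', C=99999.0)
clf.fit(X_demo, y_demo)
print("linear SVC accuracy:", accuracy_score(y_demo, clf.predict(X_demo)))
print("support vector count:", len(clf.support_))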
Example #2
def main():
    m=100
    X = np.empty([m,2])
    X[:, 0] = np.array(random.sample(range(-10000, 10000), m)) / 1000.0
    X[:, 1] = np.array(random.sample(range(-10000, 10000), m)) / 1000.0

    # preprocessing.scale(X)

    #linearly separable
    y = np.empty([m,1])
    for i in range(m):
        y[i] = func(X[i, :])

    #plot data and decision surface
    ax = pu.plot_data(X,y)
    pu.plot_surface(X,y, X[:, 0], X[:,1], disc_func=func, ax=ax)
    plt.show()

    #train svm

    w,w0, support_vectors_idx = svm.train(X,y,c=999999999999999, eps=10, type='gaussian')
    # w, w0, support_vectors_idx = svm.train(X, y, c=999999999999999, eps=10, type='polynomial')
    #plot result
    predicted_labels = svm.classify_all(X,w,w0)
    print("Accuracy: {}".format(svm.getAccuracy(y,predicted_labels)))


    ax = pu.plot_data(X,y, support_vectors_idx)
    pu.plot_surfaceSVM(X[:,0], X[:,1], w,w0, ax=ax)
    plt.show()
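The type='gaussian' and type='polynomial' options above correspond to the 'rbf' and 'poly' kernels in scikit-learn. A minimal sketch of the equivalent call, again on stand-in data rather than the original func labels:

import numpy as np
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score

rng = np.random.default_rng(0)
X_demo = rng.uniform(-10, 10, size=(100, 2))
# assumed stand-in for func: a linearly separable labeling rule
y_demo = np.where(X_demo[:, 0] + X_demo[:, 1] > 0, 1.0, -1.0)

# kernel='rbf' plays the role of type='gaussian'; use kernel='poly' for type='polynomial'
clf = SVC(kernel='rbf', C=1e9, gamma='scale')
clf.fit(X_demo, y_demo)
print("RBF SVC accuracy:", accuracy_score(y_demo, clf.predict(X_demo)))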
def main():
    m = 150
    random.seed(2)
    X = np.empty([m, 2])
    X[:, 0] = np.array(random.sample(range(-10000, 10000), m)) / 1000.0
    X[:, 1] = np.array(random.sample(range(-10000, 10000), m)) / 1000.0

    X = preprocessing.scale(X)  # scale() returns a standardized copy, so keep the result

    #linearly separable
    y = np.empty([m, 1])
    for i in range(m):
        y[i] = func(X[i, :])

    # shuffle
    p = np.random.permutation(len(X))
    X = X[p]
    y = y[p]

    #plot data and decision surface
    ax = pu.plot_data(X, y)
    pu.plot_surface(X, y, X[:, 0], X[:, 1], disc_func=func, ax=ax)
    plt.show()

    #train svm
    w, w0, support_vectors_idx = svm.train(X, y, c=9999, eps=0.000001)

    #plot result
    predicted_labels = svm.classify_all(X, w, w0)
    print("Accuracy: {}".format(svm.getAccuracy(y, predicted_labels)))

    kfold = svm.kfoldCrossValidation(X, y, 10, 1, c=999999999, eps=0.000001)
    print(kfold)

    ax = pu.plot_data(X, y, support_vectors_idx)
    pu.plot_surfaceSVM(X[:, 0], X[:, 1], w, w0, ax=ax)
    plt.show()
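The kfoldCrossValidation call above appears to evaluate the trainer over 10 folds; the closest scikit-learn idiom is cross_val_score. A minimal sketch on stand-in data (the labeling rule is assumed, and the exact signature of svm.kfoldCrossValidation is project-specific):

import numpy as np
from sklearn.model_selection import cross_val_score
from sklearn.svm import SVC

rng = np.random.default_rng(2)
X_demo = rng.uniform(-10, 10, size=(150, 2))
y_demo = np.where(X_demo[:, 0] + X_demo[:, 1] > 0, 1.0, -1.0)  # assumed stand-in for func

# 10-fold cross-validation with a (nearly) hard-margin linear SVM
scores = cross_val_score(SVC(kernel='linear', C=1e9), X_demo, y_demo, cv=10)
print("mean 10-fold accuracy: {}".format(scores.mean()))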