Example #1
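These snippets are shown without their imports. A minimal header matching the names they use would look roughly like the following; the `svm`, `pu`, `func`/`func2` helpers are project-local, so their import lines below are only hedged placeholders.

import math
import random

import numpy as np
import pandas
import matplotlib.pyplot as plt
from sklearn import preprocessing

# project-local helpers (exact module paths are not shown in the listing):
# import svm                # custom SVM: train, classify_all, getAccuracy, kfoldCrossValidation
# import plot_utils as pu   # plotting helpers: plot_data, plot_surface, plot_surfaceSVM (name assumed)
# func, func2               # labelling functions defined elsewhere in the project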
def main():
    m = 350
    random.seed(2)
    X = np.empty([m, 2])
    X[:, 0] = np.array(random.sample(range(-10000, 10000), m)) / 1000.0
    X[:, 1] = np.array(random.sample(range(-10000, 10000), m)) / 1000.0

    #not separable
    y = np.empty([m, 1])
    for i in range(X.shape[0]):
        y[i] = func2(X[i, :])

    #plot data and decision surface
    ax = pu.plot_data(X, y)
    pu.plot_surface(X, y, X[:, 0], X[:, 1], disc_func=func, ax=ax)
    plt.show()

    # train svm
    # change c to switch between hard (large c) and soft (small c) margins
    w, w0, support_vectors_idx = svm.train(X, y, c=99999, eps=0.1)

    #plot result
    predicted_labels = svm.classify_all(X, w, w0)
    print("Accuracy: {}".format(svm.getAccuracy(y, predicted_labels)))

    ax = pu.plot_data(X, y, support_vectors_idx)
    pu.plot_surfaceSVM(X[:, 0], X[:, 1], w, w0, ax=ax)
    plt.show()
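For reference, the `svm` interface these examples rely on, inferred purely from the calls in this listing (a sketch, not the module's real documentation):

# w, w0, support_vectors_idx = svm.train(X, y, c=..., eps=..., type=...)
#     returns the weight vector, bias and support-vector indices; `type` selects a
#     kernel ("gaussian" and "polynomial" appear below; the kwarg is omitted in the linear examples)
# predicted_labels = svm.classify_all(X, w, w0)
# accuracy = svm.getAccuracy(y, predicted_labels)
# result = svm.kfoldCrossValidation(X, y, 10, 1, c=..., eps=..., type=...)
#     10-fold cross-validation; the fourth positional argument is always 1 in these
#     examples and its meaning is not shown here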
Example #3
def main():
    data = pandas.read_csv("Data/car.data", sep=",", header=0, index_col=False)
    data = pandas.get_dummies(data)
    arr = data.to_numpy()  # as_matrix() was removed in recent pandas versions
    use = [k for k in range(arr.shape[0]) if (arr[k, 0] == -1 or arr[k, 0] == 1)]
    arr = arr[use]
    X = arr[:, 1:22]
    y = arr[:, 0]

    # normalize
    # X = preprocessing.scale(X)
    # shuffle
    p = np.random.permutation(len(X))

    X = X[p]
    y = y[p]

    # train svm
    w, w0, support_vectors_idx = svm.train(X, y, c=99, eps=0.00001)

    # get accuracy
    predicted_labels = svm.classify_all(X, w, w0)
    print("Accuracy: {}".format(svm.getAccuracy(y, predicted_labels)))
    # evaluate performance with 10-fold cross-validation
    kfold = svm.kfoldCrossValidation(X, y, 10, 1, c=99, eps=0.00001)
    print(kfold)

    # evaluate performance with Gaussian kernel function
    kfold = svm.kfoldCrossValidation(X, y, 10, 1, c=99, eps=0.00001, type="gaussian")
    print(kfold)

    # evaluate performance with polynomial kernel function
    kfold = svm.kfoldCrossValidation(X, y, 10, 1, c=99, eps=0.00001, type="polynomial")
    print(kfold)
Example #4
def main():

    dic = {'a': 0, 'b': 1, '?': -1}
    # encoding='utf-8' makes genfromtxt pass str (not bytes) to the converter under Python 3
    data = np.genfromtxt('Data/credits.data', skip_header=1, delimiter=',', encoding='utf-8',
                         usecols=[0, 1, 2, 7, 10, 15], converters={0: lambda s: dic[s]})

    use = [k for k in range(len(data)) if data[k][0] != -1 and (not math.isnan(data[k][1]))]
    data = data[use]

    X = np.empty([len(data),5])
    y = np.empty([len(data), 1])
    for i in range(len(data)):

        for j in range(len(data[i])-1):
            X[i,j] = data[i][j]
        y[i] = data[i][5]

    # X[:, 1] = preprocessing.scale(X[:, 1])
    # train svm
    w, w0, support_vectors_idx = svm.train(X[:, 1:5], y, c=10, eps=1)

    # report accuracy
    predicted_labels = svm.classify_all(X[:, 1:5], w, w0)
    print("Accuracy: {}".format(svm.getAccuracy(y, predicted_labels)))
Example #5
def main():
    m = 100
    X = np.empty([m, 2])
    X[:, 0] = np.array(random.sample(range(-10000, 10000), m)) / 1000.0
    X[:, 1] = np.array(random.sample(range(-10000, 10000), m)) / 1000.0

    # preprocessing.scale(X)

    #linearly separable
    y = np.empty([m,1])
    for i in range(m):
        y[i] = func(X[i, :])

    #plot data and decision surface
    ax = pu.plot_data(X,y)
    pu.plot_surface(X,y, X[:, 0], X[:,1], disc_func=func, ax=ax)
    plt.show()

    #train svm

    w,w0, support_vectors_idx = svm.train(X,y,c=999999999999999, eps=10, type='gaussian')
    # w, w0, support_vectors_idx = svm.train(X, y, c=999999999999999, eps=10, type='polynomial')
    #plot result
    predicted_labels = svm.classify_all(X,w,w0)
    print("Accuracy: {}".format(svm.getAccuracy(y,predicted_labels)))


    ax = pu.plot_data(X,y, support_vectors_idx)
    pu.plot_surfaceSVM(X[:,0], X[:,1], w,w0, ax=ax)
    plt.show()
Example #6
def main():
    data = pandas.read_csv('Data/credits.data', sep=',', header=0, index_col=False)
    data = pandas.get_dummies(data)
    arr = data.to_numpy()  # as_matrix() was removed in recent pandas versions
    X = arr[:, list(range(0, 6)) + list(range(7, 47))]  # all feature columns, skipping the label in column 6
    y = arr[:, 6]

    # shuffle
    p = np.random.permutation(len(X))
    X = X[p]
    y = y[p]

    # train svm
    # w, w0, support_vectors_idx = svm.train(X[:, [0, 1, 2, 3, 4, 5, 6, 7]], y, c=999, eps=0.000001)
    w, w0, support_vectors_idx = svm.train(X, y, c=99999, eps=0.000000001)

    # report accuracy
    predicted_labels = svm.classify_all(X, w, w0)
    print("Accuracy: {}".format(svm.getAccuracy(y, predicted_labels)))

    # evaluate performance with 10-fold cross-validation
    kfold = svm.kfoldCrossValidation(X, y, 10, 1, c=99, eps=0.00001)
    print(kfold)
def main():
    m = 150
    random.seed(2)
    X = np.empty([m, 2])
    X[:, 0] = np.array(random.sample(range(-10000, 10000), m)) / 1000.0
    X[:, 1] = np.array(random.sample(range(-10000, 10000), m)) / 1000.0

    # scale() returns a new array, so assign the result back to X
    X = preprocessing.scale(X)

    #linearly separable
    y = np.empty([m, 1])
    for i in range(m):
        y[i] = func(X[i, :])

    # shuffle
    p = np.random.permutation(len(X))
    X = X[p]
    y = y[p]

    #plot data and decision surface
    ax = pu.plot_data(X, y)
    pu.plot_surface(X, y, X[:, 0], X[:, 1], disc_func=func, ax=ax)
    plt.show()

    #train svm
    w, w0, support_vectors_idx = svm.train(X, y, c=9999, eps=0.000001)

    #plot result
    predicted_labels = svm.classify_all(X, w, w0)
    print("Accuracy: {}".format(svm.getAccuracy(y, predicted_labels)))

    kfold = svm.kfoldCrossValidation(X, y, 10, 1, c=999999999, eps=0.000001)
    print(kfold)

    ax = pu.plot_data(X, y, support_vectors_idx)
    pu.plot_surfaceSVM(X[:, 0], X[:, 1], w, w0, ax=ax)
    plt.show()