Example #1
File: svm.py Project: sebobo233/CI
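Note: every snippet on this page assumes numpy, matplotlib, and scikit-learn are imported and that the course's plotting helpers (plot_svm_decision_boundary, plot_score_vs_degree, plot_score_vs_gamma) are in scope. The original helpers are not reproduced on this page, so the definitions below are a minimal stand-in sketch, assumed only so the snippets are self-contained.

import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm


def plot_svm_decision_boundary(clf, x_train, y_train, x_test=None, y_test=None):
    # Stand-in sketch (assumption): draw the zero level of the decision
    # function of a binary SVC, scatter the data, and ring the support vectors.
    x_all = x_train if x_test is None else np.vstack((x_train, x_test))
    xx, yy = np.meshgrid(
        np.linspace(x_all[:, 0].min() - 1, x_all[:, 0].max() + 1, 200),
        np.linspace(x_all[:, 1].min() - 1, x_all[:, 1].max() + 1, 200))
    zz = clf.decision_function(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.contour(xx, yy, zz, levels=[0], colors='k')
    plt.scatter(x_train[:, 0], x_train[:, 1], c=y_train, marker='o')
    if x_test is not None:
        plt.scatter(x_test[:, 0], x_test[:, 1], c=y_test, marker='x')
    sv = clf.support_vectors_
    plt.scatter(sv[:, 0], sv[:, 1], s=120, facecolors='none', edgecolors='k')
    plt.show()


def plot_score_vs_degree(train_scores, test_scores, degrees):
    # Stand-in sketch (assumption): train/test accuracy vs. polynomial degree.
    plt.plot(degrees, train_scores, label='train score')
    plt.plot(degrees, test_scores, label='test score')
    plt.xlabel('degree')
    plt.ylabel('mean accuracy')
    plt.legend()
    plt.show()


def plot_score_vs_gamma(train_scores, test_scores, gammas):
    # Stand-in sketch (assumption): train/test accuracy vs. RBF gamma.
    plt.plot(gammas, train_scores, label='train score')
    plt.plot(gammas, test_scores, label='test score')
    plt.xlabel('gamma')
    plt.ylabel('mean accuracy')
    plt.legend()
    plt.show()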
def ex_2_c(x_train, y_train, x_test, y_test):
    """
    Solution for exercise 2 c)
    :param x_train: Training samples (2-dimensional)
    :param y_train: Training labels
    :param x_test: Testing samples (2-dimensional)
    :param y_test: Testing labels
    :return:
    """
    ###########
    ## TODO:
    ## Train SVMs with RBF kernels for different values of the gamma
    ## and plot the variation of the test and training scores with gamma using 'plot_score_vs_gamma' function.
    ## Plot the decision boundary and support vectors for the best value of gamma
    ## using 'plot_svm_decision_boundary' function
    ###########
    gammas = np.arange(0.01, 2, 0.02)

    train_scores = np.zeros(np.size(gammas))
    test_scores = np.zeros(np.size(gammas))

    for j in range(np.size(gammas)):
        svc = svm.SVC(kernel='rbf', gamma=gammas[j]).fit(x_train, y_train)
        test_scores[j] = svc.score(x_test, y_test)
        train_scores[j] = svc.score(x_train, y_train)

    plot_score_vs_gamma(train_scores, test_scores, gammas)

    acc_max = test_scores.argmax()
    svc = svm.SVC(kernel='rbf', gamma=gammas[acc_max]).fit(x_train, y_train)
    plot_svm_decision_boundary(svc, x_train, y_train, x_test, y_test)
Example #2
File: svm.py Project: sugano-nu/uni17ci
def ex_1_c(x, y):
    """
    Solution for exercise 1 c)
    :param x: The x values
    :param y: The y values
    :return:
    """
    ###########
    ## TODO:
    ## Add a point (4,0) with label 1 to the data set and then
    ## train an SVM with a linear kernel with different values of C
    ## and plot the decision boundary and support vectors for each using 'plot_svm_decision_boundary' function
    ###########
    x_new = np.vstack((x, (4, 0)))
    y_new = np.hstack((y, 1))

    Cs = [1e6, 1, 0.1, 0.001]

    for C in Cs:
        clf = svm.SVC(kernel="linear", C=C)
        clf.fit(x_new, y_new)
        print("Support Vectors (C=" + str(C) + "): " +
              str(len(clf.support_vectors_)))
        plot_svm_decision_boundary(clf, x_new, y_new)
Example #3
def ex_1_c(x, y):
    """
    Solution for exercise 1 c)
    :param x: The x values
    :param y: The y values
    :return:
    """
    ###########
    ## TODO:
    ## Add a point (4,0) with label 1 to the data set and then
    ## train an SVM with a linear kernel with different values of C
    ## and plot the decision boundary and support vectors for each using 'plot_svm_decision_boundary' function
    ###########
    Cs = [1e6, 1, 0.1, 0.001]

    point_x = [4, 0]
    point_y = [1]
    x_extended = np.vstack((x, point_x))
    y_extended = np.hstack((y, point_y))
    for C in Cs:
        clf = svm.SVC(kernel='linear', C=C)
        clf.fit(x_extended, y_extended)
        plot_svm_decision_boundary(clf, x_extended, y_extended)
Example #4
def ex_1_c(x, y):
    """
    Solution for exercise 1 c)
    :param x: The x values
    :param y: The y values
    :return:
    """
    ###########
    ## TODO:
    ## Add a point (4,0) with label 1 to the data set and then
    ## train an SVM with a linear kernel with different values of C
    ## and plot the decision boundary and support vectors for each using 'plot_svm_decision_boundary' function
    ###########

    kernel_mode = 'linear'

    # given C values
    Cs = [1e6, 1, 0.1, 0.001]

    # adding point (4, 0) with label 1
    x_new = np.vstack((x, [4, 0]))
    y_new = np.hstack((y, 1))

    # loop over all C values
    for m in range(len(Cs)):
        # init linear svm
        lin_svm = svm.SVC(kernel=kernel_mode, C=Cs[m])

        # train linear svm
        lin_svm.fit(x_new, y_new)

        # plotting the decision boundary
        plot_svm_decision_boundary(lin_svm, x_new, y_new)
Example #5
def ex_1_c(x, y):
    """
    Solution for exercise 1 c)
    :param x: The x values
    :param y: The y values
    :return:
    """
    ###########
    ## TODO:
    ## Add a point (4,0) with label 1 to the data set and then
    ## train an SVM with a linear kernel with different values of C
    ## and plot the decision boundary and support vectors for each using 'plot_svm_decision_boundary' function
    ###########
    Cs = [1e6, 1, 0.1, 0.001]

    new_x = (4, 0)
    new_y = 1

    x = np.vstack([x, new_x])
    y = np.hstack([y, new_y])

    for C in Cs:
        clf = svm.SVC(kernel='linear', C=C)
        clf.fit(x, y)
        plot_svm_decision_boundary(clf, x, y)
Example #6
def ex_2_c(x_train, y_train, x_test, y_test):
    """
    Solution for exercise 2 c)
    :param x_train: Training samples (2-dimensional)
    :param y_train: Training labels
    :param x_test: Testing samples (2-dimensional)
    :param y_test: Testing labels
    :return:
    """
    ###########
    ## TODO:
    ## Train SVMs with RBF kernels for different values of the gamma
    ## and plot the variation of the test and training scores with gamma using 'plot_score_vs_gamma' function.
    ## Plot the decision boundary and support vectors for the best value of gamma
    ## using 'plot_svm_decision_boundary' function
    ###########

    # given gamma values
    gammas = np.arange(0.01, 2, 0.02)

    kernel_mode = 'rbf'

    all_train_scores = []
    all_test_scores = []
    temp_highscore = 0
    top_gamma = 0

    # loop over all gamma values, saving all results in between
    for gamma in gammas:
        # init non-linear svm
        rbf_svm = svm.SVC(kernel=kernel_mode, gamma=gamma)
        # train non-linear svm
        rbf_svm.fit(x_train, y_train)
        # calc scores
        temp_train = rbf_svm.score(x_train, y_train)
        temp_test = rbf_svm.score(x_test, y_test)
        # update highscore
        if temp_test > temp_highscore:
            temp_highscore = temp_test
            top_gamma = gamma
        # save scores
        all_train_scores.append(temp_train)
        all_test_scores.append(temp_test)

    print("top gamma: ", top_gamma)
    print("top score: ", temp_highscore)

    # plotting scores
    plot_score_vs_gamma(all_train_scores, all_test_scores, gammas)

    # recreate best svm and plotting it
    best_svm = svm.SVC(kernel=kernel_mode, gamma=top_gamma)
    best_svm.fit(x_train, y_train)
    plot_svm_decision_boundary(best_svm, x_train, y_train, x_test, y_test)
Example #7
def ex_1_b(x, y):
    """
    Solution for exercise 1 b)
    :param x: The x values
    :param y: The y values
    :return:
    """
    ###########
    ## TODO:
    ## Add a point (4,0) with label 1 to the data set and then
    ## train an SVM with a linear kernel
    ## and plot the decision boundary and support vectors using 'plot_svm_decision_boundary' function
    ###########
    # adding point (4, 0) with label 1
    x_new = np.vstack((x, [4, 0]))
    y_new = np.hstack((y, 1))

    # init linear svm
    kernel_mode = 'linear'
    lin_svm = svm.SVC(kernel=kernel_mode)

    # train linear svm
    lin_svm.fit(x_new, y_new)

    # plotting the decision boundary
    plot_svm_decision_boundary(lin_svm, x_new, y_new)
Example #8
def ex_2_a(x_train, y_train, x_test, y_test):
    """
    Solution for exercise 2 a)
    :param x_train: Training samples (2-dimensional)
    :param y_train: Training labels
    :param x_test: Testing samples (2-dimensional)
    :param y_test: Testing labels
    :return:
    """
    ###########
    ## TODO:
    ## Train an SVM with a linear kernel for the given dataset
    ## and plot the decision boundary and support vectors for each using 'plot_svm_decision_boundary' function
    ###########

    linSVM = svm.SVC(kernel="linear")
    linSVM.fit(x_train, y_train)

    train_score = linSVM.score(x_train, y_train)
    test_score = linSVM.score(x_test, y_test)
    print("lin nSV:", linSVM.support_vectors_.shape[0])
    print("train_score for linear kernel: ", train_score)
    print("test_score for linear kernel: ", test_score)

    plot_svm_decision_boundary(linSVM, x_train, y_train, x_test, y_test)
Example #9
def ex_2_a(x_train, y_train, x_test, y_test):
    """
    Solution for exercise 2 a)
    :param x_train: Training samples (2-dimensional)
    :param y_train: Training labels
    :param x_test: Testing samples (2-dimensional)
    :param y_test: Testing labels
    :return:
    """
    ###########
    ## TODO:
    ## Train an SVM with a linear kernel for the given dataset
    ## and plot the decision boundary and support vectors for each using 'plot_svm_decision_boundary' function
    ###########

    # init linear svm
    kernel_mode = 'linear'
    lin_svm = svm.SVC(kernel=kernel_mode)
    # train linear svm
    lin_svm.fit(x_train, y_train)

    # calc. & print svc.score() = mean accuracy of classification
    lin_svm_score = lin_svm.score(x_test, y_test)
    print("lin_svm_score: ", lin_svm_score)

    # plotting the decision boundary
    plot_svm_decision_boundary(lin_svm, x_train, y_train, x_test, y_test)
Example #10
def ex_2_b(x_train, y_train, x_test, y_test):
    """
    Solution for exercise 2 b)
    :param x_train: Training samples (2-dimensional)
    :param y_train: Training labels
    :param x_test: Testing samples (2-dimensional)
    :param y_test: Testing labels
    :return:
    """
    ###########
    ## TODO:
    ## Train SVMs with polynomial kernels for different values of the degree
    ## (Remember to set the 'coef0' parameter to 1)
    ## and plot the variation of the test and training scores with polynomial degree using 'plot_score_vs_degree' func.
    ## Plot the decision boundary and support vectors for the best value of degree
    ## using 'plot_svm_decision_boundary' function
    ###########
    degrees = range(1, 20)
    scorepolylist_test = np.zeros(len(degrees))
    scorepolylist_train = np.zeros(len(degrees))
    for deg in degrees:
        SVMpoly = svm.SVC(kernel='poly', coef0=1, degree=deg)
        SVMpoly.fit(x_train, y_train)
        scorepolylist_test[deg - 1] = SVMpoly.score(x_test, y_test)
        scorepolylist_train[deg - 1] = SVMpoly.score(x_train, y_train)

    max_score_index = np.argmax(scorepolylist_test)
    optimal_deg = max_score_index + 1
    SVMpolyopt = svm.SVC(kernel='poly', coef0=1, degree=optimal_deg)
    SVMpolyopt.fit(x_train, y_train)
    plot_score_vs_degree(scorepolylist_train, scorepolylist_test, degrees)
    plot_svm_decision_boundary(SVMpolyopt, x_train, y_train, x_test, y_test)
    print("Optimal degree", optimal_deg, "Optimal score",
          scorepolylist_test[max_score_index])
Example #11
def ex_1_b(x, y):
    """
    Solution for exercise 1 b)
    :param x: The x values
    :param y: The y values
    :return:
    """
    ###########
    ## TODO:
    ## Add a point (4,0) with label 1 to the data set and then
    ## train an SVM with a linear kernel
    ## and plot the decision boundary and support vectors using 'plot_svm_decision_boundary' function
    ###########
    point = np.array([4, 0])
    x_new = np.vstack((x, point))
    y_new = np.hstack((y, 1))
    linSVM = svm.SVC(kernel="linear")
    linSVM.fit(x_new, y_new)
    plot_svm_decision_boundary(linSVM, x_new, y_new)
Example #12
def ex_2_b(x_train, y_train, x_test, y_test):
    """
    Solution for exercise 2 b)
    :param x_train: Training samples (2-dimensional)
    :param y_train: Training labels
    :param x_test: Testing samples (2-dimensional)
    :param y_test: Testing labels
    :return:
    """
    ###########
    ## TODO:
    ## Train SVMs with polynomial kernels for different values of the degree
    ## (Remember to set the 'coef0' parameter to 1)
    ## and plot the variation of the test and training scores with polynomial degree using 'plot_score_vs_degree' func.
    ## Plot the decision boundary and support vectors for the best value of degree
    ## using 'plot_svm_decision_boundary' function
    ###########

    degrees = range(1, 21)
    machines = [svm.SVC(kernel='poly', degree=d, coef0=1.0) for d in degrees]

    for machine in machines:
        machine.fit(x_train, y_train)

    trainScores = [machine.score(x_train, y_train) for machine in machines]
    testScores = [machine.score(x_test, y_test) for machine in machines]

    plot_score_vs_degree(trainScores, testScores, degrees)

    bestDegree = testScores.index(max(testScores))
    print('Score of best polynomial degree ({}): {}'.format(
        bestDegree + 1, testScores[bestDegree]))
    plot_svm_decision_boundary(machines[bestDegree], x_train, y_train, x_test,
                               y_test)
Example #13
def ex_2_c(x_train, y_train, x_test, y_test):
    """
    Solution for exercise 2 c)
    :param x_train: Training samples (2-dimensional)
    :param y_train: Training labels
    :param x_test: Testing samples (2-dimensional)
    :param y_test: Testing labels
    :return:
    """
    ###########
    ## TODO:
    ## Train SVMs with RBF kernels for different values of the gamma
    ## and plot the variation of the test and training scores with gamma using 'plot_score_vs_gamma' function.
    ## Plot the decision boundary and support vectors for the best value of gamma
    ## using 'plot_svm_decision_boundary' function
    ###########
    gammas = np.arange(0.01, 2, 0.02)
    machines = [svm.SVC(kernel='rbf', gamma=g) for g in gammas]

    for machine in machines:
        machine.fit(x_train, y_train)

    trainScores = [machine.score(x_train, y_train) for machine in machines]
    testScores = [machine.score(x_test, y_test) for machine in machines]

    plot_score_vs_gamma(trainScores, testScores, gammas)

    bestGamma = np.argmax(testScores)
    print('Score of best rbf gamma ({}): {}'.format(gammas[bestGamma],
                                                    testScores[bestGamma]))
    plot_svm_decision_boundary(machines[bestGamma], x_train, y_train, x_test,
                               y_test)
Example #14
def ex_2_c(x_train, y_train, x_test, y_test):
    """
    Solution for exercise 2 c)
    :param x_train: Training samples (2-dimensional)
    :param y_train: Training labels
    :param x_test: Testing samples (2-dimensional)
    :param y_test: Testing labels
    :return:
    """
    ###########
    ## TODO:
    ## Train SVMs with RBF kernels for different values of the gamma
    ## and plot the variation of the test and training scores with gamma using 'plot_score_vs_gamma' function.
    ## Plot the decision boundary and support vectors for the best value of gamma
    ## using 'plot_svm_decision_boundary' function
    ###########
    gammas = np.arange(0.01, 2, 0.02)

    train_scores = list()
    test_scores = list()
    for i in gammas:
        clf = svm.SVC(kernel='rbf', gamma=i)
        clf.fit(x_train, y_train)
        test_scores.append(clf.score(x_test, y_test))
        train_scores.append(clf.score(x_train, y_train))
    plot_score_vs_gamma(train_scores, test_scores, gammas)

    best_idx = test_scores.index(max(test_scores))

    clf = svm.SVC(kernel='rbf', gamma=gammas[best_idx])
    clf.fit(x_train, y_train)
    plot_svm_decision_boundary(clf, x_train, y_train, x_test, y_test)
    print(clf.score(x_test, y_test))
    print(gammas[best_idx])
Example #15
File: svm.py Project: sebobo233/CI
def ex_2_b(x_train, y_train, x_test, y_test):
    """
    Solution for exercise 2 b)
    :param x_train: Training samples (2-dimensional)
    :param y_train: Training labels
    :param x_test: Testing samples (2-dimensional)
    :param y_test: Testing labels
    :return:
    """
    ###########
    ## TODO:
    ## Train SVMs with polynomial kernels for different values of the degree
    ## (Remember to set the 'coef0' parameter to 1)
    ## and plot the variation of the test and training scores with polynomial degree using 'plot_score_vs_degree' func.
    ## Plot the decision boundary and support vectors for the best value of degree
    ## using 'plot_svm_decision_boundary' function
    ###########

    degrees = range(1, 20)

    train_scores = np.zeros(np.size(degrees))
    test_scores = np.zeros(np.size(degrees))

    for j in range(np.size(degrees)):
        svc = svm.SVC(kernel='poly', C=1, coef0=1,
                      degree=degrees[j]).fit(x_train, y_train)
        test_scores[j] = svc.score(x_test, y_test)
        train_scores[j] = svc.score(x_train, y_train)

    plot_score_vs_degree(train_scores, test_scores, degrees)

    acc_max = test_scores.argmax()
    svc = svm.SVC(kernel='poly', C=1, coef0=1,
                  degree=degrees[acc_max]).fit(x_train, y_train)
    plot_svm_decision_boundary(svc, x_train, y_train, x_test, y_test)
Example #16
def ex_1_a(x, y):
    """
    Solution for exercise 1 a)
    :param x: The x values
    :param y: The y values
    :return:
    """
    # create and train the support vector machine.
    clf = svm.SVC(C=1, kernel='linear')
    clf.fit(x, y)

    # plot results
    plot_svm_decision_boundary(clf, x, y)

Example #17
def ex_1_a(x, y):
    """
    Solution for exercise 1 a)
    :param x: The x values
    :param y: The y values
    :return:
    """
    ###########
    # TODO: - done
    # Train an SVM with a linear kernel
    # and plot the decision boundary and support vectors using 'plot_svm_decision_boundary' function
    ###########
    svc = svm.SVC(kernel='linear')
    svc.fit(x, y)
    plot_svm_decision_boundary(svc, x, y)
Example #18
File: svm.py Project: sebobo233/CI
def ex_1_a(x, y):
    """
    Solution for exercise 1 a)
    :param x: The x values
    :param y: The y values
    :return:
    """
    ###########
    ## TODO:
    ## Train an SVM with a linear kernel
    ## and plot the decision boundary and support vectors using 'plot_svm_decision_boundary' function
    ###########

    svc = svm.SVC(kernel='linear', C=1).fit(x, y)
    plot_svm_decision_boundary(svc, x, y)
Example #19
def ex_1_a(x, y):
    """
    Solution for exercise 1 a)
    :param x: The x values
    :param y: The y values
    :return:
    """
    ###########
    ## Train an SVM with a linear kernel
    ## and plot the decision boundary and support vectors using 'plot_svm_decision_boundary' function
    ###########

    clf = svm.SVC(kernel='linear')
    clf.fit(x, y)
    plot_svm_decision_boundary(clf, x, y)
Example #20
def ex_1_a(x, y):
    """
    Solution for exercise 1 a)
    :param x: The x values
    :param y: The y values
    :return:
    """
    ###########
    ## TODO:
    ## Train an SVM with a linear kernel
    ## and plot the decision boundary and support vectors using 'plot_svm_decision_boundary' function
    ###########
    SVM = svm.SVC(kernel='linear')
    SVM.fit(x, y)

    plot_svm_decision_boundary(SVM, x, y)
Example #21
def ex_1_b(x, y):
    """
    Solution for exercise 1 b)
    :param x: The x values
    :param y: The y values
    :return:
    """
    # add the point (4, 0) with label 1 to the data set
    x = np.vstack((x, [4, 0]))
    y = np.hstack((y, 1))

    # create and train the SVM.
    clf = svm.SVC(C=1, kernel='linear')
    clf.fit(x, y)

    # plot results
    plot_svm_decision_boundary(clf, x, y)
Example #22
def ex_1_a(x, y):
    """
    Solution for exercise 1 a)
    :param x: The x values
    :param y: The y values
    :return:
    """
    ###########
    ## TODO:
    ## Train an SVM with a linear kernel
    ## and plot the decision boundary and support vectors using 'plot_svm_decision_boundary' function
    ###########

    linSVM = svm.SVC(kernel="linear")
    linSVM.fit(x, y)
    plot_svm_decision_boundary(linSVM, x, y)
Example #23
def ex_1_b(x, y):
    """
    Solution for exercise 1 b)
    :param x: The x values
    :param y: The y values
    :return:
    """
    ###########
    ## Add a point (4,0) with label 1 to the data set and then
    ## train an SVM with a linear kernel
    ## and plot the decision boundary and support vectors using 'plot_svm_decision_boundary' function
    ###########
    new_x = np.vstack((x, [4, 0]))
    new_y = np.hstack((y, 1))

    clf = svm.SVC(kernel='linear')
    clf.fit(new_x, new_y)
    plot_svm_decision_boundary(clf, new_x, new_y)
Example #24
File: svm.py Project: sebobo233/CI
def ex_1_b(x, y):
    """
    Solution for exercise 1 b)
    :param x: The x values
    :param y: The y values
    :return:
    """
    ###########
    ## TODO:
    ## Add a point (4,0) with label 1 to the data set and then
    ## train an SVM with a linear kernel
    ## and plot the decision boundary and support vectors using 'plot_svm_decision_boundary' function
    ###########
    x = np.append(x, [[4, 0]], axis=0)
    y = np.append(y, 1)

    svc = svm.SVC(kernel='linear', C=1).fit(x, y)
    plot_svm_decision_boundary(svc, x, y)
Example #25
File: svm.py Project: sebobo233/CI
def ex_2_a(x_train, y_train, x_test, y_test):
    """
    Solution for exercise 2 a)
    :param x_train: Training samples (2-dimensional)
    :param y_train: Training labels
    :param x_test: Testing samples (2-dimensional)
    :param y_test: Testing labels
    :return:
    """
    ###########
    ## TODO:
    ## Train an SVM with a linear kernel for the given dataset
    ## and plot the decision boundary and support vectors for each using 'plot_svm_decision_boundary' function
    ###########
    svc = svm.SVC(kernel='linear', C=1).fit(x_train, y_train)
    score_svc = svc.score(x_test, y_test)
    print("Score for linear kernel SVC:", score_svc)
    plot_svm_decision_boundary(svc, x_train, y_train, x_test, y_test)
Example #26
def ex_2_b(x_train, y_train, x_test, y_test):
    """
    Solution for exercise 2 b)
    :param x_train: Training samples (2-dimensional)
    :param y_train: Training labels
    :param x_test: Testing samples (2-dimensional)
    :param y_test: Testing labels
    :return:
    """
    ###########
    ## TODO:
    ## Train SVMs with polynomial kernels for different values of the degree
    ## (Remember to set the 'coef0' parameter to 1)
    ## and plot the variation of the test and training scores with polynomial degree using 'plot_score_vs_degree' func.
    ## Plot the decision boundary and support vectors for the best value of degree
    ## using 'plot_svm_decision_boundary' function
    ###########
    degrees = range(1, 21)

    train_scores = []
    test_scores = []
    polySVMs = []

    for degree in degrees:
        polySVM = svm.SVC(kernel="poly", coef0=1)
        polySVM.set_params(degree=degree)
        polySVM.fit(x_train, y_train)

        train_scores.append(polySVM.score(x_train, y_train))
        test_scores.append(polySVM.score(x_test, y_test))
        polySVMs.append(polySVM)

    best_test_score_index = np.argmax(test_scores)
    print("best_train_score for poly kernel: ",
          train_scores[best_test_score_index])
    print("best_test_score for poly kernel: ",
          test_scores[best_test_score_index])
    print("degree for best test_score: ", degrees[best_test_score_index])
    plot_score_vs_degree(train_scores, test_scores, degrees)
    print("polySVM nSV: ", polySVMs[best_test_score_index].support_vectors_.shape[0])
    plot_svm_decision_boundary(polySVMs[best_test_score_index], x_train,
                               y_train, x_test, y_test)
Example #27
def ex_2_b(x_train, y_train, x_test, y_test):
    """
    Solution for exercise 2 b)
    :param x_train: Training samples (2-dimensional)
    :param y_train: Training labels
    :param x_test: Testing samples (2-dimensional)
    :param y_test: Testing labels
    :return:
    """
    # parameters
    r_value = 1
    degrees = range(1, 21)
    C = 1

    # store the scores
    train_scores = []
    test_scores = []

    # store the created svm so we don't have to train the best twice.
    clfs = []

    # create and train the svm with a polynomial kernel for every d value
    for d in degrees:
        clf = svm.SVC(C=C, kernel='poly', degree=d, coef0=r_value)
        clf.fit(x_train, y_train)
        clfs.append(clf)
        # compute the scores
        train_scores.append(clf.score(x_train, y_train))
        test_scores.append(clf.score(x_test, y_test))

    # find the svm with the best test score
    max_index = test_scores.index(max(test_scores))
    clf = clfs[max_index]
    a = clf.support_vectors_
    print("best d value: {}, with an accuracy of {}".format(
        degrees[max_index], test_scores[max_index]))
    print("number of SV:", len(a))
    # plot the decision boundary on both datasets for the best svm
    plot_svm_decision_boundary(clf, x_train, y_train, x_test, y_test)

    # plot the score as a function of d
    plot_score_vs_degree(train_scores, test_scores, degrees)
Example #28
def ex_2_c(x_train, y_train, x_test, y_test):
    """
    Solution for exercise 2 c)
    :param x_train: Training samples (2-dimensional)
    :param y_train: Training labels
    :param x_test: Testing samples (2-dimensional)
    :param y_test: Testing labels
    :return:
    """
    # parameters
    gammas = np.arange(0.01, 2, 0.02)
    C = 1

    # store the scores
    train_scores = []
    test_scores = []

    # store the created svm so we don't have to train the best twice.
    clfs = []

    # create and train the svm with an RBF kernel for every gamma value
    for g in gammas:
        clf = svm.SVC(C=C, kernel='rbf', gamma=g)
        clf.fit(x_train, y_train)
        clfs.append(clf)
        # compute the scores
        train_scores.append(clf.score(x_train, y_train))
        test_scores.append(clf.score(x_test, y_test))

    # find the svm with the best test score
    max_index = test_scores.index(max(test_scores))
    clf = clfs[max_index]
    a = clf.support_vectors_
    print("best g value: {}, with an accuracy of {}".format(
        gammas[max_index], test_scores[max_index]))
    print("number of SV:", len(a))

    # plot the decision boundary on both datasets for the best svm
    plot_svm_decision_boundary(clf, x_train, y_train, x_test, y_test)

    # plot the score as a function of g
    plot_score_vs_gamma(train_scores, test_scores, gammas)
Example #29
def ex_2_a(x_train, y_train, x_test, y_test):
    """
    Solution for exercise 2 a)
    :param x_train: Training samples (2-dimensional)
    :param y_train: Training labels
    :param x_test: Testing samples (2-dimensional)
    :param y_test: Testing labels
    :return:
    """
    ###########
    ## Train an SVM with a linear kernel for the given dataset
    ## and plot the decision boundary and support vectors for each using 'plot_svm_decision_boundary' function
    ###########

    clf = svm.SVC(kernel='linear')
    clf.fit(x_train, y_train)
    plot_svm_decision_boundary(clf, x_train, y_train, x_test, y_test)
    print("ex_2_a score:", clf.score(x_test, y_test))

Example #30
def ex_2_a(x_train, y_train, x_test, y_test):
    """
    Solution for exercise 2 a)
    :param x_train: Training samples (2-dimensional)
    :param y_train: Training labels
    :param x_test: Testing samples (2-dimensional)
    :param y_test: Testing labels
    :return:
    """
    ###########
    ## TODO:
    ## Train an SVM with a linear kernel for the given dataset
    ## and plot the decision boundary and support vectors for each using 'plot_svm_decision_boundary' function
    ###########

    machine = svm.SVC(kernel='linear')
    machine.fit(x_train, y_train)
    plot_svm_decision_boundary(machine, x_train, y_train, x_test, y_test)

    print('Linear SVM score: {}'.format(machine.score(x_test, y_test)))
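To run any of the snippets above end to end, a hypothetical driver like the one below can be used; it is not part of either project. It fabricates a toy two-class dataset with scikit-learn's make_blobs and train_test_split, so the data, sizes, and random seeds are assumptions; the real assignments load their own data files.

from sklearn.datasets import make_blobs
from sklearn.model_selection import train_test_split

# toy two-class data standing in for the course datasets (assumption)
x, y = make_blobs(n_samples=80, centers=2, random_state=0)
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.5, random_state=0)

ex_1_a(x, y)                               # linear kernel on the full set
ex_1_b(x, y)                               # same, after adding the (4, 0) point
ex_1_c(x, y)                               # sweep over the C values
ex_2_a(x_train, y_train, x_test, y_test)   # linear kernel with a train/test split
ex_2_b(x_train, y_train, x_test, y_test)   # polynomial-degree sweep
ex_2_c(x_train, y_train, x_test, y_test)   # RBF-gamma sweep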