Example #1
import numpy as np
import matplotlib.pyplot as plt


def plot_sample(nonreg=True, k=0):
    """Fit a linear model on the transformed data and plot its decision boundary."""
    (X_train_raw, y_train, X_test_raw, y_test) = get_data()
    X_train = transform(X_train_raw)
    X_test = transform(X_test_raw)

    # Fit plain linear regression, or linear regression with weight decay
    # using regularization strength lambda = 10**k.
    if nonreg:
        w = linear_regression(X_train, y_train)
    else:
        w = weight_decay_regression(X_train, y_train, 10.0**k)

    def plot_decision_fn(X):
        # Classify raw 2D points: transform them, then take the sign of the linear score.
        X_trans = transform(X)
        return np.sign(np.dot(X_trans, w))

    (cont_x, cont_y, cont_z) = decision_boundary_2D(-1, 1, 0.0025, -1, 1, 0.0025,
                                                    plot_decision_fn)

    print("E_in :", linear_error(X_train, y_train, w))
    print("E_out:", linear_error(X_test, y_test, w))

    # Scatter the raw test points, colored by label, and overlay the decision
    # boundary as the classifier's zero level set.
    x_plot = X_test_raw[:, 0]
    y_plot = X_test_raw[:, 1]
    c = np.where(y_test == 1, 'r', 'b')
    plt.scatter(x_plot, y_plot, c=c)
    plt.contour(cont_x, cont_y, cont_z, [0], colors='g')
    plt.xlim([-1, 1])
    plt.ylim([-1, 1])
    plt.grid()
    plt.show()
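
This snippet relies on several helpers from the surrounding project (get_data, transform, linear_regression, weight_decay_regression, linear_error, decision_boundary_2D) that are not shown on this page. The sketch below is one plausible version of the fitting and plotting helpers, assuming pseudo-inverse least squares, the standard weight-decay (ridge) solution, classification error measured as the fraction of sign mismatches, and a dense grid evaluation for the contour; the actual project code may differ.

import numpy as np


def linear_regression(X, y):
    # Least-squares weights via the Moore-Penrose pseudo-inverse: w = X^+ y.
    return np.linalg.pinv(X).dot(y)


def weight_decay_regression(X, y, lam):
    # Weight-decay (ridge) solution: w = (X^T X + lam * I)^{-1} X^T y.
    d = X.shape[1]
    return np.linalg.solve(X.T.dot(X) + lam * np.eye(d), X.T.dot(y))


def linear_error(X, y, w):
    # Fraction of points misclassified by sign(X w).
    return np.mean(np.sign(X.dot(w)) != y)


def decision_boundary_2D(x_min, x_max, x_step, y_min, y_max, y_step, decision_fn):
    # Evaluate decision_fn on a dense grid so plt.contour can trace its zero level set.
    xs = np.arange(x_min, x_max, x_step)
    ys = np.arange(y_min, y_max, y_step)
    grid_x, grid_y = np.meshgrid(xs, ys)
    points = np.column_stack([grid_x.ravel(), grid_y.ravel()])
    z = np.asarray(decision_fn(points), dtype=float).reshape(grid_x.shape)
    return (grid_x, grid_y, z)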
Example #2
import numpy as np
import matplotlib.pyplot as plt


def caltech_classification_data():
    """Plot a neural network's decision boundary on the classification data.

    TODO: figure out how to train the network for classification; the
    network below is only constructed, not trained.
    """
    (X_train_raw, y_train, X_test_raw, y_test) = get_data()

    # Network with 3 input units, one hidden layer of 4 units, and 1 output unit.
    nn = NeuralNetwork([3, 4, 1])

    def plot_decision_fn(X):
        # Forward-propagate each grid point and keep the output-layer activation.
        out = [compute_activations(x, nn)[-1] for x in X]
        return out

    (cont_x, cont_y, cont_z) = decision_boundary_2D(-1, 1, 0.0025, -1, 1, 0.0025,
                                                    plot_decision_fn)

    # Scatter the raw test points, colored by label, and overlay the network's
    # zero level set as the decision boundary.
    x_plot = X_test_raw[:, 0]
    y_plot = X_test_raw[:, 1]
    c = np.where(y_test == 1, 'r', 'b')
    plt.scatter(x_plot, y_plot, c=c)
    plt.contour(cont_x, cont_y, cont_z, [0], colors='g')
    plt.xlim([-1, 1])
    plt.ylim([-1, 1])
    plt.grid()
    plt.show()
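
Example #2 depends on a NeuralNetwork class and a compute_activations helper that are also defined elsewhere in the project. The forward-pass sketch below is only an assumption consistent with the calls above: it takes [3, 4, 1] to mean 3 input units (a constant bias plus the 2D point), 4 hidden units, and 1 output, uses tanh activations, and returns the list of layer activations so that [-1] is the output used by plot_decision_fn. The real class may differ in its details.

import numpy as np


class NeuralNetwork:
    def __init__(self, layer_sizes):
        # One small random weight matrix per layer transition,
        # e.g. [3, 4, 1] gives shapes (3, 4) and (4, 1).
        self.weights = [0.1 * np.random.randn(n_in, n_out)
                        for n_in, n_out in zip(layer_sizes[:-1], layer_sizes[1:])]


def compute_activations(x, nn):
    # Forward pass: augment the raw 2D point with a constant 1, then apply
    # each weight matrix followed by tanh, collecting every layer's activations.
    a = np.concatenate(([1.0], np.asarray(x, dtype=float)))
    activations = [a]
    for W in nn.weights:
        a = np.tanh(a.dot(W))
        activations.append(a)
    return activations

With this convention, compute_activations(x, nn)[-1] is a one-element array whose sign gives the predicted class, which is what the [0] contour level in the plot traces.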