import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import fmin_bfgs
from sklearn.model_selection import train_test_split

import multivariate_normal  # project-local data helper


def main():
    # load sample data
    X, X_label = multivariate_normal.load_data_with_label()

    # split all data into train and test set
    X_train, X_test, X_label_train, X_label_test = train_test_split(X, X_label)

    # estimate theta by minimizing compute_cost (defined elsewhere in the project)
    m, dim = X.shape
    # initial guess: intercept 1, remaining weights 0
    initial_theta = np.insert(np.zeros([dim, 1], dtype=np.float32),
                              0, 1, axis=0)
    theta = fmin_bfgs(compute_cost,
                      initial_theta,
                      args=(X_train, X_label_train))
    print(theta)

    # plot test data
    colors = ['r', 'b']
    for i in range(2):
        x, y = X_test[X_label_test == i, 0], X_test[X_label_test == i, 1]
        plt.scatter(x, y, color=colors[i], marker='x')

    # plot decision boundary: theta[0] + theta[1]*x + theta[2]*y = 0
    a = -1.0 * theta[1] / theta[2]
    xx = np.linspace(-20, 10)
    yy = a * xx - theta[0] / theta[2]
    plt.plot(xx, yy, 'k-')
    plt.show()
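
# --- hedged sketch, not part of the example above ---
# The snippet relies on two project-local pieces that are not shown here:
# multivariate_normal.load_data_with_label() and the compute_cost function
# handed to fmin_bfgs. The cluster means, the logistic-regression cost, and
# every name below are assumptions, added only to keep the example self-contained.
import numpy as np


def load_data_with_label(n=100, seed=0):
    # hypothetical stand-in: two 2-D Gaussian clusters labelled 0 and 1
    rng = np.random.RandomState(seed)
    X0 = rng.multivariate_normal([-5.0, -5.0], np.eye(2), n)
    X1 = rng.multivariate_normal([5.0, 5.0], np.eye(2), n)
    X = np.vstack([X0, X1])
    labels = np.hstack([np.zeros(n, dtype=int), np.ones(n, dtype=int)])
    return X, labels


def compute_cost(theta, X, y):
    # hypothetical logistic-regression cost: prepend an intercept column so
    # that theta[0] is the bias, then return the mean negative log-likelihood
    X_ext = np.hstack([np.ones((X.shape[0], 1)), X])
    h = 1.0 / (1.0 + np.exp(-X_ext.dot(theta)))
    eps = 1e-12  # guard against log(0)
    return -np.mean(y * np.log(h + eps) + (1 - y) * np.log(1 - h + eps))
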
Example #3
import numpy as np
import matplotlib.pyplot as plt
from sklearn.svm import SVC
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report

import multivariate_normal  # project-local data helper


def main():
    X, X_labels = multivariate_normal.load_data_with_label()
    X_train, X_test, y_train, y_test = train_test_split(X, X_labels)

    clf = SVC()
    clf.fit(X_train, y_train)
    pred = clf.predict(X_test)
    X_labels_uniq = [str(label) for label in np.unique(X_labels)]
    print(classification_report(y_test, pred,
                                target_names=X_labels_uniq))

    # plot decision boundary with meshgrid
    h = 0.1
    x_min, x_max = X_train[:, 0].min() - 1, X_train[:, 0].max() + 1
    y_min, y_max = X_train[:, 1].min() - 1, X_train[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    plt.contourf(xx, yy, Z, cmap=plt.cm.Paired, alpha=0.8)

    # also plot the training points
    plt.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=plt.cm.Paired)
    plt.xlim(x_min, x_max)
    plt.ylim(y_min, y_max)
    plt.show()
import numpy as np
from sklearn.naive_bayes import GaussianNB
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report

import multivariate_normal  # project-local data helper


def main():
    X, X_labels = multivariate_normal.load_data_with_label()
    X_train, X_test, X_labels_train, X_labels_test = train_test_split(X,
                                                                      X_labels)
    clf = GaussianNB()
    clf.fit(X_train, X_labels_train)
    pred = clf.predict(X_test)
    X_labels_uniq = [str(label) for label in np.unique(X_labels)]
    print(classification_report(X_labels_test, pred,
                                target_names=X_labels_uniq))
Example #6
from sklearn.model_selection import train_test_split

import multivariate_normal  # project-local data helper


def main():
    # load sample data
    X, X_labels = multivariate_normal.load_data_with_label()
    X_labels[X_labels == 0] = -1  # relabel classes as {-1, +1}

    # split into train and test set
    X_train, X_test, y_train, y_test = train_test_split(X, X_labels)

    # training
    model = fit(X_train, y_train)

    # show boundary
    show_boundary(model, X_train, y_train)
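
# --- hedged sketch, not part of the example above ---
# fit() and show_boundary() are project-local helpers that are not shown.
# Since the labels are remapped to {-1, +1}, this reads like a perceptron-style
# example; the implementation below is only a guess under that assumption.
import numpy as np
import matplotlib.pyplot as plt


def fit(X, y, n_epochs=100, lr=1.0):
    # hypothetical perceptron training: nudge (w, b) on each misclassified point
    w = np.zeros(X.shape[1])
    b = 0.0
    for _ in range(n_epochs):
        for xi, yi in zip(X, y):
            if yi * (np.dot(w, xi) + b) <= 0:
                w += lr * yi * xi
                b += lr * yi
    return w, b


def show_boundary(model, X, y):
    # scatter the training points and draw the line w.x + b = 0 (assumes w[1] != 0)
    w, b = model
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired)
    xs = np.linspace(X[:, 0].min(), X[:, 0].max())
    plt.plot(xs, -(w[0] * xs + b) / w[1], 'k-')
    plt.show()
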
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report

import multivariate_normal  # project-local data helper


def main():
    # load sample data
    X, X_labels = multivariate_normal.load_data_with_label()

    # training
    X_train, X_test, y_train, y_test = train_test_split(X, X_labels)
    mean, var, class_prior = fit(X_train, y_train)
    print('mean', mean)
    print('var', var)

    # bundle the learned parameters; a plain dict is sufficient here
    model = {'mean': mean, 'var': var, 'class_prior': class_prior}

    # predict
    pred = predict(model, X_test, y_test)

    # print result
    X_labels_uniq = [str(label) for label in np.unique(X_labels)]
    print(classification_report(y_test, pred, target_names=X_labels_uniq))
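
# --- hedged sketch, not part of the example above ---
# fit() and predict() are project-local helpers that are not shown. Given the
# returned (mean, var, class_prior) triple, a plausible Gaussian naive Bayes
# implementation could look like the following; names and details are assumptions.
import numpy as np


def fit(X, y):
    # per-class feature means, variances, and empirical class priors
    classes = np.unique(y)
    mean = np.array([X[y == c].mean(axis=0) for c in classes])
    var = np.array([X[y == c].var(axis=0) for c in classes])
    class_prior = np.array([np.mean(y == c) for c in classes])
    return mean, var, class_prior


def predict(model, X, y=None):
    # Gaussian log-likelihood per class plus log prior, then argmax over classes
    mean, var, prior = model['mean'], model['var'], model['class_prior']
    log_post = []
    for m, v, p in zip(mean, var, prior):
        ll = -0.5 * np.sum(np.log(2 * np.pi * v) + (X - m) ** 2 / v, axis=1)
        log_post.append(ll + np.log(p))
    return np.argmax(np.vstack(log_post), axis=0)
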
def main():
    # load sample data
    X, X_labels = multivariate_normal.load_data_with_label()
    n_labels = max(X_labels) + 1
    n_features = len(X[0])