Example #1
def main():
    """Train logistic regression on two iris classes and plot the result."""
    dataset = load_iris()

    features = dataset['data']
    labels = dataset['target']

    # Keep only classes 1 and 2, then relabel them as 0 and 1
    mask = labels != 0
    features = features[mask]
    labels = labels[mask] - 1

    X_train, X_test, y_train, y_test = train_test_split(
        features, labels, test_size=0.33)

    model = LogisticRegression()
    model.fit(X_train, y_train)
    predictions = model.predict(X_test)

    # Fraction of correctly classified test samples
    accuracy = np.mean(predictions == y_test)
    print("Accuracy:", accuracy)

    Plot().plot_in_2d(features,
                      labels,
                      title="Logistic Regression",
                      accuracy=accuracy,
                      legend_labels=dataset['target_names'])
Example #2
def main():
    """Train a multilayer perceptron on the iris dataset and plot the result."""
    data = load_iris()

    X = normalize(data['data'])
    target = data['target'].astype('int')

    # One-hot encode the integer labels into an (n_samples, 3) matrix.
    # (The original also assigned y = data['target'] first, which was dead
    # code — it was immediately overwritten by the one-hot array.)
    y = np.zeros((target.shape[0], 3))
    y[np.arange(target.shape[0]), target] = 1

    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33)

    clf = MLP()
    clf.fit(X_train, y_train, n_epochs=1000, lr=0.01, n_units=16)

    # Convert one-hot predictions and targets back to class indices
    y_pred = np.argmax(clf.predict(X_test), axis=1)
    y_test = np.argmax(y_test, axis=1)
    accuracy = np.mean(y_pred == y_test)

    # Class indices for the full dataset, used for plotting
    y = np.argmax(y, axis=1)

    print("Accuracy:", accuracy)

    Plot().plot_in_2d(X, y,
                      title="Multilayer Perceptron",
                      accuracy=accuracy,
                      legend_labels=data['target_names'])
def main():
    """Binary logistic regression on iris classes 1 and 2."""
    dataset = load_iris()

    X = dataset['data']
    y = dataset['target']

    # Drop class 0 and shift the remaining labels {1, 2} down to {0, 1}
    keep = y != 0
    X, y = X[keep], y[keep] - 1

    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33)

    model = LogisticRegression()
    model.fit(X_train, y_train)
    # Round the model's continuous outputs to hard 0/1 labels
    y_pred = np.rint(model.predict(X_test))

    accuracy = np.mean(y_pred == y_test)
    print("Accuracy:", accuracy)

    Plot().plot_in_2d(X, y,
                      title="Logistic Regression",
                      accuracy=accuracy,
                      legend_labels=dataset['target_names'])
Example #4
def main():
    """Train a multilayer perceptron on the iris dataset and plot the result."""
    data = load_iris()

    X = normalize(data['data'])
    target = data['target'].astype('int')

    # One-hot encode the integer labels into an (n_samples, 3) matrix.
    # (The original also assigned y = data['target'] first, which was dead
    # code — it was immediately overwritten by the one-hot array.)
    y = np.zeros((target.shape[0], 3))
    y[np.arange(target.shape[0]), target] = 1

    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33)

    clf = MLP()
    clf.fit(X_train, y_train, n_epochs=1000, lr=0.01, n_units=16)

    # Convert one-hot predictions and targets back to class indices
    y_pred = np.argmax(clf.predict(X_test), axis=1)
    y_test = np.argmax(y_test, axis=1)
    accuracy = np.mean(y_pred == y_test)

    # Class indices for the full dataset, used for plotting
    y = np.argmax(y, axis=1)

    print("Accuracy:", accuracy)

    Plot().plot_in_2d(X,
                      y,
                      title="Multilayer Perceptron",
                      accuracy=accuracy,
                      legend_labels=data['target_names'])
Example #5
def main():
    """Cluster the iris data with K-Means (k=3) and plot the clusters."""
    data = load_iris()
    X = data['data']
    # NOTE: the true labels (data['target']) were loaded but never used
    # in this example, so that dead assignment was removed.

    kmeans = KMeans()
    clusters = kmeans.fit(X, k=3)

    # Pass the title by keyword for consistency with the other examples
    Plot().plot_in_2d(X, clusters, title="K-Means")
Example #6
def main():
    """Classify iris samples with K-Nearest Neighbors (k=5) and plot."""
    dataset = load_iris()
    X = dataset['data']
    y = dataset['target'].astype('int')

    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33)

    # KNN is a lazy learner: prediction takes k and the training set directly
    classifier = KNN()
    predictions = classifier.predict(5, X_test, X_train, y_train)

    accuracy = np.mean(predictions == y_test)
    print("Accuracy:", accuracy)

    Plot().plot_in_2d(X, y,
                      title="K-Nearest Neighbors",
                      accuracy=accuracy,
                      legend_labels=dataset['target_names'])
Example #7
def main():
    """Linear Discriminant Analysis on iris classes 0 and 1."""
    dataset = load_iris()
    X = dataset['data']
    y = dataset['target']

    # Keep only the first two classes (drop class 2)
    binary = y != 2
    X, y = X[binary], y[binary]

    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33)

    model = LDA()
    model.fit(X_train, y_train)
    y_pred = model.predict(X_test)

    accuracy = np.mean(y_pred == y_test)
    print("Accuracy:", accuracy)

    Plot().plot_in_2d(X, y,
                      title="Linear Discriminant Analysis",
                      accuracy=accuracy,
                      legend_labels=dataset['target_names'])