Example #1
    def draw_adaline_gd_graph(self):
        # Standardize the features (zero mean, unit variance for each column).
        X = self.X
        y = self.y
        X_std = np.copy(X)
        X_std[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
        X_std[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()

        # Train Adaline with batch gradient descent on the standardized features.
        ada = AdalineGD(n_iter=15, eta=0.01)
        ada.fit(X_std, y)

        plot_decision_regions(X_std, y, classifier=ada)
        plt.title('Adaline - Gradient Descent')
        plt.xlabel('sepal length [standardized]')
        plt.ylabel('petal length [standardized]')
        plt.legend(loc='upper left')
        plt.tight_layout()
        plt.show()

        # Plot the sum-squared-error per epoch to check convergence.
        plt.plot(range(1, len(ada.cost_) + 1), ada.cost_, marker='o')
        plt.xlabel('Epochs')
        plt.ylabel('Sum-squared-error')

        plt.tight_layout()
        plt.show()
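Both examples call an AdalineGD class that is defined elsewhere and not shown here. For reference, the following is a minimal sketch that is consistent with how it is used above (eta, n_iter, fit(), cost_, and the predict() that plot_decision_regions needs); the actual implementation may differ in detail.

import numpy as np

class AdalineGD:
    """ADAptive LInear NEuron trained with batch gradient descent (assumed sketch)."""

    def __init__(self, eta=0.01, n_iter=50, random_state=1):
        self.eta = eta                  # learning rate
        self.n_iter = n_iter            # passes over the training set
        self.random_state = random_state

    def fit(self, X, y):
        rgen = np.random.RandomState(self.random_state)
        # Small random initial weights; w_[0] acts as the bias unit.
        self.w_ = rgen.normal(loc=0.0, scale=0.01, size=1 + X.shape[1])
        self.cost_ = []
        for _ in range(self.n_iter):
            output = self.net_input(X)      # linear activation
            errors = y - output
            # Batch gradient descent: update all weights from the full data set.
            self.w_[1:] += self.eta * X.T.dot(errors)
            self.w_[0] += self.eta * errors.sum()
            self.cost_.append((errors ** 2).sum() / 2.0)   # sum-squared-error
        return self

    def net_input(self, X):
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def predict(self, X):
        # Threshold the linear output at zero to obtain the class label.
        return np.where(self.net_input(X) >= 0.0, 1, -1)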
Example #2
    def draw_adaline_graph(self):
        X = self.X
        y = self.y
        fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(10, 4))

        # Larger learning rate: plot the cost on a log10 scale.
        ada1 = AdalineGD(n_iter=10, eta=0.01).fit(X, y)
        ax[0].plot(range(1, len(ada1.cost_) + 1), np.log10(ada1.cost_), marker='o')
        ax[0].set_xlabel('Epochs')
        ax[0].set_ylabel('log(Sum-squared-error)')
        ax[0].set_title('Adaline - Learning rate 0.01')

        # Smaller learning rate: plot the raw cost.
        ada2 = AdalineGD(n_iter=10, eta=0.0001).fit(X, y)
        ax[1].plot(range(1, len(ada2.cost_) + 1), ada2.cost_, marker='o')
        ax[1].set_xlabel('Epochs')
        ax[1].set_ylabel('Sum-squared-error')
        ax[1].set_title('Adaline - Learning rate 0.0001')

        plt.show()
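Example #1 also relies on plot_decision_regions, imported from model.plot but not shown. A common minimal implementation of such a helper (assumed here, not taken from the original module) evaluates classifier.predict on a mesh grid covering the two features and shades the resulting regions:

from matplotlib.colors import ListedColormap
import matplotlib.pyplot as plt
import numpy as np

def plot_decision_regions(X, y, classifier, resolution=0.02):
    # Markers and colors for up to five classes.
    markers = ('s', 'x', 'o', '^', 'v')
    colors = ('red', 'blue', 'lightgreen', 'gray', 'cyan')
    cmap = ListedColormap(colors[:len(np.unique(y))])

    # Evaluate the classifier on a grid spanning the feature space.
    x1_min, x1_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    x2_min, x2_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx1, xx2 = np.meshgrid(np.arange(x1_min, x1_max, resolution),
                           np.arange(x2_min, x2_max, resolution))
    Z = classifier.predict(np.array([xx1.ravel(), xx2.ravel()]).T)
    Z = Z.reshape(xx1.shape)

    # Shade the decision regions and overlay the training samples.
    plt.contourf(xx1, xx2, Z, alpha=0.3, cmap=cmap)
    plt.xlim(xx1.min(), xx1.max())
    plt.ylim(xx2.min(), xx2.max())
    for idx, cl in enumerate(np.unique(y)):
        plt.scatter(x=X[y == cl, 0], y=X[y == cl, 1],
                    alpha=0.8, c=colors[idx],
                    marker=markers[idx], label=cl,
                    edgecolor='black')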
from model.plot import plot_decision_regions
# AdalineGD must also be importable; the module path below is an assumption,
# adjust it to wherever the AdalineGD class is actually defined.
from model.adaline_gd import AdalineGD

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from sklearn.datasets import load_iris
%matplotlib inline

iris = load_iris()

# Use the first 100 samples (setosa and versicolor) and two features:
# sepal length (column 0) and petal length (column 2).
X = pd.DataFrame(iris.data, columns=iris.feature_names).iloc[0:100, [0, 2]].values
y = iris.target[0:100]
y = np.where(y == 0, 1, -1)  # encode setosa as 1 and versicolor as -1

# Compare cost curves for two learning rates on the unstandardized features.
fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(10, 4))
ada1 = AdalineGD(eta=0.01, n_iter=10).fit(X, y)
ax[0].plot(range(1, len(ada1.cost_)+1),
            np.log(ada1.cost_), marker='s',
            color = 'red')
ax[0].set_xlabel('Epoch')
ax[0].set_ylabel('log(sum-square-error)')
ax[0].set_title('Adaline learning rate 0.01')

ada2 = AdalineGD(eta=0.0001, n_iter=10).fit(X, y)
ax[1].plot(range(1, len(ada2.cost_)+1),
            np.log(ada2.cost_), marker='o',
            color='blue')
ax[1].set_xlabel('Epoch')
ax[1].set_ylabel('log(sum-square-error)')
ax[1].set_title('Adaline learning rate 0.0001')

plt.tight_layout()
plt.show()
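The two example methods read their data from self.X and self.y, so they are meant to live on a class that holds the prepared arrays. The enclosing class is not shown in the original; the wrapper below only illustrates the assumed structure and how the methods would be invoked with the data prepared in the script above.

class AdalineDemo:
    def __init__(self, X, y):
        self.X = X   # feature matrix (sepal length, petal length)
        self.y = y   # labels encoded as +1 / -1

    # draw_adaline_gd_graph (Example #1) and draw_adaline_graph (Example #2)
    # would be pasted here unchanged as methods of this class.

# Usage with the X, y built in the script above:
# demo = AdalineDemo(X, y)
# demo.draw_adaline_graph()      # learning-rate comparison
# demo.draw_adaline_gd_graph()   # training on standardized features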