Example #1
import numpy as np
import matplotlib.pyplot as plt


# AdalineGD and plot_decision_regions are assumed to be defined or
# imported elsewhere in the source project.
def adaline_iris_train_std(X, y):
    fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(8, 4))
    # standardize each feature to zero mean and unit variance
    X_std = np.copy(X)
    X_std[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
    X_std[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()
    ax[0].scatter(X[:50, 0],
                  X[:50, 1],
                  color='red',
                  marker='o',
                  label='setosa')
    ax[0].scatter(X[50:100, 0],
                  X[50:100, 1],
                  color='blue',
                  marker='x',
                  label='versicolor')
    ax[1].scatter(X_std[:50, 0],
                  X_std[:50, 1],
                  color='red',
                  marker='o',
                  label='setosa')
    ax[1].scatter(X_std[50:100, 0],
                  X_std[50:100, 1],
                  color='blue',
                  marker='x',
                  label='versicolor')
    ax[0].set_title('Non-standardized')
    ax[1].set_title('Standardized')

    plt.show()

    ada = AdalineGD(n_iter=15, eta=0.01)
    ada.fit(X_std, y)
    plot_decision_regions(X_std, y, classifier=ada)

    plt.title('Adaline - Gradient descent')
    plt.xlabel('sepal length [standardized]')
    plt.ylabel('petal length [standardized]')
    plt.legend(loc='upper left')
    plt.show()

    plt.plot(range(1, len(ada.cost_) + 1), ada.cost_, marker='o')
    plt.xlabel('Epochs')
    plt.ylabel('Sum-squared-error')
    plt.show()
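
The examples on this page never include the AdalineGD class itself. A minimal
sketch consistent with the interface they rely on (an eta/n_iter constructor, a
chainable fit that records a cost_ history, and a predict method) is shown
below; the exact weight initialization and extra attributes (e.g. the wlist_
weight history used in Example #9) vary between the source repositories.

import numpy as np


class AdalineGD:
    """ADAptive LInear NEuron trained with batch gradient descent."""

    def __init__(self, eta=0.01, n_iter=50, random_state=1):
        self.eta = eta                # learning rate (0.0 to 1.0)
        self.n_iter = n_iter          # passes over the training set
        self.random_state = random_state

    def fit(self, X, y):
        rgen = np.random.RandomState(self.random_state)
        # w_[0] is the bias unit; start from small random weights
        self.w_ = rgen.normal(loc=0.0, scale=0.01, size=1 + X.shape[1])
        self.cost_ = []
        for _ in range(self.n_iter):
            errors = y - self.net_input(X)     # identity activation
            # batch update: negative gradient of the sum-squared-error cost
            self.w_[1:] += self.eta * X.T.dot(errors)
            self.w_[0] += self.eta * errors.sum()
            self.cost_.append((errors ** 2).sum() / 2.0)
        return self

    def net_input(self, X):
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def predict(self, X):
        return np.where(self.net_input(X) >= 0.0, 1, -1)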
Example #2
def adaline_iris_train(X, y):
    fig, ax = plt.subplots(nrows=1, ncols=3, figsize=(8, 4))
    ada1 = AdalineGD(eta=0.01, n_iter=10).fit(X, y)
    ax[0].plot(range(1, len(ada1.cost_) + 1), np.log(ada1.cost_), marker='o')
    ax[0].set_xlabel('Epochs')
    ax[0].set_ylabel('log(Sum-squared-error)')
    ax[0].set_title('Adaline - Learning rate of 0.01')

    ax[1].plot(range(1, len(ada1.cost_) + 1), ada1.cost_, marker='o')
    ax[1].set_xlabel('Epochs')
    ax[1].set_ylabel('Sum-squared-error')
    ax[1].set_title('Adaline - Learning rate of 0.01')
    ada2 = AdalineGD(n_iter=10, eta=0.0001).fit(X, y)
    ax[2].plot(range(1, len(ada2.cost_) + 1), ada2.cost_, marker='o')
    ax[2].set_xlabel('Epochs')
    ax[2].set_ylabel('Sum-squared-error')
    ax[2].set_title('Adaline - Learning rate of 0.0001')

    plt.show()
# df is assumed to be a pandas DataFrame loaded earlier in the source file;
# Perceptron, AdalineGD, and AdalineSGD are modules holding those classifiers.
y = df.iloc[:, 6].values
y = np.where(y >= 0.00005, 1, -1)
X = df.iloc[:, [1, 2]].values
X_std = np.copy(X)
X_std[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X_std[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()

############
# analysis #
############

ppn = Perceptron.Perceptron(eta=0.1, n_iter=10)
ppn.fit(X, y)

ada = AdalineGD.AdalineGD(eta=0.01, n_iter=15)
ada.fit(X_std, y)

adaS = AdalineSGD.AdalineSGD(eta=0.01, n_iter=15, random_state=1)
adaS.fit(X_std, y)

########
# plot #
########

plt.plot(range(1, len(ppn.errors_) + 1), ppn.errors_, marker='o')
plt.xlabel('Epochs')
plt.ylabel('Number of misclassifications')
plt.show()

plt.plot(range(1, len(ada.cost_) + 1), ada.cost_, marker='o')
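
Example #2 also relies on Perceptron and AdalineSGD classes that it never
defines. Minimal sketches matching the constructors and the errors_/cost_
attributes used above follow; they are assumptions modeled on the standard
textbook implementations, not the exact code from the source repositories.

import numpy as np


class Perceptron:
    """Rosenblatt perceptron: updates weights only on misclassified samples."""

    def __init__(self, eta=0.01, n_iter=50):
        self.eta = eta
        self.n_iter = n_iter

    def fit(self, X, y):
        self.w_ = np.zeros(1 + X.shape[1])
        self.errors_ = []                     # misclassifications per epoch
        for _ in range(self.n_iter):
            errors = 0
            for xi, target in zip(X, y):
                update = self.eta * (target - self.predict(xi))
                self.w_[1:] += update * xi
                self.w_[0] += update
                errors += int(update != 0.0)
            self.errors_.append(errors)
        return self

    def predict(self, X):
        return np.where(np.dot(X, self.w_[1:]) + self.w_[0] >= 0.0, 1, -1)


class AdalineSGD:
    """Adaline trained with stochastic gradient descent, sample by sample."""

    def __init__(self, eta=0.01, n_iter=10, random_state=None):
        self.eta = eta
        self.n_iter = n_iter
        self.random_state = random_state

    def fit(self, X, y):
        rgen = np.random.RandomState(self.random_state)
        self.w_ = rgen.normal(loc=0.0, scale=0.01, size=1 + X.shape[1])
        self.cost_ = []                       # average cost per epoch
        for _ in range(self.n_iter):
            order = rgen.permutation(len(y))  # shuffle to avoid cycles
            cost = []
            for xi, target in zip(X[order], y[order]):
                error = target - (np.dot(xi, self.w_[1:]) + self.w_[0])
                self.w_[1:] += self.eta * xi * error
                self.w_[0] += self.eta * error
                cost.append(0.5 * error ** 2)
            self.cost_.append(np.mean(cost))
        return self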
Example #4
def run():
    # ppn.run()
    agd.run()  # agd is assumed to be an imported module exposing run()
Example #5
import numpy as np
import matplotlib.pyplot as plt

import Inputdata
import Perceptron
import AdalineGD

ppn = Perceptron.Perceptron(eta=0.1, n_iter=10)

ppn.fit(Inputdata.X, Inputdata.y)

plt.plot(range(1, len(ppn.errors_) + 1), ppn.errors_, marker='o')
plt.xlabel('Epochs')
plt.ylabel('Number of updates')

# plt.savefig('images/02_07.png', dpi=300)
plt.show()

fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(10, 4))

ada1 = AdalineGD.AdalineGD(n_iter=10, eta=0.01).fit(Inputdata.X, Inputdata.y)
ax[0].plot(range(1, len(ada1.cost_) + 1), np.log10(ada1.cost_), marker='o')
ax[0].set_xlabel('Epochs')
ax[0].set_ylabel('log(Sum-squared-error)')
ax[0].set_title('Adaline - Learning rate 0.01')

ada2 = AdalineGD.AdalineGD(n_iter=10, eta=0.0001).fit(Inputdata.X, Inputdata.y)
ax[1].plot(range(1, len(ada2.cost_) + 1), ada2.cost_, marker='o')
ax[1].set_xlabel('Epochs')
ax[1].set_ylabel('Sum-squared-error')
ax[1].set_title('Adaline - Learning rate 0.0001')

# plt.savefig('images/02_11.png', dpi=300)
plt.show()
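
Inputdata is assumed to be a small helper module exposing the Iris features X
and labels y. A plausible version, mirroring the data loading in Examples #8
through #10, could look like this:

# Inputdata.py -- load the first 100 Iris samples (setosa vs. versicolor)
import numpy as np
import pandas as pd

df = pd.read_csv(
    'http://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data',
    header=None)
y = np.where(df.iloc[0:100, 4].values == 'Iris-setosa', -1, 1)
X = df.iloc[0:100, [0, 2]].values   # sepal length and petal length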
Example #7
# ppn (a Perceptron instance) and the Iris data X, Y are assumed to be
# defined earlier in the source file.
ppn.fit(X, Y)
plt.plot(range(1, len(ppn.errors_) + 1), ppn.errors_, marker='o')
plt.xlabel('Epochs')
plt.ylabel('Number of misclassifications')
plt.show()

plot_decision_region(X, Y, classifier=ppn)
plt.xlabel('sepal length [cm]')
plt.ylabel('petal length [cm]')
plt.legend(loc='upper left')
plt.show()

import AdalineGD

fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(8, 4))
ada1 = AdalineGD.AdalineGD(n_iter=10, eta=0.01).fit(X, Y)
ax[0].plot(range(1, len(ada1.cost_) + 1), np.log10(ada1.cost_), marker='o')
ax[0].set_xlabel('Epochs')
ax[0].set_ylabel('log(Sum-squared-error)')
ax[0].set_title('Adaline - Learning rate 0.01')

ada2 = AdalineGD.AdalineGD(n_iter=10, eta=0.0001).fit(X, Y)
ax[1].plot(range(1, len(ada2.cost_) + 1), ada2.cost_, marker='o')
ax[1].set_xlabel('Epochs')
ax[1].set_ylabel('Sum-squared-error')
ax[1].set_title('Adaline - Learning rate 0.0001')
plt.show()

X_std = np.copy(X)
X_std[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X_std[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()
Example #8
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from AdalineGD import *
from percptron_plot import *

df = pd.read_csv('E:/GIT/ML/LGD/iris.data', header=None)
df.tail()
# select setosa and versicolor
y = df.iloc[0:100, 4].values
y = np.where(y == "Iris-setosa", -1, 1)
# extract sepal length and petal length
X = df.iloc[0:100, [0, 2]].values

X_std = np.copy(X)   # X_std was never initialized in the original snippet
X_std[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X_std[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()

ada = AdalineGD(n_iter=15, eta=0.01)
ada.fit(X_std, y)

plot_decision_regions(X_std, y, classifier=ada)
plt.title('Adaline - Gradient Descent')
plt.xlabel('sepal length [standardized]')
plt.ylabel('petal length [standardized]')
plt.legend(loc='upper left')
plt.tight_layout()
plt.show()

plt.plot(range(1, len(ada.cost_) + 1), ada.cost_, marker='o')
plt.xlabel('Epochs')
plt.ylabel('Sum-squared-error')
plt.tight_layout()
plt.show()
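
Several examples on this page call a plot_decision_regions helper without
defining it (Example #11 shows only its tail). A typical implementation
matching the call signatures used here is sketched below; the resolution
parameter, colors, and markers are assumptions.

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap


def plot_decision_regions(X, y, classifier, resolution=0.02):
    markers = ('o', 'x', 's', '^', 'v')
    colors = ('red', 'blue', 'lightgreen', 'gray', 'cyan')
    cmap = ListedColormap(colors[:len(np.unique(y))])

    # evaluate the classifier on a grid spanning the two features
    x1_min, x1_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    x2_min, x2_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx1, xx2 = np.meshgrid(np.arange(x1_min, x1_max, resolution),
                           np.arange(x2_min, x2_max, resolution))
    Z = classifier.predict(np.array([xx1.ravel(), xx2.ravel()]).T)
    plt.contourf(xx1, xx2, Z.reshape(xx1.shape), alpha=0.3, cmap=cmap)
    plt.xlim(xx1.min(), xx1.max())
    plt.ylim(xx2.min(), xx2.max())

    # overlay the training samples, one marker per class
    for idx, cl in enumerate(np.unique(y)):
        plt.scatter(x=X[y == cl, 0], y=X[y == cl, 1],
                    alpha=0.8, c=colors[idx],
                    marker=markers[idx], label=str(cl))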
Example #9
execfile("tools.py")

import AdalineGD as ad
import pandas as pd
df = pd.read_csv(
    'http://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data',
    header=None)
y = df.iloc[0:100, 4].values
y = np.where(y == 'Iris-setosa', -1, 1)
X = df.iloc[0:100, [0, 2]].values

niter = 10
eta = 0.01
ada1 = ad.AdalineGD(eta=eta, n_iter=niter).fit(X, y)
fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(10, 4))
ax[0].plot(range(1, len(ada1.cost_) + 1), np.log10(ada1.cost_), marker='o')
ax[0].set_xlabel('Epochs')
ax[0].set_ylabel('log(Sum-squared-error)')
ax[0].set_title('Adaline - Learning rate 0.01')
ada2 = ad.AdalineGD(n_iter=10, eta=0.0001).fit(X, y)
ax[1].plot(range(1, len(ada2.cost_) + 1), ada2.cost_, marker='o')
ax[1].set_xlabel('Epochs')
ax[1].set_ylabel('Sum-squared-error')
ax[1].set_title('Adaline - Learning rate 0.0001')
plt.show()

# Plot the weight history recorded by this repo's AdalineGD variant
# (array, figure, scatter, and show presumably come from tools.py's
# pylab-style namespace)
weights = array(ada1.wlist_)
figure()
scatter(arange(niter + 1), weights[:, 0])  # -weights[-1,0])
show()
Example #10
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
from AdalineGD import *

df = pd.read_csv('E:/GIT/ML/LGD/iris.data', header=None)
df.tail()
# select setosa and versicolor
y = df.iloc[0:100, 4].values
y = np.where(y == "Iris-setosa", -1, 1)
# extract sepal length and petal length
X = df.iloc[0:100, [0, 2]].values

fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(10, 4))
ada1 = AdalineGD(n_iter=10, eta=0.01)
ada1.fit(X, y)
ax[0].plot(range(1, len(ada1.cost_) + 1), np.log10(ada1.cost_), marker='o')
ax[0].set_xlabel('Epochs')
ax[0].set_ylabel('log(Sum-squared-error)')
ax[0].set_title('Adaline - Learning rate 0.01')

ada2 = AdalineGD(n_iter=10, eta=0.0001)
ada2.fit(X, y)
ax[1].plot(range(1, len(ada2.cost_) + 1), np.log10(ada2.cost_), marker='o')
ax[1].set_xlabel('Epochs')
ax[1].set_ylabel('log(Sum-squared-error)')
ax[1].set_title('Adaline - Learning rate 0.0001')

plt.show()
Example #11
    # (tail of a plot_decision_regions-style helper: markers, cmap,
    # x1label, and x2label are defined earlier in the source file)
    ## Plot class samples
    for idx, cl in enumerate(np.unique(y)):
        plt.scatter(x=X[y == cl, 0],
                    y=X[y == cl, 1],
                    alpha=0.8,
                    c=cmap(idx),
                    marker=markers[idx],
                    label=cl)
    plt.xlabel(x1label)
    plt.ylabel(x2label)


## Plot with two learning rates
fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(8, 4))
ada1 = ada.AdalineGD(n_iter=10, eta=0.01).fit(X, y)
ax[0].plot(range(1, len(ada1.cost_) + 1), np.log10(ada1.cost_), marker='o')
ax[0].set_xlabel('Epochs')
ax[0].set_ylabel('log(Sum-squared-error)')
ax[0].set_title('Adaline - Learning rate 0.01')
ada2 = ada.AdalineGD(n_iter=10, eta=0.0001).fit(X, y)
ax[1].plot(range(1, len(ada2.cost_) + 1), ada2.cost_, marker='o')
ax[1].set_xlabel('Epochs')
ax[1].set_ylabel('Sum-squared-error')
ax[1].set_title('Adaline - Learning rate 0.0001')
plt.show()

## After standardization (zero mean, unit variance)
X_std = np.copy(X)
X_std[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X_std[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()
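
The same standardization can be done with scikit-learn's StandardScaler, which
also remembers the training-set mean and standard deviation so the identical
transform can be applied to unseen samples (assuming scikit-learn is available
in these projects):

from sklearn.preprocessing import StandardScaler

sc = StandardScaler()
X_std = sc.fit_transform(X)   # fit on training data, then transform it
# later: sc.transform(X_new) reuses the same per-feature mean and std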