Example No. 1
def test_invalid_class():
    ppn = Perceptron(epochs=40, eta=0.01, random_seed=1)
    try:
        ppn.fit(X, y2)  # -2, 1 class
        # unreachable if fit() raised as expected; fail the test otherwise
        assert False, 'ValueError was not raised'
    except ValueError:
        pass
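The try/except pattern above can also be written with the raises decorator from nose.tools, which the full test module in Example No. 6 already imports. A minimal sketch; the test name is illustrative, not part of the original suite:

from nose.tools import raises

@raises(ValueError)
def test_invalid_class_raises():
    # fit() is expected to reject the unsupported -2, 1 label encoding
    ppn = Perceptron(epochs=40, eta=0.01, random_seed=1)
    ppn.fit(X, y2)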
Example No. 2
def test_standardized_iris_data():
    t1 = np.array([0.18, 0.41, 0.50])  # reference values, not used by the assertion below
    ppn = Perceptron(epochs=15, eta=0.01, random_seed=1)
    ppn.fit(X_std, y1)  # -1, 1 class
    assert (y1 == ppn.predict(X_std)).all()
Example No. 3
def test_standardized_iris_data():
    ppn = Perceptron(epochs=15, eta=0.01, random_seed=1)
    ppn = ppn.fit(X_std, y0)
    assert (y0 == ppn.predict(X_std)).all(), ppn.predict(X_std)
Example No. 4
def test_nonstandardized_iris_data():
    t1 = np.array([0.078, -0.074, 0.46])  # reference values, not used by the assertion below
    ppn = Perceptron(epochs=40, eta=0.01, random_seed=1)
    ppn.fit(X, y1)  # -1, 1 class
    assert (y1 == ppn.predict(X)).all()
Example No. 5
def test_score_function():
    ppn = Perceptron(epochs=15, eta=0.01, random_seed=1, shuffle=True)
    ppn = ppn.fit(X_std, y1)  # -1, 1 class
    acc = ppn.score(X_std, y1)
    assert acc == 1.0, acc
Example No. 6
# Author: Sebastian Raschka <sebastianraschka.com>
#
# License: BSD 3 clause

from mlxtend.classifier import Perceptron
from mlxtend.data import iris_data
import numpy as np
from nose.tools import raises

# Iris Data
X, y = iris_data()
X = X[:, [0, 3]]  # sepal length and petal width
X = X[0:100]  # class 0 and class 1
y0 = y[0:100]  # class 0 and class 1
y1 = np.where(y[0:100] == 0, -1, 1)  # class -1 and class 1
y2 = np.where(y[0:100] == 0, -2, 1)  # class -2 and class 1

# standardize
X_std = np.copy(X)
X_std[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X_std[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()


def test_standardized_iris_data():
    ppn = Perceptron(epochs=15, eta=0.01, random_seed=1)
    ppn = ppn.fit(X_std, y1)  # -1, 1 class
    assert (y1 == ppn.predict(X_std)).all()


test_standardized_iris_data()
Example No. 7
def test_0_1_class_iris_data():
    ppn = Perceptron(epochs=40, eta=0.05, random_seed=1)
    ppn = ppn.fit(X, y0)  # 0, 1 class
    print(y0)
    print(ppn.predict(X))
    assert (y0 == ppn.predict(X)).all()
Example No. 8
def test_standardized_iris_data_with_shuffle():
    ppn = Perceptron(epochs=15, eta=0.01, random_seed=1, shuffle=True)
    ppn = ppn.fit(X_std, y1)  # -1, 1 class
    assert (y1 == ppn.predict(X_std)).all()
Example No. 9
def test_nonstandardized_iris_data():
    ppn = Perceptron(epochs=100, eta=0.01, random_seed=1)
    ppn = ppn.fit(X, y0)
    assert (y0 == ppn.predict(X)).all()
Example No. 10
def test_array_dimensions():
    # smoke test: fit() should accept a one-dimensional input array
    ppn = Perceptron(epochs=15, eta=0.01, random_seed=1)
    ppn = ppn.fit(np.array([1, 2, 3]), [-1])
Example No. 11
# Loading Data

from mlxtend.classifier import Perceptron
from mlxtend.data import iris_data
from mlxtend.plotting import plot_decision_regions
import matplotlib.pyplot as plt

X, y = iris_data()
X = X[:, [0, 3]]  # sepal length and petal width
X = X[0:100]  # class 0 and class 1
y = y[0:100]  # class 0 and class 1

# standardize
X[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()
# Note that this implementation of the Perceptron expects binary class labels in {0, 1}.
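# A hypothetical remap for data in another encoding (the labels above are
# already in {0, 1}, so nothing needs to change here):
#     y = np.where(y == -1, 0, 1)  # map {-1, 1} labels to {0, 1}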


# Rosenblatt Perceptron

ppn = Perceptron(epochs=5, # num of passes, default 50
                 eta=0.05, # learning rate 0.0 ~ 1.0, default 0.1
                 random_seed=0,
                 print_progress=3)
ppn.fit(X, y)

plot_decision_regions(X, y, clf=ppn)
plt.title('Perceptron - Rosenblatt Perceptron Rule')
plt.show()

print('Bias & Weights: %s' % ppn.w_)

plt.plot(range(len(ppn.cost_)), ppn.cost_)
plt.xlabel('Iterations')
plt.ylabel('Misclassifications')
plt.show()
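The plots above only visualize the fit; it can also be checked numerically with the score method that the test functions in Examples No. 5 and No. 15 use:

acc = ppn.score(X, y)
print('Training accuracy: %.2f' % acc)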
Example No. 12
def test_0_1_class_iris_data():
    t1 = np.array([0.26, -0., 0.27])  # reference values, not used by the assertion below
    ppn = Perceptron(epochs=40, eta=0.01, random_seed=1)
    ppn.fit(X, y0)  # 0, 1 class
    assert (y0 == ppn.predict(X)).all()
Example No. 13
def test_ary_persistency_in_shuffling():
    # fit() must not modify the input array in place
    orig = X.copy()
    ppn = Perceptron(epochs=40, eta=0.05, random_seed=1)
    ppn = ppn.fit(X, y0)  # 0, 1 class
    np.testing.assert_almost_equal(orig, X, 6)
Example No. 14
def test_progress_3():
    ppn = Perceptron(epochs=15, eta=0.01, random_seed=1, print_progress=3)
    ppn = ppn.fit(X_std, y0)
Example No. 15
def test_score_function():
    ppn = Perceptron(epochs=15, eta=0.01, random_seed=1)
    ppn = ppn.fit(X_std, y0)
    acc = ppn.score(X_std, y0)
    assert acc == 1.0, acc
Example No. 16
def test_standardized_iris_data_with_zero_weights():
    ppn = Perceptron(epochs=15, eta=0.01, random_seed=1, zero_init_weight=True)
    ppn = ppn.fit(X_std, y1)  # -1, 1 class
    assert (y1 == ppn.predict(X_std)).all()
Example No. 17
# Loading Data

from mlxtend.classifier import Perceptron
from mlxtend.data import wine_data
from mlxtend.plotting import plot_decision_regions
import matplotlib.pyplot as plt

X, y = wine_data()
X = X[:, [3, 12]]  # hue, ash
X = X[0:100]  # class 0 and class 1
y = y[0:100]  # class 0 and class 1

# standardize
X[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()


# Rosenblatt Perceptron

ppn = Perceptron(epochs=500,
                 eta=0.05,
                 random_seed=1,
                 print_progress=3)
ppn.fit(X, y)


plot_decision_regions(X, y, clf=ppn)
plt.title('Wine Data - Hue and Ash 500 Epochs')
plt.show()

print('Bias & Weights: %s' % ppn.w_)

plt.plot(range(len(ppn.cost_)), ppn.cost_)
plt.xlabel('Iterations')
plt.ylabel('Misclassifications')
plt.show()
Example No. 18
# Assumes module-level imports: numpy as np, os, and
# mlxtend.classifier.Perceptron.
def identificador(self):
    # Compare self.word against itself and against self.comparar,
    # appending 1 for each matching character position and 0 otherwise.
    w_prueba = []
    Elementos = [self.word, self.comparar]
    for j in Elementos:
        for i in range(min(len(self.word), len(j))):
            if self.word[i] == j[i]:
                w_prueba.append(1)
            else:
                w_prueba.append(0)
    auxiliar_x = sum(w_prueba[:len(self.word)])
    auxiliar_x2 = sum(w_prueba[len(self.word):len(self.word) +
                               len(self.comparar)])
    # Training set: rows of (word length, number of matching characters).
    # A float dtype keeps fractional values through the in-place
    # standardization below instead of truncating them to integers.
    X = np.array([[len(self.word),
                   sum(w_prueba[:len(self.word)])],
                  [len(self.word),
                   sum(w_prueba[:len(self.word)])],
                  [len(self.word),
                   sum(w_prueba[1:len(self.word)])],
                  [len(self.word),
                   sum(w_prueba[1:len(self.word)])],
                  [len(self.word) + 1,
                   len(self.word) - 1],
                  [len(self.word) - 1,
                   len(self.word) - 1],
                  [len(self.word) + 1,
                   len(self.word) - 2],
                  [len(self.word) - 1,
                   len(self.word) - 2]], dtype=float)
    # standardize both feature columns
    X[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
    X[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()
    y = np.array([0, 0, 0, 0, 1, 1, 1, 1])
    ppn = Perceptron(epochs=5, eta=0.05, random_seed=0, print_progress=3)
    ppn.fit(X, y)
    # Query set: the third row carries the word/comparar match statistics;
    # its prediction is the verdict returned below.
    X2 = np.array([[len(self.word), auxiliar_x],
                   [len(self.word) + 1,
                    len(self.word) - 1],
                   [len(self.comparar), auxiliar_x2]], dtype=float)
    X2[:, 0] = (X2[:, 0] - X2[:, 0].mean()) / X2[:, 0].std()
    X2[:, 1] = (X2[:, 1] - X2[:, 1].mean()) / X2[:, 1].std()
    resultado = ppn.predict(X2)
    self.encontro = resultado[2]
    print("\n\n")
    os.system("clear")
    return self.encontro