Example #1
    def test_net_input(self):
        pn = Perceptron(0.1, 10)
        x = np.array([[5.1, 1.4], [4.9, 1.4], [4.7, 1.3], [4.6, 1.5], \
            [5.7, 4.2], [6.2, 4.3], [5.1, 3.0], [5.7, 4.1]])
        y = np.array([-1, -1, -1, -1, 1, 1, 1, 1])
        # Fit the weights to the data
        pn.fit(x, y)
Example #2
    def test_predict(self):
        """
        Tests the Perceptron class's predict() function.
        """
        pn = Perceptron(0.1, 10)
        x = np.array([[5.1, 1.4], [4.9, 1.4], [4.7, 1.3], [4.6, 1.5], \
            [5.7, 4.2], [6.2, 4.3], [5.1, 3.0], [5.7, 4.1]])
        y = np.array([-1, -1, -1, -1, 1, 1, 1, 1])

        # Test calling the predict function
        pn.fit(x, y)
        predictions = pn.predict(x)
        self.assertTrue(np.array_equal(predictions, y))
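
Examples #1 and #2 (and most of the snippets below) exercise a Perceptron class from ML.py whose source is not included on this page. The following is only a minimal sketch consistent with the calls used here, namely Perceptron(rate, niter), fit(x, y), net_input(x), and predict(x). The zero initialization, the bias-stored-as-last-weight layout, and the update rule are illustrative assumptions, not the original implementation.

import numpy as np

class Perceptron:
    """Minimal sketch of the classifier these tests exercise (not ML.py)."""

    def __init__(self, rate=0.1, niter=10):
        self.rate = rate             # learning rate
        self.niter = niter           # number of passes over the training set
        self.errors = np.array([0])  # misclassification count per pass
        self.weight = np.zeros(2)    # resized by fit() to n_features + 1

    def net_input(self, X):
        # Weighted sum of the inputs; the bias is assumed to be the last weight.
        return np.dot(X, self.weight[:-1]) + self.weight[-1]

    def predict(self, X):
        # Threshold the net input at zero to obtain class labels -1 / +1.
        return np.where(self.net_input(X) >= 0.0, 1, -1)

    def fit(self, X, y):
        # Classic perceptron learning rule: for each sample, nudge the weights
        # by rate * (target - prediction) and count misclassifications per pass.
        self.weight = np.zeros(X.shape[1] + 1)
        errors = []
        for _ in range(self.niter):
            misclassified = 0
            for xi, target in zip(X, y):
                update = self.rate * (target - self.predict(xi))
                self.weight[:-1] += update * xi
                self.weight[-1] += update
                misclassified += int(update != 0.0)
            errors.append(misclassified)
        self.errors = np.array(errors)
        return self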
Example #3
def perceptron_example():
    df = pd.read_csv('https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data', header=None)

    y = df.iloc[0:100, 4].values
    y = np.where(y == 'Iris-setosa', -1, 1)

    x = df.iloc[0:100, [0, 2]].values
    xlabel_text = "sepal length [cm]"
    ylabel_text = "petal length [cm]"

    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.scatter(x[:50, 0], x[:50, 1], color='red', marker='o', label='setosa')
    ax.scatter(x[50:100, 0], x[50:100, 1], color='blue', marker='x', label='versicolor')
    ax.set_xlabel(xlabel_text)
    ax.set_ylabel(ylabel_text)
    ax.legend()

    perceptron = Perceptron(.1, 100)
    perceptron.fit(x, y)
    # print(perceptron.get_weights())
    plt.figure()
    perceptron.plot_decision_regions(x, y, perceptron)
    plt.xlabel(xlabel_text)
    plt.ylabel(ylabel_text)
    plt.show()
Example #4
    def test_init(self):
        """
        Tests the Perceptron class's init function.
        """
        # Testing Valid arguments
        pn = Perceptron(0.1, 10)

        # Check that the learning rate and niter were initialized
        testRate = 0.1
        self.assertEqual(type(pn.rate), type(testRate))
        self.assertEqual(pn.rate, testRate)
        testNiter = 10
        self.assertEqual(type(pn.niter), type(testNiter))
        self.assertEqual(pn.niter, 10)

        # Check that the errors and weight arrays were initialized to something
        testArray = np.array([0])
        self.assertEqual(len(pn.errors), 1)
        self.assertEqual(type(pn.errors), type(testArray))
        self.assertEqual(len(pn.weight), 2)
        self.assertEqual(type(pn.weight), type(testArray))

        # Testing invalid argument for learning rate
        text_trap = io.StringIO()  # create a text trap and redirect stdout
        sys.stdout = text_trap
        pn = Perceptron(1, 10)
        sys.stdout = sys.__stdout__  # now restore stdout function

        self.assertEqual(pn.rate, 0.1)
        self.assertEqual(pn.niter, 10)

        # Testing invalid argument for niter
        text_trap = io.StringIO()  # create a text trap and redirect stdout
        sys.stdout = text_trap
        pn = Perceptron(0.1, 0.10)
        sys.stdout = sys.__stdout__  # now restore stdout function
        self.assertEqual(pn.niter, 10)
        self.assertEqual(pn.rate, 0.1)
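
The second half of this test implies that the constructor validates its arguments and falls back to the defaults, printing a warning rather than raising, which is why each constructor call is wrapped in a stdout trap. A hedged sketch of just that constructor; the exact messages and the 0 < rate < 1 rule are assumptions inferred from the assertions, not the original ML.py code.

import numpy as np

class Perceptron:
    def __init__(self, rate=0.1, niter=10):
        # Invalid arguments fall back to the defaults with only a printed
        # message rather than an exception (message text is a guess).
        if not isinstance(rate, float) or not 0.0 < rate < 1.0:
            print("Invalid learning rate; falling back to the default of 0.1")
            rate = 0.1
        if not isinstance(niter, int) or niter <= 0:
            print("Invalid iteration count; falling back to the default of 10")
            niter = 10
        self.rate = rate
        self.niter = niter
        self.errors = np.array([0])   # len-1 ndarray, as the test checks
        self.weight = np.zeros(2)     # len-2 ndarray, as the test checks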
Example #5
    def test_dotProduct(self):
        """
        Tests the Perceptron class's dotProduct() function, which computes the
        dot product of a sample x with the perceptron weight vector.
        """
        pn = Perceptron(0.1, 10)
        x = [[0, 0], [1, 1], [2, 2], [3, 3], [4, 4]]
        y = np.array([-1, -1, -1, -1])
        text_trap = io.StringIO()  # create a text trap and redirect stdout
        sys.stdout = text_trap
        self.assertTrue(math.isnan(pn.dotProduct(x)))
        sys.stdout = sys.__stdout__  # now restore stdout function

        # Testing with valid sizes
        pn.weight = np.array([-0.31, 0.7, 0.1])
        self.assertAlmostEqual(pn.dotProduct(x[0]), 0.1, places=3)
        self.assertAlmostEqual(pn.dotProduct(x[1]), 0.49, places=3)
        self.assertAlmostEqual(pn.dotProduct(x[2]), 0.88, places=3)
        self.assertAlmostEqual(pn.dotProduct(x[3]), 1.27, places=3)
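
The original dotProduct() is not shown, but the expected values pin down its weight layout: the last element of pn.weight behaves as a bias added after the feature dot product (for x = [1, 1]: 1*(-0.31) + 1*0.70 + 0.10 = 0.49). A standalone sketch follows; the name dot_product, the warning text, and the NaN-on-mismatch rule are illustrative guesses, and on the class this would be a method using self.weight.

import math
import numpy as np

def dot_product(weight, x):
    # `weight` holds the feature weights with the bias as the LAST element,
    # which is the layout the expected values above imply.
    x = np.asarray(x, dtype=float)
    if x.ndim != 1 or x.size != len(weight) - 1:
        # A shape mismatch yields NaN, matching the first assertion above;
        # the test traps stdout, so the original presumably also prints
        # a warning here (this message is a guess).
        print("x does not match the size of the weight vector")
        return math.nan
    return float(np.dot(x, weight[:-1]) + weight[-1])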
Example #6
    def test_fit(self):
        """
        Tests the Perceptron class's fit() function.
        """
        pn = Perceptron(0.1, 10)

        # Testing x param not a numpy array
        x = [[0, 0], [1, 1], [2, 2], [3, 3], [4, 4]]
        y = np.array([-1, -1, -1, -1])
        text_trap = io.StringIO()  # create a text trap and redirect stdout
        sys.stdout = text_trap
        result = pn.fit(x, y)
        sys.stdout = sys.__stdout__  # now restore stdout function
        self.assertEqual(result[0], "x must be of type numpy array")

        # Testing x doesn't have the right dimensions
        x = np.array([-1, -1, -1, -1])
        y = np.array([-1, -1, -1, -1])
        text_trap = io.StringIO()  # create a text trap and redirect stdout
        sys.stdout = text_trap
        result = pn.fit(x, y)
        sys.stdout = sys.__stdout__  # now restore stdout function
        self.assertEqual(result[0], "x must have 2 dimensions")

        # Testing y param not a numpy array
        y = [[0, 0], [1, 1], [2, 2], [3, 3], [4, 4]]
        x = np.array([-1, -1, -1, -1])
        text_trap = io.StringIO()  # create a text trap and redirect stdout
        sys.stdout = text_trap
        result = pn.fit(x, y)
        sys.stdout = sys.__stdout__  # now restore stdout function
        self.assertEqual(result[0], "y must be of type numpy array")

        # Testing y doesn't have the right dimensions
        y = np.array([[0, 0], [1, 1], [2, 2], [3, 3], [4, 4]])
        x = np.array([[0, 0], [1, 1], [2, 2], [3, 3], [4, 4]])
        text_trap = io.StringIO()  # create a text trap and redirect stdout
        sys.stdout = text_trap
        result = pn.fit(x, y)
        sys.stdout = sys.__stdout__  # now restore stdout function
        self.assertEqual(result[0], "y must have 1 dimension")

        # Testing that the sizes of x and y don't match
        x = np.array([[5.1, 1.4], [4.9, 1.4], [4.7, 1.3], [4.6, 1.5]])
        y = np.array([-1, -1, -1, -1, -1])
        text_trap = io.StringIO()  # create a text trap and redirect stdout
        sys.stdout = text_trap
        result = pn.fit(x, y)
        sys.stdout = sys.__stdout__  # now restore stdout function
        #self.assertEqual(result[0], "sizes of x and y must match")

        # Testing valid parameters
        pn = Perceptron(0.1, 10)
        x = np.array([[5.1, 1.4], [4.9, 1.4], [4.7, 1.3], [4.6, 1.5], \
            [5.7, 4.2], [6.2, 4.3], [5.1, 3.0], [5.7, 4.1]])
        y = np.array([-1, -1, -1, -1, 1, 1, 1, 1])
        # Check the initial set of the weight array
        self.assertEqual(np.array_equal(pn.weight, np.array([0, 0])), True)
        # Fit the weights to the data
        pn.fit(x, y)
        # Check that the size of the weight array was updated based on size of x and y
        self.assertEqual(len(y), len(x))
        self.assertEqual(len(pn.weight), len(x[0]) + 1)
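
The assertions above pin down the validation messages that fit() returns (as result[0]) for bad input before any training happens. Below is a hedged sketch of just that validation step, written as a standalone helper: the name validate_fit_inputs and the ordering of the checks are assumptions chosen so that the first reported problem matches each assertion, and the learning loop itself would then follow as in the class sketch after Example #2.

import numpy as np

def validate_fit_inputs(X, y):
    # Collect problems in the order the test expects to see them first:
    # type checks on both arrays, then dimension checks, then the size check.
    problems = []
    if not isinstance(X, np.ndarray):
        problems.append("x must be of type numpy array")
    if not isinstance(y, np.ndarray):
        problems.append("y must be of type numpy array")
    if not problems:
        if X.ndim != 2:
            problems.append("x must have 2 dimensions")
        if y.ndim != 1:
            problems.append("y must have 1 dimension")
        elif len(X) != len(y):
            problems.append("sizes of x and y must match")
    return problems

In fit() itself the sketch would run these checks first, and if the returned list is non-empty, print it and return it (which is why the test redirects stdout around each call); otherwise it would proceed with training.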
Example #7
def test_perceptron():
    df = pd.read_csv('https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data', header=None)
    print("Creating a two-feature data set")
    y = df.iloc[0:100, 4].values
    y = np.where(y == 'Iris-setosa', -1, 1)
    X = df.iloc[0:100, [0, 2]].values
    print("Creating Perceptron")
    pn = Perceptron(0.1, 10)
    print("Perceptron created")
    pn.fit(X, y)
    print("Perceptron fitted")
    print("Error List")
    print(pn.errors)
    print("Weight vector")
    print(pn.weight)
    print("Using plot_decision_regions function")
    plot_decision_regions(X, y, pn, 0.02)
    pn.plot(X, y)
    plt.xlabel('sepal length [cm]')
    plt.ylabel('petal length [cm]')
    plt.title('Petal Length vs Sepal Length')

    print("Creating a three-feature data set")
    y = df.iloc[0:150, 4].values
    y = np.where(y == 'Iris-setosa', -1, 1)
    X = df.iloc[0:150, [0, 1, 2]].values
    print("Creating Perceptron")
    pn1 = Perceptron(0.1, 10)
    print("Perceptron created")
    pn1.fit(X, y)
    print("Perceptron fitted")
    print("Error List")
    print(pn1.errors)
    print("Weight vector")
    print(pn1.weight)

    print("Creating a perceptron that does not have enough iterations to learn the three-feature data set")
    pn2 = Perceptron(0.1, 4)
    print("Perceptron created")
    pn2.fit(X, y)
    print("Perceptron fitted")
    print("Error List")
    print(pn2.errors)
    print("Weight vector")
    print(pn2.weight)
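
Both Example #3 (as a method on the class) and Example #7 (as a free function taking a resolution argument) rely on a decision-region plotting helper that is not shown on this page. The sketch below follows the free-function signature used in Example #7; the meshgrid/contourf approach and the color and marker choices are assumptions about how such helpers are commonly written. It assumes a two-feature X, two class labels, and a classifier exposing predict().

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap

def plot_decision_regions(X, y, classifier, resolution=0.02):
    # Shade the plane according to the classifier's prediction on a dense
    # grid over the two features, then overlay the training points.
    markers = ('o', 'x')
    colors = ('red', 'blue')
    cmap = ListedColormap(colors[:len(np.unique(y))])

    x1_min, x1_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    x2_min, x2_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx1, xx2 = np.meshgrid(np.arange(x1_min, x1_max, resolution),
                           np.arange(x2_min, x2_max, resolution))
    Z = classifier.predict(np.array([xx1.ravel(), xx2.ravel()]).T)
    Z = Z.reshape(xx1.shape)

    plt.contourf(xx1, xx2, Z, alpha=0.4, cmap=cmap)
    plt.xlim(xx1.min(), xx1.max())
    plt.ylim(xx2.min(), xx2.max())

    # Overlay the samples of each class with its own marker and color.
    for idx, cl in enumerate(np.unique(y)):
        plt.scatter(X[y == cl, 0], X[y == cl, 1],
                    alpha=0.8, c=colors[idx], marker=markers[idx],
                    label=str(cl))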
Example #8
    Topic: Develop a generic binary classifier Perceptron class in ML.py.
    It has to take a training set of any size. The class must include four
    functions: init(), fit(), netinput(), predict(), plus one more supportive
    function to display the results.

'''
from ML import Perceptron
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import sys
from matplotlib.colors import ListedColormap

# Create a Perceptron instance with learning rate 0.1 and 10 iterations
pn = Perceptron(0.1, 10)

#Using Pandas import Iris dataset
df = pd.read_csv(
    'https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data',
    header=None)

# Use only the labels of the first 100 rows
y = df.iloc[0:100, 4].values

#Convert labels into -1 and 1
y = np.where(y == 'Iris-setosa', -1, 1)

# Extract only two features (sepal length and petal length) from the data set
x = df.iloc[0:100, [0, 2]].values

# Visualize the data
plt.scatter(x[:50, 0], x[:50, 1], color='red', marker='o', label='Iris-setosa')
plt.scatter(x[50:100, 0], x[50:100, 1], color='blue', marker='x', label='Iris-versicolor')
plt.xlabel('sepal length')
plt.ylabel('petal length')
plt.legend(loc='upper left')
title = "Perceptron Demo:\n"
title += "Data Visualization for the Sepal Length and Petal Length of\nIris "
title += "Setosa and Iris Versicolor"
plt.title(title)
plt.show()

# Create Perceptron and fit it to the data
pn = Perceptron(0.1, 10)
pn.fit(x, y)

# Plot the perceptron errors per iteration
plt.plot(range(1, len(pn.errors) + 1), pn.errors, marker='o')
plt.title("Errors per Iteration for Perceptron Training")
plt.xlabel('Iteration')
plt.ylabel('Number of misclassifications')
plt.show()

# Plot the decision boundary
title = "Decision Boundary Determined by Perceptron Model for Classifying\n"
title += "Iris Setosa and Iris Versicolor Based on Sepal Length and Petal Length"
xLabel = "sepal length"
yLabel = "petal length"
plotPerceptronDecisionRegions(x, y, pn, title, xLabel, yLabel, \
Example #10
plt.ylabel('Petal Length [cm]')

# Plot the setosa data
plt.scatter(X[:50, 0], X[:50, 1], color='red', marker='o', label='setosa')

# Plot the versicolor data
plt.scatter(X[50:100, 0], X[50:100, 1], color='blue', marker='x', label='versicolor')

# Set up the plot legend
plt.legend(loc='upper left')

# Display the plot
plt.show()

# Set up the Perceptron
pn = Perceptron(0.1, 10)

# Fit X to y (i.e. find the weights)
pn.fit(X, y)

# Print the error array
print("Errors:", pn.errors)

# Plot the results of the first fit
plt.plot(range(1, len(pn.errors) + 1), pn.errors, marker='o')
plt.title('Iris Dataset')
plt.xlabel('Iteration')
plt.ylabel('# of Misclassifications')
plt.show()

print("Net Input X:", pn.net_input(X))