Code Example #1
    def run(self, show_weights=False):
        for learning_rate in self.learning_rates:
            print('\nSimple perceptron with learning rate %.2f' %
                  learning_rate)

            features, labels = DataSetLoader(self.training_file).load()
            perceptron = SimplePerceptron(learning_rate)
            weights = perceptron.train(features, labels, 20)

            if show_weights:
                print('\nDetected weights')
                print(weights)

            test_features, test_labels = DataSetLoader(
                self.testing_file).load()

            invalid_entries = 0
            for i, x in enumerate(test_features):
                y1 = SimplePerceptron.predict(x, weights)
                y = test_labels[i]

                if y1 != y:
                    invalid_entries += 1

            error_rate = (invalid_entries / len(test_features)) * 100
            print('Invalid classified entries:', invalid_entries,
                  '-> Total entries:', len(test_features), '-> Error:',
                  str(round(error_rate, 2)) + '%\n')
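The SimplePerceptron class used above is not included in this snippet. A minimal sketch consistent with the calls shown (a constructor taking a learning rate, train(features, labels, epochs) returning the learned weights, and a static predict(x, weights)) might look like the following; the zero initialization, 0/1 outputs, and bias handling are assumptions for illustration, not the original implementation.

import numpy as np

class SimplePerceptron:
    def __init__(self, learning_rate):
        self.learning_rate = learning_rate

    @staticmethod
    def predict(x, weights):
        # weights[0] is the bias; the remaining entries pair with the features.
        activation = weights[0] + np.dot(weights[1:], x)
        return 1 if activation >= 0 else 0

    def train(self, features, labels, epochs):
        # One extra weight slot for the bias, starting from zero.
        weights = np.zeros(len(features[0]) + 1)
        for _ in range(epochs):
            for x, y in zip(features, labels):
                # Standard perceptron rule: move the weights toward misclassified points.
                error = y - self.predict(x, weights)
                weights[0] += self.learning_rate * error
                weights[1:] += self.learning_rate * error * np.asarray(x)
        return weights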
Code Example #2
File: excercises.py Project: Bensas/Perceptrons
def excercise1():
    x = [[-1, 1], [1, -1], [-1, -1], [1, 1]]
    y = [-1, -1, -1, 1]

    x = np.array(x)
    y = np.array(y)

    print("=== AND FUNCTION ===")
    print("X=" + str(x))
    print("Y=" + str(y))
    print("Training...")
    perceptron = SimplePerceptron(2)
    perceptron.train(x, y)
    print("Resulting weights: " + str(perceptron.weights))
    print_perceptron_test(perceptron, x, y)

    print("=== XOR FUNCTION ===")
    y = [1, 1, -1, -1]
    y = np.array(y)
    print("X=" + str(x))
    print("Y=" + str(y))
    print("Training...")
    perceptron = SimplePerceptron(2)
    perceptron.train(x, y)
    print("Resulting weights: " + str(perceptron.weights))
    print_perceptron_test(perceptron, x, y)
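Two things are worth noting about this exercise: the AND labels are linearly separable, so training converges, while the XOR labels are not, so a simple perceptron cannot classify all four points correctly no matter how many epochs it runs. The print_perceptron_test helper is defined elsewhere in the Bensas/Perceptrons project; a plausible sketch, assuming the perceptron exposes a predict(x) method, is:

def print_perceptron_test(perceptron, x, y):
    # Hypothetical helper: print the prediction next to the expected label for every sample.
    for features, expected in zip(x, y):
        print("Input:", features, "Expected:", expected,
              "Predicted:", perceptron.predict(features))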
Code Example #3
class TestPerceptron(unittest.TestCase):
    def setUp(self):
        self.sp = SimplePerceptron()

    def test_and_gate(self):
        self.assertEqual(0, self.sp.and_gate(0, 0))
        self.assertEqual(0, self.sp.and_gate(1, 0))
        self.assertEqual(0, self.sp.and_gate(0, 1))
        self.assertEqual(1, self.sp.and_gate(1, 1))

    def test_nand_gate(self):
        self.assertEqual(1, self.sp.nand_gate(0, 0))
        self.assertEqual(1, self.sp.nand_gate(1, 0))
        self.assertEqual(1, self.sp.nand_gate(0, 1))
        self.assertEqual(0, self.sp.nand_gate(1, 1))

    def test_or_gate(self):
        self.assertEqual(0, self.sp.or_gate(0, 0))
        self.assertEqual(1, self.sp.or_gate(1, 0))
        self.assertEqual(1, self.sp.or_gate(0, 1))
        self.assertEqual(1, self.sp.or_gate(1, 1))
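The gate methods exercised by these tests are not shown. A common implementation, assumed here for illustration only, evaluates a fixed-weight threshold unit; hand-picked weights and a bias are enough to realize AND, NAND, and OR (XOR, by contrast, needs more than one unit). A sketch that would satisfy these tests:

class SimplePerceptron:
    @staticmethod
    def _step(x1, x2, w1, w2, bias):
        # One threshold unit: weighted sum plus bias, cut at zero.
        return 1 if w1 * x1 + w2 * x2 + bias > 0 else 0

    def and_gate(self, x1, x2):
        return self._step(x1, x2, 0.5, 0.5, -0.7)

    def nand_gate(self, x1, x2):
        return self._step(x1, x2, -0.5, -0.5, 0.7)

    def or_gate(self, x1, x2):
        return self._step(x1, x2, 0.5, 0.5, -0.2)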
Code Example #4
from simple_perceptron import SimplePerceptron
from decaying_perceptron import DecayingPerceptron
from margin_perceptron import MarginPerceptron
from averaged_perceptron import AveragedPerceptron

# import matplotlib.pyplot as plt

if __name__ == '__main__':
    learning_rates = [1, 0.1, 0.01]
    margin_rates = [1, 0.1, 0.01]

    sp = SimplePerceptron()
    sp.train(learning_rates)
    sp.report()
    # plt.scatter(*zip(*sp._epoch_acc))
    # plt.plot(*zip(*sp._epoch_acc))
    # plt.show()

    dp = DecayingPerceptron()
    dp.train(learning_rates)
    dp.report()
    # plt.scatter(*zip(*dp._epoch_acc))
    # plt.plot(*zip(*dp._epoch_acc))
    # plt.show()

    mp = MarginPerceptron()
    mp.train(learning_rates, margin_rates)
    mp.report()
    # plt.scatter(*zip(*mp._epoch_acc))
    # plt.plot(*zip(*mp._epoch_acc))
    # plt.show()
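The four perceptron variants imported above are not reproduced here. As a rough, assumed illustration of how they typically differ in their update step: the decaying perceptron shrinks the learning rate as training progresses, the margin perceptron also updates on correctly classified points that fall inside a margin, and the averaged perceptron predicts with the average of all intermediate weight vectors rather than the final one.

import numpy as np

def perceptron_update(w, x, y, eta, epoch=0, decay=False, margin=0.0):
    # Generic update step covering the variants above (an illustration, not the project's code).
    # y is the +1/-1 label, x the feature vector with the bias folded in, w the current weights.
    rate = eta / (1 + epoch) if decay else eta   # decaying variant: eta shrinks with the epoch count
    if y * np.dot(w, x) < margin:                # margin = 0 reduces to the plain perceptron test
        w = w + rate * y * x
    return w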
Code Example #5
    def setUp(self):
        self.sp = SimplePerceptron()
Code Example #6
from simple_perceptron import SimplePerceptron
import numpy

print('\n')
print('AND: ')
# AND input data
input_data = numpy.array([[-1, -1], [-1, 1], [1, -1], [1, 1]])
# AND expected result for input
input_expected_data = numpy.array([-1, -1, -1, 1])

perceptron = SimplePerceptron(input_data, input_expected_data)
perceptron.train()
i = 0
while (i < len(input_expected_data)):
    print("input: ", input_data[i])
    print("expected value: ", input_expected_data[i])
    print("result value: ", perceptron.guess(input_data[i]))
    i += 1

print('\n')
print('OR: ')
# OR input data
or_input_data = numpy.array([[-1, 1], [1, -1], [-1, -1], [1, 1]])
# OR expected result for input
or_input_expected_data = numpy.array([1, 1, -1, 1])

or_perceptron = SimplePerceptron(or_input_data, or_input_expected_data)
or_perceptron.train()
i = 0
while (i < len(or_input_expected_data)):
    print("input: ", or_input_data[i])
Code Example #7
import numpy
import matplotlib.pyplot as plt

# SimplePerceptron and parser are provided elsewhere in the project and are not part of this excerpt.

def sigmoide(value):
    # Logistic sigmoid activation (assumed; it must match the derivative used below).
    return 1 / (1 + numpy.exp(-value))

def de_sigmoide(value):
    # Derivative of the sigmoid, expressed through sigmoide itself.
    return sigmoide(value) * (1 - sigmoide(value))

inputs = parser.get_inputs()
outputs = parser.get_outputs()
outputs_normalized = numpy.zeros(len(outputs))
max_value = numpy.max(outputs)
min_value = numpy.min(outputs)
i = 0
while(i < len(outputs)):
    outputs_normalized[i] = (outputs[i][0] - min_value) / (max_value - min_value)
    i += 1

split_i = 50
train_inputs = inputs[:split_i]
train_outputs = outputs_normalized[:split_i]
test_inputs = inputs[split_i:]
test_outputs = outputs_normalized[split_i:]

perceptron = SimplePerceptron(train_inputs, train_outputs, sigmoide, de_sigmoide, eta=0.1, iterations=300)
training_accuracies, test_accuracies, iters = perceptron.train(test_inputs, test_outputs, delta=0.001, print_data=True)

print(training_accuracies)
print(test_accuracies)

plt.plot(iters, training_accuracies, label="train")
plt.plot(iters, test_accuracies, label="test")

plt.xlabel('Epoch', fontsize=16)
plt.ylabel('Accuracy', fontsize=16)
plt.legend(title='Accuracy vs Epochs')
plt.show()
Code Example #8
def main():
    print("Ejercicio 1 - Perceptron Simple")
    c1, c2 = generate_separable_points(50, 0.5, 2)
    # plot_classes(c1,c2)
    data = get_train_data(c1, c2)

    sp = SimplePerceptron(data,
                          max_epoch=10000,
                          learning_rate=0.01,
                          visualize=False,
                          calculate_errors=True)
    sp.train()

    data = np.hstack((data, np.zeros((data.shape[0], 1))))
    for i in range(data.shape[0]):
        data[i, 3] = sp.predict(data[i, 0:2])

    print("Error: ", sqrt(sp.error))
    print("Weights: ", sp.weights)

    # sp.draw("Perceptron simple", "yellow")
    # plot_classes(c1,c2)
    m, b, margin, points = sp.optimus_hiperplane(n=4)

    perceptron_slope = sp.get_slope("Perceptron", "yellow")
    optimum_slope = Slope(m, b, "Hiperplano Optimo", "green")
    if points is not None:
        sp.draw_with_slope([perceptron_slope, optimum_slope], False,
                           np.array(points) + 0.01)
        print('final points', points)
    else:
        sp.draw_with_slope([perceptron_slope, optimum_slope], False)

    classifier = svm.SVC(C=1, kernel='linear')
    clf = classifier.fit(data[:, 0:2], data[:, 2])
    pred = classifier.predict(data[:, 0:2])
    plt.scatter(data[:, 0], data[:, 1], c=data[:, 2], s=30, cmap=plt.cm.Paired)
    ax = plt.gca()
    xlim = ax.get_xlim()
    ylim = ax.get_ylim()
    xx = np.linspace(xlim[0], xlim[1], 30)
    yy = np.linspace(ylim[0], ylim[1], 30)
    YY, XX = np.meshgrid(yy, xx)
    xy = np.vstack([XX.ravel(), YY.ravel()]).T
    Z = clf.decision_function(xy).reshape(XX.shape)

    # plot decision boundary and margins
    ax.contour(XX,
               YY,
               Z,
               colors='k',
               levels=[-1, 0, 1],
               alpha=0.5,
               linestyles=['--', '-', '--'])
    # plot support vectors
    ax.scatter(clf.support_vectors_[:, 0],
               clf.support_vectors_[:, 1],
               s=100,
               linewidth=1,
               facecolors='none',
               edgecolors='k')
    plt.show()
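The closing block fits sklearn's linear SVC on the same points mainly for comparison: the SVM picks the maximum-margin separator (the solid contour, flanked by dashed margins and circled support vectors), whereas the perceptron stops at whichever separating hyperplane it happens to reach first, so the two boundaries generally do not coincide even on identical data.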