Beispiel #1
0
    def setUp(self):
        """Build a two-layer perceptron network fixture for the tests."""
        # First layer: two perceptrons reading the same two inputs.
        first = Perceptron([0.5, 0.5], Step(0).activate, "n1", bias=-1)
        second = Perceptron([-0.1, -0.1], Step(0).activate, "n2", bias=0)
        # Second layer: one perceptron combining the first layer's outputs.
        third = Perceptron([-1, -1], Step(0).activate, "n3", bias=0)

        layer_one = PerceptronLayer(1, [first, second])
        layer_two = PerceptronLayer(1, [third])

        self.ntwrk = PerceptronNetwork([layer_one, layer_two])
Beispiel #2
0
    def setUp(self):
        """Build a fixture network: three perceptrons feeding two outputs."""
        # Layer 1 perceptrons (two inputs each):
        first = Perceptron([0.5, 0.5], Step(0).activate, "n1", bias=-1)
        second = Perceptron([-1, -1], Step(0).activate, "n2", bias=0)
        third = Perceptron([0.5, 0.5], Step(0).activate, "n3", bias=-1)
        # Layer 2 perceptrons (three inputs each, one per layer-1 node):
        fourth = Perceptron([-1, -1, 0], Step(0).activate, "n4", bias=0)
        fifth = Perceptron([0, 0, 1], Step(0).activate, "n5", bias=-1)

        layer_one = PerceptronLayer(1, [first, second, third])
        layer_two = PerceptronLayer(2, [fourth, fifth])

        self.ntwrk = PerceptronNetwork([layer_one, layer_two])
Beispiel #3
0
 def setUp(self):
     """Prepare an OR-type perceptron test fixture."""
     self.OR_Perceptron = Perceptron(
         [0.5, 0.5], Step(0).activate, "OR", bias=-0.5
     )
Beispiel #4
0
def listfilter(wantedclasses: List[int], x: List[Union[int, float]], y: List[int]):
    """Filter a training set down to the wanted classes.

    Walks the samples and their labels in lockstep and keeps only the
    pairs whose label appears in *wantedclasses*.

    Args:
        wantedclasses: Class labels to keep.
        x: Training samples, aligned with *y*; each kept sample is
           copied into a plain list.
        y: The class label of each sample.

    Returns:
        A ``(samples, labels)`` tuple restricted to the wanted classes.
    """
    # A set makes each membership test O(1) instead of scanning the list.
    wanted = set(wantedclasses)
    finalx = []
    finaly = []
    # zip iterates samples and labels together — no index bookkeeping needed.
    for sample, label in zip(x, y):
        if label in wanted:
            finalx.append(list(sample))
            finaly.append(label)
    return finalx, finaly

# Keep only the samples labelled 0 or 1 so a single perceptron can separate them.
firstx, firsty = listfilter([0,1],x,y)

# Now train the first perceptron! We are dealing with two classes here,
# so the problem should be linearly solvable.

# NOTE(review): four initial weights — presumably each sample has four features;
# confirm against the shape of x.
percep = Perceptron([random.random() for i in range(4)], Step().activate)
percep.update(firstx,firsty)

# Uncomment for detailed information
# print(percep.__str__())

print("End weights:")
print(percep.getweights())
print("End bias:")
print(percep.getbias())

# Now train the second perceptron! This one could be a bit harder, because here
# we are dealing with three classes; after all, we work with Step(), which can
# only output 0 or 1... Moreover, in this exercise we only work with a single
# perceptron being trained. If we had built a network this would not have been a
# problem; then we could have given each class its own node in the output layer.
Beispiel #5
0
 def setUp(self):
     """Prepare an AND-type perceptron test fixture."""
     self.AND_Perceptron = Perceptron([0.5, 0.5], Step(0).activate, "AND", bias=-1)
Beispiel #6
0
# Since the XOR gate solves a non-linear problem, we estimate that this perceptron will fail to initialise properly.
# In order to demonstrate this, we do **not** use the unittest module, as that would result in a lack of feedback
# as to why it went wrong. Instead, we will print the outcomes and show that the XOR port is not working as it should.

from classes.Perceptron import Perceptron
from classes.Activation import Step

import random
random.seed(1756708)  # fixed seed so the demonstration is reproducible

# Two random starting weights, then train on the full XOR truth table.
test1 = Perceptron([random.random() for x in range(2)], Step(0).activate)
test1.update([[0, 0], [0, 1], [1, 0], [1, 1]], [0, 1, 1, 0])

# Uncomment for detailed information
# print(test1.__str__())

print("End weights:")
print(test1.getweights())
print("End bias:")
print(test1.getbias())

# XOR is high exactly when the two inputs differ; a single linear perceptron
# cannot represent that, so at least one of the checks below should come out wrong.
print("Should be high (1):")
print("Input [1,0] gives:")
print(test1.activate([1, 0]))
print("Input [0,1] gives:")
print(test1.activate([0, 1]))
print("")
print("Should be low (0):")
print("Input [0,0] gives:")
print(test1.activate([0, 0]))
print("Input [1,1] gives:")