# Example no. 1 (score: 0)
    def initializeNN(self):
        """Construct the input, hidden and output neuron layers.

        Populates ``self.input``, ``self.hidden`` and ``self.output`` with
        ``Neuron.Neuron`` instances.  When pre-trained weight lists
        (``self.inputWeights`` / ``self.hiddenWeights`` / ``self.outputWeights``
        and their bias counterparts) are non-empty, each neuron is created
        with its slice of those weights; otherwise neurons are created with
        empty weight lists (presumably random-initialised inside Neuron —
        TODO confirm).
        """
        # --- Input layer: one pass-through neuron per input ---------------
        for i in range(0, self.noInput):
            w = []
            wb = []
            if len(self.inputWeights) != 0 and len(self.inputBiasWeights) != 0:
                # NOTE(review): the weight is wrapped in a list but the bias
                # weight is not — asymmetric; confirm Neuron's expected types.
                w = [self.inputWeights[i]]
                wb = self.inputBiasWeights[i]
            # Input neurons use the identity activation and a single input.
            self.input[i] = (Neuron.Neuron(af.NoActivationFunction(), 1, w,
                                           wb))
        # --- Hidden layers -------------------------------------------------
        for i in range(0, self.noHiddenLayers):
            layer = [None] * self.noHidden

            for j in range(0, self.noHidden):
                # Find weights for current neuron
                w = []
                wb = []
                if len(self.hiddenWeights) != 0 and len(
                        self.hiddenBiasWeights) != 0:
                    # NOTE(review): neither slice below depends on j, so
                    # every neuron in layer i receives the SAME weight
                    # slice.  That looks like a bug (each neuron should
                    # normally get its own fan-in weights) — confirm the
                    # intended layout of self.hiddenWeights before relying
                    # on this.
                    if i == 0:
                        w = self.hiddenWeights[:self.noInput]
                    else:
                        # Offset past the first layer's (noInput-sized)
                        # block, then take layer i's noHidden-sized block.
                        start = self.noInput + (i - 1) * self.noHidden
                        end = self.noInput + i * self.noHidden
                        w = self.hiddenWeights[start:end]
                    # Bias weights ARE indexed per neuron (i, j).
                    wb = self.hiddenBiasWeights[i * self.noHidden + j]
                else:
                    # No weights present =>  initialise random neuron
                    pass
                # Add new neuron
                if i == 0:
                    # First hidden layer: fan-in equals the input size.
                    layer[j] = (Neuron.Neuron(af.Logistic(), self.noInput, w,
                                              wb))
                else:
                    # Deeper layers: fan-in equals the hidden-layer size.
                    layer[j] = (Neuron.Neuron(af.Logistic(), self.noHidden, w,
                                              wb))

            self.hidden[i] = layer

        # --- Output layer: logistic neurons fed by the last hidden layer ---
        for i in range(0, self.noOutput):
            w = []
            wb = []
            if len(self.outputWeights) != 0 and len(
                    self.outputBiasWeights) != 0:
                # Output neuron i takes its own contiguous noHidden-sized
                # block of weights.
                w = self.outputWeights[i * self.noHidden:(i + 1) *
                                       self.noHidden]
                wb = self.outputBiasWeights[i]
            self.output[i] = (Neuron.Neuron(af.Logistic(), self.noHidden, w,
                                            wb))
# Example no. 2 (score: 0)
    def __init__(
        self,
        Neurons,
        LearningRate,
        activationFunction,
        errorFunction,
        name=None,
    ):
        """Build a layer of neurons.

        Parameters
        ----------
        Neurons : int | iterable
            Either the number of identical neurons to create, or an
            iterable of per-neuron parameter sequences (each of length
            1-3, passed positionally to ``Neuron.Neuron``).
        LearningRate : learning rate shared by the layer (stored as R).
        activationFunction, errorFunction : forwarded to each Neuron
            when ``Neurons`` is an int.
        name : optional layer name; defaults to the running class
            counter ``Layer.layerNum``.
        """
        self.Layer = []
        self.R = LearningRate
        # Cached activations for the layer, for quick referencing; one
        # slot per neuron, initialised to 0.
        self.outputs = []
        self.neuronCount = 0
        # Fix: identity comparison for None (PEP 8); `== None` can be
        # defeated by a custom __eq__.
        self.name = Layer.layerNum if name is None else name

        def _add(neuron):
            # Shared bookkeeping for every neuron appended to the layer.
            self.Layer.append(neuron)
            self.outputs.append(0)
            self.neuronCount += 1

        if isinstance(Neurons, int):
            for _ in range(Neurons):
                _add(Neuron.Neuron(LearningRate, activationFunction,
                                   errorFunction))
        else:
            # NOTE: parameters must be positionally ordered — not robust.
            for neuronParams in Neurons:
                # Equivalent to the original len-based dispatch: forward
                # at most the first three parameters positionally.
                _add(Neuron.Neuron(*neuronParams[:3]))
        Layer.layerNum += 1
 def test_neuron(self):
     """A freshly constructed Neuron(3) has the expected initial state."""
     fresh = Neuron(3)
     # Scalar attributes checked via a small expectation table.
     for attr, want in (("size", 3), ("delta", 0)):
         self.assertEqual(getattr(fresh, attr), want)
     # Container attributes: three weights allocated, no inputs yet.
     self.assertEqual(len(fresh.weights), 3)
     self.assertEqual(len(fresh.inputs), 0)