Example #1
    def initializeNN(self):
        # Input layer: one pass-through neuron per input, each with a
        # single weight and no activation function.
        for i in range(0, self.noInput):
            w = []
            wb = []
            # Reuse stored weights when present; empty lists leave the
            # Neuron to initialise itself randomly.
            if len(self.inputWeights) != 0 and len(self.inputBiasWeights) != 0:
                w = [self.inputWeights[i]]
                wb = self.inputBiasWeights[i]
            self.input[i] = Neuron.Neuron(af.NoActivationFunction(), 1, w, wb)
        for i in range(0, self.noHiddenLayers):
            layer = [None] * self.noHidden

            for j in range(0, self.noHidden):
                # Find the weights for the current neuron; empty lists
                # leave the Neuron to initialise itself randomly.
                w = []
                wb = []
                if len(self.hiddenWeights) != 0 and len(
                        self.hiddenBiasWeights) != 0:
                    # hiddenWeights is assumed flat and neuron-major: the
                    # first layer holds noHidden blocks of noInput weights,
                    # deeper layers hold noHidden blocks of noHidden
                    # weights, so the slice must depend on j.
                    if i == 0:
                        start = j * self.noInput
                        w = self.hiddenWeights[start:start + self.noInput]
                    else:
                        offset = (self.noHidden * self.noInput
                                  + (i - 1) * self.noHidden * self.noHidden)
                        start = offset + j * self.noHidden
                        w = self.hiddenWeights[start:start + self.noHidden]
                    wb = self.hiddenBiasWeights[i * self.noHidden + j]
                # Add the new neuron: first-layer neurons take noInput
                # inputs, deeper ones take noHidden.
                if i == 0:
                    layer[j] = Neuron.Neuron(af.Logistic(), self.noInput, w, wb)
                else:
                    layer[j] = Neuron.Neuron(af.Logistic(), self.noHidden, w, wb)

            self.hidden[i] = layer

        # Output layer: each neuron reads all noHidden activations of the
        # last hidden layer.
        for i in range(0, self.noOutput):
            w = []
            wb = []
            if len(self.outputWeights) != 0 and len(self.outputBiasWeights) != 0:
                start = i * self.noHidden
                w = self.outputWeights[start:start + self.noHidden]
                wb = self.outputBiasWeights[i]
            self.output[i] = Neuron.Neuron(af.Logistic(), self.noHidden, w, wb)
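
The calls above assume a Neuron.Neuron(activationFunction, nInputs, weights, biasWeight) constructor that falls back to random initialisation when the weight arguments are empty. A minimal sketch of that assumed interface, not the original implementation; the fire method, the apply() name, and the random range are guesses:

import random


class Neuron:
    def __init__(self, activationFunction, nInputs, weights=None, biasWeight=None):
        self.activationFunction = activationFunction
        self.nInputs = nInputs
        # Empty or missing arguments mean "initialise randomly", which is
        # how initializeNN calls this when no stored weights exist.
        if not weights:
            weights = [random.uniform(-1.0, 1.0) for _ in range(nInputs)]
        if biasWeight is None or biasWeight == []:
            biasWeight = random.uniform(-1.0, 1.0)
        self.weights = list(weights)
        self.biasWeight = biasWeight

    def fire(self, inputs):
        # Weighted sum plus bias, squashed by the activation function;
        # apply() is a hypothetical method name for af.Logistic and
        # af.NoActivationFunction.
        s = sum(w * x for w, x in zip(self.weights, inputs)) + self.biasWeight
        return self.activationFunction.apply(s)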

import unittest

# Assumes the Neuron class under test is importable, e.g.
# from neuron import Neuron (module name hypothetical).


class NeuronTestcase(unittest.TestCase):
    def setUp(self):
        self.neuron = Neuron(['a', 'b', 'c'])

        self.neuron.input_weights['a'] = 0.25
        self.neuron.input_weights['b'] = 0.50
        self.neuron.input_weights['c'] = 0.75

    def test_calc(self):
        self.assertAlmostEqual(self.neuron.calc({'a': 1.0, 'b': 0.0, 'c': 0.0}), 0.562177, 4)
        self.assertAlmostEqual(self.neuron.calc({'a': 0.0, 'b': 1.0, 'c': 0.0}), 0.622459, 4)
        self.assertAlmostEqual(self.neuron.calc({'a': 0.0, 'b': 0.0, 'c': 1.0}), 0.679179, 4)

        self.assertAlmostEqual(self.neuron.calc({'a': 1.0, 'b': 1.0, 'c': 1.0}), 0.817574, 4)
        self.assertAlmostEqual(self.neuron.calc({'a': 1.0, 'b': 1.0, 'c': 0.0}), 0.679179, 4)
        self.assertAlmostEqual(self.neuron.calc({'a': 1.0, 'b': 0.0, 'c': 1.0}), 0.731059, 4)
        self.assertAlmostEqual(self.neuron.calc({'a': 0.0, 'b': 1.0, 'c': 1.0}), 0.7773, 4)

    def test_sigma_derivative_with_sigma(self):
        for x in range(-5, 5):
            alternative = Neuron.sigma(x) - Neuron.sigma(x) * Neuron.sigma(x)
            self.assertAlmostEqual(Neuron.sigma_derivative(x), alternative, 5)

        for x in range(-5, 5):
            alternative = Neuron.sigma(x) * (1 - Neuron.sigma(x))
            self.assertAlmostEqual(Neuron.sigma_derivative(x), alternative, 5)

    def test_equal(self):
        self.assertTrue(Neuron.equal(0.0, 0.0))
        self.assertTrue(Neuron.equal(0.0, 0.01))
        self.assertTrue(Neuron.equal(0.0, 0.05))
        self.assertTrue(Neuron.equal(1.0, 0.95))
        self.assertTrue(Neuron.equal(1.0, 1.05))

        self.assertFalse(Neuron.equal(0.0, 1.0))
        self.assertFalse(Neuron.equal(1.0, 0.0))
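
Read together, these assertions pin the behaviour down: calc applies the logistic function to the weighted sum of the named inputs with no bias term, sigma_derivative equals sigma(x) * (1 - sigma(x)), and equal is an approximate comparison that tolerates a difference of about 0.05. A sketch reconstructed from the assertions rather than taken from the original source:

import math


class Neuron:
    def __init__(self, input_names):
        # One weight per named input; the tests assign real values in setUp.
        self.input_weights = {name: 0.0 for name in input_names}

    def calc(self, inputs):
        # Logistic activation of the weighted input sum; no bias term,
        # which is what the expected values in test_calc imply.
        total = sum(self.input_weights[k] * v for k, v in inputs.items())
        return Neuron.sigma(total)

    @staticmethod
    def sigma(x):
        return 1.0 / (1.0 + math.exp(-x))

    @staticmethod
    def sigma_derivative(x):
        s = Neuron.sigma(x)
        return s * (1.0 - s)

    @staticmethod
    def equal(a, b, tolerance=0.05):
        # Approximate equality; the 0.05 tolerance is inferred from the
        # passing and failing cases in test_equal and test_sigma.
        return abs(a - b) <= tolerance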
Example #5
    def __init__(
        self,
        Neurons,
        LearningRate,
        activationFunction,
        errorFunction,
        name=None,
    ):
        self.Layer = []
        self.R = LearningRate
        # Layer activations, kept flat for quick referencing.
        self.outputs = []
        self.neuronCount = 0
        if name is None:
            self.name = Layer.layerNum
        else:
            self.name = name
        if isinstance(Neurons, int):
            for i in range(Neurons):
                newNeuron = Neuron.Neuron(LearningRate, activationFunction,
                                          errorFunction)
                self.Layer.append(newNeuron)
                self.outputs.append(0)
                self.neuronCount += 1
        else:
            # Fragile: each neuronParams tuple must list its values in the
            # Neuron constructor's positional order.
            for neuronParams in Neurons:
                if len(neuronParams) == 1:
                    newNeuron = Neuron.Neuron(neuronParams[0])
                elif len(neuronParams) == 2:
                    newNeuron = Neuron.Neuron(neuronParams[0], neuronParams[1])
                else:
                    newNeuron = Neuron.Neuron(neuronParams[0], neuronParams[1],
                                              neuronParams[2])
                self.Layer.append(newNeuron)
                self.outputs.append(0)
                self.neuronCount += 1
        Layer.layerNum += 1
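
A hypothetical usage of this constructor; af.Logistic and squaredError stand in for this codebase's activation and error functions, and the tuple contents follow the len-1/len-2/len-3 expansion into Neuron(LearningRate, activationFunction, errorFunction) shown above:

import ActivationFunction as af  # assumed module name


def squaredError(target, actual):  # stand-in error function
    return 0.5 * (target - actual) ** 2


# Four identical logistic neurons with learning rate 0.1:
hidden = Layer(4, 0.1, af.Logistic(), squaredError, name="hidden-1")

# Per-neuron parameter tuples, expanded positionally:
mixed = Layer([(0.1,), (0.1, af.Logistic()),
               (0.1, af.Logistic(), squaredError)],
              0.1, af.Logistic(), squaredError)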

    def test_sigma(self):
        self.assertTrue(Neuron.equal(Neuron.sigma(0.0), 0.5))
        self.assertTrue(Neuron.equal(Neuron.sigma(1.0), 0.75))
        self.assertTrue(Neuron.equal(Neuron.sigma(-1.0), 0.25))

    def test_sigma_derivative(self):
        self.assertTrue(Neuron.equal(Neuron.sigma_derivative(0.0), 0.25))
        self.assertTrue(Neuron.equal(Neuron.sigma_derivative(-5), 0.0))
        self.assertTrue(Neuron.equal(Neuron.sigma_derivative(+5), 0.0))

    def test_neuron(self):
        neuron = Neuron(3)
        self.assertEqual(neuron.size, 3)
        self.assertEqual(neuron.delta, 0)
        self.assertEqual(len(neuron.weights), 3)
        self.assertEqual(len(neuron.inputs), 0)
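
test_neuron targets yet another Neuron variant, constructed from an input count rather than input names. A minimal sketch consistent with just these four assertions; the random weight range is an assumption:

import random


class Neuron:
    def __init__(self, size):
        self.size = size  # number of inputs
        self.delta = 0    # backprop error term; stays 0 until training
        # One random weight per input; the range is a guess.
        self.weights = [random.uniform(-1.0, 1.0) for _ in range(size)]
        self.inputs = []  # filled during the forward pass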