def backpropagate(self, source):
    """Backpropagate error through this neuron.

    Each call checks *source* off the set of output neurons this neuron
    is waiting on. Once every output neuron has reported in, the method
    computes the projected-error and gate-error components, stores their
    sum in ``self.error``, and recursively notifies all input neurons.

    Args:
        source: The output (or gated) neuron notifying this neuron that
            its error is ready to be collected.
    """
    # Check off the notifying neuron. discard() is a no-op when the
    # element is absent — identical to the original
    # try/remove/except-KeyError, just the idiomatic spelling.
    self.outputSet.discard(source)

    # Only continue once we have been notified by all output neurons.
    if not self.outputSet:
        # Reset the set so the next backward pass starts fresh.
        self.resetOutputSet()

        # Error contributed through connections this neuron projects to.
        self.projectedError = calculateProjectedError(
            self.derivative, self.projectedConnections)

        # Error contributed through connections this neuron gates.
        gateError = calculateGateError(self.derivative, self.gatedConnections)

        # Total error is the sum of both components; stored for the
        # subsequent weight adjustments.
        self.error = gateError + self.projectedError

        # Propagate the notification upstream to every input neuron.
        for neuron in self.inputSet:
            neuron.backpropagate(self)
def test_projected_error(self):
    """calculateProjectedError across derivative/connection combinations."""
    cases = [
        # (derivative, connections, expected)
        (0.0, [self.c1], 0.0),                        # zero derivative
        (self.derivative, [self.c1], 22.5),           # one ungated connection
        (self.derivative, [self.c1, self.c2], 62.5),  # two ungated connections
        (self.derivative, [self.c1, self.g1], 97.5),  # one ungated, one gated
    ]
    for derivative, connections, expected in cases:
        result = functions.calculateProjectedError(derivative, connections)
        self.assertAlmostEqual(expected, result)