Example #1
def feedforward(self, a):
    """Return the output of the network when a is the input. a must be a
    vector of length 784, the number of pixels in one MNIST image."""
    # Propagate the activation through each layer in turn.
    for b, w in zip(self.biases, self.weights):
        a = sigmoid(np.dot(w, a) + b)
    return a
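Both examples assume a NumPy-based sigmoid helper that is not shown on this page. A minimal sketch of such a helper (the exact definition used by these snippets may differ):

import numpy as np

def sigmoid(z):
    """Elementwise logistic function 1 / (1 + exp(-z))."""
    return 1.0 / (1.0 + np.exp(-z))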
Example #2
def backpropagate(network, x, y):
    """Calculates the gradient for the cost function"""
    nabla_b = [np.zeros(b.shape) for b in network.biases]
    nabla_w = [np.zeros(w.shape) for w in network.weights]

    activation = x
    activations = [x]
    zs = []
    """Forward"""
    for b, w in zip(network.biases, network.weights):
        z = np.dot(w, activation) + b
        zs.append(z)
        activation = sigmoid(z)
        activations.append(activation)
    """Backward"""
    delta = network.cost.delta(zs[-1], activations[-1], y)
    nabla_b[-1] = delta
    nabla_w[-1] = np.dot(delta, activations[-2].transpose())
    for i in range(2, network.num_layers):
        z = zs[-i]
        sp = sigmoid_prime(z)
        delta = np.dot(network.weights[-i + 1].transpose(), delta) * sp
        nabla_b[-i] = delta
        nabla_w[-i] = np.dot(delta, activations[-i - 1].transpose())
    return nabla_b, nabla_w
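backpropagate also relies on a sigmoid_prime helper and on a cost.delta method that returns the output-layer error (for a cross-entropy cost with sigmoid outputs that would simply be activations[-1] - y). A minimal sketch of sigmoid_prime, plus a hypothetical single-example gradient-descent step showing how the returned gradients might be applied (update_from_example and eta are illustrative names, not taken from the snippets):

def sigmoid_prime(z):
    """Derivative of the sigmoid, used in the backward pass above."""
    return sigmoid(z) * (1.0 - sigmoid(z))


def update_from_example(network, x, y, eta=0.1):
    # Hypothetical plain gradient-descent step on one example; eta is the
    # learning rate.
    nabla_b, nabla_w = backpropagate(network, x, y)
    network.weights = [w - eta * nw for w, nw in zip(network.weights, nabla_w)]
    network.biases = [b - eta * nb for b, nb in zip(network.biases, nabla_b)]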
def test_sigmoid_function_returns_correct_value_on_zero():
    assert sigmoid(0) == 0.5


def test_sigmoid_function_returns_correct_value_on_upper_bound():
    # For large inputs sigmoid saturates to exactly 1.0 in double precision.
    assert sigmoid(37) == 1.0


def test_sigmoid_function_returns_correct_value_on_lower_bound():
    assert sigmoid(-10) < 0.01
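The upper-bound test relies on exact float equality, which only works because sigmoid saturates in double precision. If that feels fragile, the same check can be written with a tolerance; this variant assumes pytest is installed (the test names above already follow pytest conventions):

import pytest

def test_sigmoid_upper_bound_with_tolerance():
    # Same check as above, but robust to floating-point rounding.
    assert sigmoid(37) == pytest.approx(1.0)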