Example #1
 def test_sigmoid_gradient(self):
     """Sigmoid gradient matches reference values on a symmetric grid.

     g'(z) peaks at 0.25 for z = 0 and is symmetric about zero, so the
     expected values mirror around the midpoint.
     """
     grid = array([-1, -0.5, 0, 0.5, 1])
     expected = [0.196612, 0.235004, 0.25, 0.235004, 0.196612]
     # Absolute tolerance only: the reference values are rounded to 6 d.p.
     assert_allclose(g_grad(grid), expected, rtol=0, atol=0.001,
                     equal_nan=False)
Example #2
def back_propagation(y, theta, a, z, num_labels, n_hidden_layers=1):
    """Applies back propagation to minimize model's loss.

    Args:
        y (numpy.array): Column vector of expected values.
        theta (numpy.array(numpy.array)): array of model's weight matrices by
            layer.
        a (numpy.array(numpy.array)): array of activation matrices by layer.
        z (numpy.array(numpy.array)): array of parameters prior to sigmoid by
            layer.
        num_labels (int): Number of classes in multiclass classification.
        n_hidden_layers (int): Number of hidden layers in network.

    Returns:
        numpy.array(numpy.array): array of matrices of 'error values' by layer.
    """
    # One error matrix per layer; slot 0 (input layer) is never filled.
    last = n_hidden_layers + 1
    delta = empty(last + 1, dtype=object)

    # Output-layer error: activation minus one-hot encoding of y, built
    # column by column. NOTE(review): assumes (y == c) broadcasts against
    # a[last][:, c] — i.e. y is effectively 1-D here; confirm with callers.
    delta[last] = zeros(shape=a[last].shape, dtype=float64)
    for label in range(num_labels):
        delta[last][:, label] = a[last][:, label] - (y == label)

    # Propagate error backwards through the hidden layers (down to layer 1);
    # [:, 1:] drops the bias column before scaling by the sigmoid gradient.
    for layer in range(last, 1, -1):
        propagated = delta[layer].dot(theta[layer - 1])[:, 1:]
        delta[layer - 1] = propagated * g_grad(z[layer - 1])

    return delta