import numpy as np

# Graph, init, the layer functions/classes under test, gradient_checker and
# constants are assumed to be imported from the framework being tested; the
# exact module paths depend on the project layout.


def test_mean_squared_error_forward_zero_loss():
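    """MSE of two identical vectors is exactly zero."""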
    data = Graph(init([0, 0, 0, 1]))
    label = Graph(init([0, 0, 0, 1]))

    mse = mean_squared_error(data, label)

    assert float(mse.data) == 0


def test_sigmoid_forward():
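    """Sigmoid forward pass matches precomputed reference values."""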
    data = init([[-0.22342056, 0.6927312], [0.4227562, -0.59764487],
                 [0.7870561, 0.372502]])

    sigmoid_output = sigmoid(Graph(data))
    desired = init([[0.44437608, 0.66657424], [0.6041426, 0.3548827],
                    [0.6871989, 0.5920634]])

    np.testing.assert_allclose(sigmoid_output.data, desired)


def test_add_backward():
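    """Analytical Add gradients agree with numerical gradients for both inputs."""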
    data = init([2])
    gradient = init([1])

    add_function = Add()
    computed_gradients_1, computed_gradients_2 = add_function.backward(gradient)

    data_copy = np.copy(data)
    f = lambda: add_function.internal_forward((data, data_copy))
    numerical_gradients_1, numerical_gradients_2 = gradient_checker.compute_numerical_gradient(
        f, (data, data_copy), (gradient,))

    gradient_checker.assert_allclose(computed_gradients_1, numerical_gradients_1)
    gradient_checker.assert_allclose(computed_gradients_2, numerical_gradients_2)


def fixed_case(with_label=False):
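    """Fixed 3x4 input paired with either integer labels or a dense target."""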
    data = init([
        [0, 0, 0, 1],
        [1, 0, 0, 1],
        [2, 0, 1, 0],
    ])

    if with_label:
        data_2 = np.array([3, 0, 1], dtype=np.int32)
    else:
        data_2 = init([[1, 0, 0, 1], [0, 0, 0, 1], [0, 0, 0, 0]])

    return data, data_2


def test_relu_forward():
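    """ReLU zeroes negative entries and passes positive ones through unchanged."""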
    data = init([[-0.620304, -0.1285682, 0.4867715, 0.09824127],
                 [-0.37919873, -0.9272095, -0.0704312, 0.35593647],
                 [0.19380952, 0.06425636, 0.21729442, -0.3168534],
                 [-0.62586236, -0.4846, 0.84347826, 0.22025743],
                 [0.02966821, -0.2127131, -0.33760294, -0.9477733]])

    desired = init([[0., 0., 0.4867715, 0.09824127],
                    [0., 0., 0., 0.35593647],
                    [0.19380952, 0.06425636, 0.21729442, 0.],
                    [0., 0., 0.84347826, 0.22025743],
                    [0.02966821, 0., 0., 0.]])

    relu_output = relu(Graph(data))
    np.testing.assert_allclose(relu_output.data, desired)


def test_sgd():
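    """SGD update deltas are the gradients scaled by the learning rate."""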
    learning_rate = 0.0017
    optimizer = SGD(learning_rate)

    gradients = init(
        [[0.78266141, 0.87160521, 0.91545263, 0.41808932, 0.63775016],
         [0.16893565, 0.25077806, 0.88390805, 0.92372049, 0.0741453],
         [0.63734837, 0.28873811, 0.20229677, 0.12343409, 0.08427269]])

    desired = init(
        [[0.00133052, 0.00148173, 0.00155627, 0.00071075, 0.00108418],
         [0.00028719, 0.00042632, 0.00150264, 0.00157032, 0.00012605],
         [0.00108349, 0.00049085, 0.0003439, 0.00020984, 0.00014326]])

    deltas, = optimizer.run_update_rule((gradients, ), None)
    gradient_checker.assert_allclose(deltas, desired)


def test_softmax_cross_entropy_backward():
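    """Data gradient matches the numerical estimate; the label gradient is None."""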
    data, labels = get_data()
    gradient = init([2])

    loss_function = SoftmaxCrossEntropy()
    loss_function(Graph(data), Graph(labels))
    computed_gradient_data, computed_gradient_label = loss_function.backward(gradient)
    assert computed_gradient_label is None

    f = lambda: loss_function.internal_forward((data, labels))
    numerical_gradient_data, _ = gradient_checker.compute_numerical_gradient(
        f, (data, labels), (gradient,), eps=1e-2)

    gradient_checker.assert_allclose(computed_gradient_data,
                                     numerical_gradient_data,
                                     atol=1e-4)


def fixed_case():
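    """Fixed input, weights and bias with the expected output of data @ weights + bias."""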
    data = init([
        [0.44,  0.06, 0.33,  0.76],
        [0.53,  0.65, 0.06, -0.35],
        [0.29, -0.90, 0.86,  0.76],
    ])

    weights = init([
        [-0.74, -0.44],
        [-0.51,  0.63],
        [ 0.73, -0.38],
        [ 0.24, -0.43],
    ])

    bias = init([0.86, 0.63])

    expected = init([
        [0.9271,  0.022],
        [0.0961,  0.934],
        [1.9146, -0.7182],
    ])

    return data, weights, bias, expected


def test_sum_backward():
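    """Analytical Sum gradient agrees with the numerical gradient."""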
    data = np.random.uniform(-1, 1, (3, 2)).astype(constants.DTYPE)
    gradient = init([2])

    data_graph = Graph(data)
    sum_function = Sum()
    sum_function(data_graph)
    computed_gradients, = sum_function.backward((gradient, ))

    f = lambda: sum_function.internal_forward((data, ))
    numerical_gradients, = gradient_checker.compute_numerical_gradient(
        f, (data, ), (gradient, ))

    gradient_checker.assert_allclose(computed_gradients,
                                     numerical_gradients,
                                     atol=1e-4,
                                     rtol=1e-3)


def test_mean_squared_error_backward_with_label():
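    """Data gradient matches the numerical estimate; integer labels get no gradient."""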
    data, data_2 = fixed_case(with_label=True)
    gradients = init([2])

    data_1_graph = Graph(data)
    data_2_graph = Graph(data_2)

    mse_function = MeanSquaredError()
    mse_function(data_1_graph, data_2_graph)
    computed_gradient_1, computed_gradient_2 = mse_function.backward(gradients)
    assert computed_gradient_2 is None

    f = lambda: mse_function.internal_forward((data, data_2))
    numerical_gradient_1, _ = gradient_checker.compute_numerical_gradient(
        f, (data, data_2), (gradients, ))

    gradient_checker.assert_allclose(computed_gradient_1, numerical_gradient_1)


def test_add_forward():
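    """Adding a Graph to itself doubles its value."""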
    data = init([2])

    result = add(Graph(data), Graph(data))
    assert result.data == 4


def get_base_data():
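    """Two fixed rows stacked into a (2, 6) batch plus integer class labels."""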
    first_element = init([-10, -10, -5, -4, 5, -1])
    second_element = init([-10, -10, 7, 3, 4, -1])
    data = np.stack((first_element, second_element))
    labels = np.array([4, 0])
    return data, labels