Example #1
    def test_forward_backward_1_no_activation(self):
        """Forward pass and all three gradients of a 2->1 FC layer with identity activation."""
        inputs = np.arange(6).reshape(3, 2)
        inputs_var = Variable(inputs)
        layer = FullyConnectedLayer(2, 1, Identity, inputs_var)
        weights = layer._w._value.copy()
        bias = layer._b._value.copy()

        # Identity activation means the forward output is exactly x @ W + b.
        np.testing.assert_almost_equal(layer.forward(), inputs @ weights + bias)

        # Upstream gradient dL/d(xW+b): one scalar per sample.
        upstream = np.array([[6.0], [7.0], [8.0]])
        layer.backward(upstream)

        # Closed-form gradients of a linear layer:
        #   dL/dW = x^T @ upstream, dL/dx = upstream @ W^T, dL/db = column sum of upstream.
        np.testing.assert_allclose(layer._w.get_gradient(), inputs.T @ upstream)
        np.testing.assert_allclose(inputs_var.get_gradient(), upstream @ weights.T)
        np.testing.assert_allclose(layer._b.get_gradient(), upstream.sum(axis=0))
Example #2
    def test_forward_backward_4_no_activation(self):
        """Forward pass and gradients of a 2->4 FC layer with identity activation.

        Mirrors the single-output test: checks the forward product, then the
        weight, input, and bias gradients against closed-form expressions.
        """
        x = np.arange(6).reshape(3, 2)
        x_variable = Variable(x)
        fc = FullyConnectedLayer(2, 4, Identity, x_variable)
        w = fc._w._value.copy()
        b = fc._b._value.copy()
        # Identity activation: the forward output is exactly x @ w + b.
        wxb_desired = x @ w + b
        wxb_actual = fc.forward()

        np.testing.assert_almost_equal(wxb_actual, wxb_desired)

        # Upstream gradient dL/d(xW+b): one row per sample, one column per output unit.
        dl_dxwb = np.arange(6, 6 + 3 * 4).reshape(3, 4)

        fc.backward(dl_dxwb)

        # dL/dW spelled out per (input-feature, output-unit) pair;
        # equivalent to the closed form x.T @ dl_dxwb.
        dl_dw_actual = fc._w.get_gradient()
        dl_dw_desired = np.array([
            [x[:, 0].T @ dl_dxwb[:, 0], x[:, 0].T @ dl_dxwb[:, 1], x[:, 0].T @ dl_dxwb[:, 2],
             x[:, 0].T @ dl_dxwb[:, 3]],
            [x[:, 1].T @ dl_dxwb[:, 0], x[:, 1].T @ dl_dxwb[:, 1], x[:, 1].T @ dl_dxwb[:, 2],
             x[:, 1].T @ dl_dxwb[:, 3]],
        ])

        np.testing.assert_allclose(dl_dw_actual, dl_dw_desired)

        # Previously unchecked here (but checked in the 1-unit test):
        # input gradient dL/dx = upstream @ W^T and bias gradient dL/db = column sum.
        np.testing.assert_allclose(x_variable.get_gradient(), dl_dxwb @ w.T)
        np.testing.assert_allclose(fc._b.get_gradient(), dl_dxwb.sum(axis=0))