Example #1
    def test_softmax_calculate_gradient(self):
        # Given
        pre_activation = np.array([[1, 2, 3, 6], [2, 4, 5, 6], [3, 8, 7, 6]])
        target = np.array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0]])  # one-hot labels, one row per sample
        softmax = Softmax()

        # When
        activation = softmax.apply_activation(pre_activation)
        grad = softmax.calculate_gradient(activation, target)

        # Then
        # For softmax followed by cross-entropy loss, the gradient with respect to the
        # pre-activation is activation - target; this assertion assumes
        # calculate_gradient follows that convention.
        np.testing.assert_allclose(grad, activation - target)
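
For context, a minimal Softmax that would pass this test might look like the sketch below. It is an assumption, not the original project's code: apply_activation is taken to be a row-wise, numerically stable softmax, and calculate_gradient is taken to return the combined softmax/cross-entropy gradient, activation - target, for one-hot targets.

    import numpy as np

    class Softmax:
        def apply_activation(self, pre_activation):
            # Row-wise softmax; subtracting the row max avoids overflow in exp.
            shifted = pre_activation - pre_activation.max(axis=1, keepdims=True)
            exp = np.exp(shifted)
            return exp / exp.sum(axis=1, keepdims=True)

        def calculate_gradient(self, activation, target):
            # Gradient of cross-entropy loss w.r.t. the pre-activation when the
            # activation is softmax: simply activation - target.
            return activation - target

Under this sketch each activation row sums to 1, and grad comes back as activation - target with the same shape as the input, which is exactly what the assertion in the test above checks.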