def test_dsoftmax_matrix():
    # Random 2-d shape with between 2 and 10 rows and columns
    tensor_shape = [random.randint(2, 10) for _ in range(2)]

    # Verify the analytic softmax derivative against a numerical gradient check
    # on a random matrix input; `calculate` and `helpers` come from the
    # surrounding test module's imports.
    helpers.check_gradient(calculate.softmax,
                           lambda X: calculate.dsoftmax(calculate.softmax(X)),
                           f_arg_tensor=numpy.random.random(tensor_shape),
                           f_shape='jac-stack')
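The test relies on helpers.check_gradient from the project's test utilities, which is not shown here. As a rough illustration of the underlying technique only (the name numeric_jacobian, the step size, and the tolerance are assumptions, not part of the library), a central-difference Jacobian check for a vector-valued function might look like this:

import numpy

def numeric_jacobian(f, x, epsilon=1e-6):
    """Estimate the Jacobian of a vector-valued f at x via central differences."""
    x = numpy.asarray(x, dtype=float)
    output_size = f(x).size
    jacobian = numpy.zeros((output_size, x.size))
    for j in range(x.size):
        step = numpy.zeros_like(x)
        step[j] = epsilon
        jacobian[:, j] = (f(x + step) - f(x - step)) / (2.0 * epsilon)
    return jacobian

# Example: check an analytic Jacobian against the numerical estimate.
# For elementwise tanh, the Jacobian is diag(1 - tanh(x)**2).
x = numpy.random.random(4)
analytic = numpy.diag(1.0 - numpy.tanh(x) ** 2)
assert numpy.allclose(analytic, numeric_jacobian(numpy.tanh, x), atol=1e-6)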
Example #2
    def derivative(self, input_vec, output_vec):
        """Return the derivative of this function.

        Both the input and the output vector are taken because
        some derivatives can be calculated more efficiently from
        the output of this function.
        """
        return calculate.dsoftmax(output_vec)
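To make the docstring's point concrete: softmax is one of those cases, because its Jacobian can be written purely in terms of the output y, since dy_i/dx_j = y_i * (delta_ij - y_j). A minimal sketch of that idea (the name softmax_jacobian is illustrative; the library's actual implementation is calculate.dsoftmax):

import numpy

def softmax_jacobian(y):
    """Jacobian of softmax computed from its output y: diag(y) - y y^T."""
    y = numpy.asarray(y, dtype=float)
    return numpy.diag(y) - numpy.outer(y, y)

# Because softmax outputs sum to 1, every row of this Jacobian sums to 0.
y = numpy.array([0.2, 0.3, 0.5])
assert numpy.allclose(softmax_jacobian(y).sum(axis=1), 0.0)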
Example #3
def test_dsoftmax():
    helpers.check_gradient(calculate.softmax,
                           lambda x: calculate.dsoftmax(calculate.softmax(x)),
                           f_shape='jac')