Example #1
    # Assumed context (not shown in the original): a method on a mygrad
    # Operation class, with np (numpy), Tensor, check_loss_inputs, and a
    # logsumexp(arr, axis=..., keepdims=...) helper (e.g.
    # scipy.special.logsumexp) available at module level.
    def __call__(self, x, y_true):
        """ Parameters
            ----------
            x : mygrad.Tensor, shape=(N, C)
                The C class scores for each of the N pieces of data.

            y_true : Sequence[int]
                The correct class-indices, in [0, C), for each datum.

            Returns
            -------
            The average softmax cross-entropy loss"""
        if isinstance(y_true, Tensor):
            y_true = y_true.data

        check_loss_inputs(x, y_true)
        self.variables = (x,)
        scores = x.data
        # log-softmax computed stably via the log-sum-exp trick
        log_softmax = scores - logsumexp(scores, axis=-1, keepdims=True)
        # index the log-probability of the correct class for each datum
        label_locs = (range(len(scores)), y_true)
        loss = -np.sum(log_softmax[label_locs]) / scores.shape[0]

        # cache the gradient w.r.t. the scores: (softmax(x) - one_hot(y_true)) / N
        self.back = np.exp(log_softmax)
        self.back[label_locs] -= 1.0
        self.back /= scores.shape[0]
        return loss
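
For reference, a minimal numeric sketch of the same computation on a tiny
batch, assuming scipy.special.logsumexp for the reduction (the values and
names here are illustrative, not part of mygrad's API):

import numpy as np
from scipy.special import logsumexp

# tiny batch: N=2 data, C=3 class scores (illustrative values)
scores = np.array([[2.0, 1.0, 0.1],
                   [0.5, 2.5, 0.3]])
y_true = [0, 1]

log_softmax = scores - logsumexp(scores, axis=-1, keepdims=True)
loss = -np.sum(log_softmax[range(len(scores)), y_true]) / scores.shape[0]

# equivalently: the mean negative log-probability of the correct class
probs = np.exp(log_softmax)
assert np.isclose(loss, -np.log(probs[range(2), y_true]).mean())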
Example #2
# Assumed context (not shown in the original): a hypothesis property-test.
# Presumed names: np (numpy), assert_array_equal (numpy.testing), special
# (scipy), given/st (hypothesis), hnp (hypothesis.extra.numpy), mygrad's
# logsumexp, and valid_axes, an axis-drawing strategy from mygrad's test
# suite. The decorator below is an illustrative reconstruction.
@given(data=st.data(), x=hnp.arrays(np.float64, hnp.array_shapes()), keepdims=st.booleans())
def test_logsumexp(data: st.DataObject, x: np.ndarray, keepdims: bool):
    # draw a reduction axis (or tuple of axes) valid for x's dimensionality
    axes = data.draw(valid_axes(ndim=x.ndim), label="axes")
    # mygrad's logsumexp should agree with scipy's reference implementation
    mygrad_result = logsumexp(x, axis=axes, keepdims=keepdims)
    scipy_result = special.logsumexp(x, axis=axes, keepdims=keepdims)
    assert_array_equal(
        mygrad_result,
        scipy_result,
        err_msg="mygrad's implementation of logsumexp does "
        "not match scipy's",
    )
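
valid_axes comes from mygrad's test suite and its definition is not shown
above. A rough, hypothetical stand-in (the real strategy may differ) could
draw None, a single axis, or a tuple of distinct axes:

import hypothesis.strategies as st

def valid_axes(ndim: int) -> st.SearchStrategy:
    # hypothetical sketch, assuming ndim >= 1
    single = st.integers(-ndim, ndim - 1)  # one axis; negatives allowed
    multi = st.lists(
        st.integers(0, ndim - 1), min_size=1, max_size=ndim, unique=True
    ).map(tuple)  # tuple of distinct axes
    return st.none() | single | multi  # None reduces over all axes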