Example #1
def test_softmax(t):
    # The entries of softmax along the reduced dimension must sum to 1;
    # checked for dim 3 and dim 1, then grad_check verifies the backward pass.
    q = minitorch.softmax(t, 3)
    x = q.sum(dim=3)
    assert_close(x[0, 0, 0, 0], 1.0)

    q = minitorch.softmax(t, 1)
    x = q.sum(dim=1)
    assert_close(x[0, 0, 0, 0], 1.0)

    minitorch.grad_check(lambda a: minitorch.softmax(a, dim=2), t)
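A minimal sketch in plain Python (standard definitions, not minitorch's implementation) of the property exercised above: softmax exponentiates each entry and normalizes by the total along the chosen dimension, so the entries along that dimension sum to 1. The list `row` is just an arbitrary illustrative slice.

import math

def softmax(xs):
    # standard definition: exponentiate each entry, then normalize by the total
    exps = [math.exp(x) for x in xs]
    total = sum(exps)
    return [e / total for e in exps]

row = [0.5, -1.0, 2.0, 0.0]                    # arbitrary example slice
assert abs(sum(softmax(row)) - 1.0) < 1e-9     # the softmaxed slice sums to 1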
Example #2
def test_log_softmax(t):
    # exp(logsoftmax(t)) must match softmax(t) at every position;
    # grad_check verifies the logsoftmax backward pass.
    q = minitorch.softmax(t, 3)
    q2 = minitorch.logsoftmax(t, 3).exp()
    for i in q._tensor.indices():
        assert_close(q[i], q2[i])

    minitorch.grad_check(lambda a: minitorch.logsoftmax(a, dim=2), t)
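The identity this test relies on, written out from the standard definitions (not taken from minitorch's source):

\[
\exp\big(\mathrm{logsoftmax}(x)_i\big)
= \exp\Big(x_i - \log \sum_j e^{x_j}\Big)
= \frac{e^{x_i}}{\sum_j e^{x_j}}
= \mathrm{softmax}(x)_i .
\]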
Example #3
def test_softmax(t):
    # Debugging variant of Example #1: the incoming t is replaced by a fixed
    # all-zero (1, 1, 4, 4) tensor and the intermediate values are printed.
    t = minitorch.tensor_fromlist([
        [
            [
                [0.00, 0.00, 0.00, 0.00],
                [0.00, 0.00, 0.00, 0.00],
                [0.00, 0.00, 0.00, 0.00],
                [0.00, 0.00, 0.00, 0.00]]]])
    q = minitorch.softmax(t, 2)
    print('=====t')
    print(t)
    print('=====q')
    print(q)
    x = q.sum(dim=3)  # note: softmax above was over dim 2, not 3; see the worked arithmetic after this example
    print('=====x')
    print(x)
    assert_close(x[0, 0, 0, 0], 1.0)

    q = minitorch.softmax(t, 1)
    x = q.sum(dim=1)
    assert_close(x[0, 0, 0, 0], 1.0)

    minitorch.grad_check(lambda a: minitorch.softmax(a, dim=2), t)
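Worked arithmetic for the fixed input above: every entry of the all-zero 4x4 slice maps to e^0 / (4 * e^0) = 1/4 under softmax over dim 2 (or dim 3), so summing any four entries gives 4 * 1/4 = 1.0. That is the only reason the first assertion passes with softmax taken over dim 2 but the sum taken over dim 3; for a general hypothesis-generated t those dimensions have to match, as in Example #1.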
Example #4
def test_log_softmax(t):
    # Spot-check of Example #2: compares softmax and exp(logsoftmax) at a
    # single index rather than at every position.
    q = minitorch.softmax(t, 2)
    logmax_q = minitorch.logsoftmax(t, 2).exp()
    assert_close(logmax_q[0, 0, 0], q[0, 0, 0])
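For context, logsoftmax is usually provided as its own operation rather than as log(softmax(x)) for numerical stability. Below is a minimal plain-Python sketch of the common log-sum-exp formulation with max subtraction; whether minitorch computes it exactly this way is an assumption not shown by these tests.

import math

def logsoftmax(xs):
    m = max(xs)                                            # shift by the max so exp() cannot overflow
    log_sum = m + math.log(sum(math.exp(x - m) for x in xs))
    return [x - log_sum for x in xs]

# a naive log(softmax(x)) would need exp(1000), which overflows; this stays finite
print(logsoftmax([1000.0, 1001.0, 1002.0]))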