Example #1
0
def test_accuracy():
    """Check Accuracy and its variants on multiclass and binary inputs."""
    accuracy = Accuracy()
    accuracy_with_logits = AccuracyWithLogits()
    top2 = TopKAccuracy(2)
    top3 = TopKAccuracy(3)
    fake_accuracy = AverageMetric(accuracy.score)

    # Multiclass case: each row is a score vector, labels are class indices.
    preds = J.Variable(J.Tensor([[0.9, 0, 0.1, 0], [0.2, 0.3, 0.1, 0.4]]))
    labels = J.Variable(J.LongTensor([0, 0]))
    assert accuracy(preds, labels).item() == 50.
    assert fake_accuracy(preds, labels).item() == 50.
    # Softmax is monotonic, so treating the scores as logits keeps the ranking.
    assert accuracy_with_logits(preds, labels) == 50.
    assert top2(preds, labels) == 50.
    assert top3(preds, labels) == 100.

    # Binary case: a single logit column squashed through a sigmoid.
    logits = J.Variable(J.Tensor([[100.], [-100.]]))
    probs = torch.sigmoid(logits)
    labels = J.Variable(J.LongTensor([0, 0]))
    assert accuracy(probs, labels) == 50.
    assert fake_accuracy(probs, labels) == 50.
    assert accuracy_with_logits(logits, labels) == 50.
Example #2
0
def test_accumulation():
    """Accuracy accumulates across calls and clears on reset()."""
    metric = Accuracy()

    # First batch: 3 samples, none predicted correctly.
    scores = J.Variable(J.Tensor([
        [0.9, 0, 0.1, 0],
        [0.2, 0.3, 0.1, 0.4],
        [1.0, 0.0, 0.0, 0.0],
    ]))
    labels = J.Variable(J.LongTensor([1, 2, 3]))
    metric(scores, labels)
    # Second batch: 2 samples, 1 predicted correctly.
    scores = J.Variable(J.Tensor([[0.9, 0, 0.1, 0], [0.2, 0.3, 0.1, 0.4]]))
    labels = J.Variable(J.LongTensor([0, 0]))
    metric(scores, labels)
    # 1 correct out of 5 samples seen so far.
    assert metric.accumulate() == 20.
    # After reset, only the most recent batch contributes.
    metric = metric.reset()
    metric(scores, labels)
    assert metric.accumulate() == 50.
Example #3
0
def multi_bce_with_logits_seq(outputs, targets, size_average=True):
    """Binary cross-entropy with logits over a list of variable-length outputs.

    Each entry of ``outputs`` is a 1-D score sequence; ``targets[i]`` is its
    single label, broadcast across that sequence. All sequences are flattened
    into one batch so that a single BCE call covers every element.
    """
    # Concatenate to a single (sum(Li)) x 1 column of logits.
    flat_outputs = torch.cat(outputs, dim=0).unsqueeze(1)
    # Flat 1-D target buffer, filled segment by segment below.
    flat_targets = Variable(J.Tensor(np.zeros((flat_outputs.size(0), ))))
    offset = 0
    for seq_idx, seq_scores in enumerate(outputs):
        next_offset = offset + seq_scores.size(0)
        # Broadcast this sequence's scalar target over its whole segment.
        flat_targets[offset:next_offset] = targets[seq_idx].expand(
            seq_scores.size(0))
        offset = next_offset
    return bce_with_logits(flat_outputs,
                           flat_targets,
                           size_average=size_average)
Example #4
0
def test_categorical_crossentropy():
    """The loss on this single sample should equal ln(2) ≈ 0.6931."""
    scores = J.Variable(J.Tensor([[0.5, 0, 0.5, 0]]))
    target = J.Variable(J.LongTensor([0]))
    loss = categorical_crossentropy(scores, target).item()
    np.testing.assert_almost_equal(0.6931471824645996, loss)
Example #5
0
def test_flatten_3d_tensor():
    """J.flatten collapses a (2, 3, 4) tensor to (2, 12), row order preserved."""
    source = np.arange(2 * 3 * 4, dtype=np.float32).reshape((2, 3, 4))
    # The expected result keeps the leading dim and merges the rest.
    expected = source.reshape(2, -1)
    flattened = J.flatten(J.Tensor(source)).cpu().numpy()
    np.testing.assert_array_equal(flattened, expected)