Exemplo n.º 1
0
def test_crossentropy():
    """Cross-entropy against label i must equal -log of the i-th softmax prob."""
    logits = np.array([0.1, 0.5, 0.7, 0.4])
    probs = softmax(logits)
    # Check every possible label, pairing each index with its probability.
    for label, prob in enumerate(probs):
        assert crossentropy(logits=logits, label=label) == approx(-np.log(prob))
Exemplo n.º 2
0
def test_softmax():
    """softmax output must form a probability distribution (sums to 1)."""
    logits = np.array([0.1, 0.5, 0.7, 0.4])
    probs = softmax(logits)
    # Sanity check: the raw logits themselves do not already sum to 1.
    assert np.sum(logits) != approx(1.)
    assert np.sum(probs) == approx(1.)
 def score(Its):
     """Return the probability assigned to target class ``cI`` for each input.

     ``Its`` is a sequence of arrays (presumably images); they are stacked
     into one batch, un-normalized, and fed through model ``a``.

     NOTE(review): relies on ``a``, ``cI`` and ``unnormalize`` from the
     enclosing scope — this function only makes sense as a closure.
     """
     # Stack the individual arrays into a single batch array.
     Its = np.stack(Its)
     # Undo normalization — presumably the model expects the raw input
     # range; TODO confirm against unnormalize()'s definition.
     Its = unnormalize(Its)
     # strict=False: assumed to let batch_predictions accept this batch
     # shape/size leniently — NOTE(review): verify against the model API.
     batch_logits, _ = a.batch_predictions(Its, strict=False)
     # Softmax each example's logits and pick out class cI's probability.
     scores = [softmax(logits)[cI] for logits in batch_logits]
     return scores