Example #1
0
 def test_binary_cross_entropy_pytorch(self):
     """BinaryCrossEntropy: the PyTorch loss matches a hand-computed reference."""
     predictions = torch.tensor([[0.1, 0.8], [0.4, 0.6]])
     targets = torch.tensor([[0.0, 1.0], [1.0, 0.0]])
     loss_fn = losses.BinaryCrossEntropy()._create_pytorch_loss()
     computed = loss_fn(predictions, targets).numpy()
     # Per-row BCE: average of -log(prob assigned to the true class)
     # over the two entries in each row.
     reference = [
         -(np.log(0.9) + np.log(0.8)) / 2,
         -(np.log(0.4) + np.log(0.4)) / 2,
     ]
     assert np.allclose(reference, computed)
Example #2
0
 def test_binary_cross_entropy_tf(self):
     """BinaryCrossEntropy: the TensorFlow loss matches a hand-computed reference."""
     predictions = tf.constant([[0.1, 0.8], [0.4, 0.6]])
     targets = tf.constant([[0.0, 1.0], [1.0, 0.0]])
     computed = losses.BinaryCrossEntropy()._compute_tf_loss(predictions, targets).numpy()
     # Per-row BCE: average of -log(prob assigned to the true class)
     # over the two entries in each row.
     reference = [
         -(np.log(0.9) + np.log(0.8)) / 2,
         -(np.log(0.4) + np.log(0.4)) / 2,
     ]
     assert np.allclose(reference, computed)