def test_log_loss_from_logits(self):
  """Test computing cross-entropy loss from logits."""
  logits = np.array([[1, 2, 0, -1], [1, 2, 0, -1], [-1, 3, 0, 0]])
  labels = np.array([0, 3, 1])
  expected_loss = np.array([1.4401897, 3.4401897, 0.11144278])

  loss = utils.log_loss_from_logits(labels, logits)
  np.testing.assert_allclose(expected_loss, loss, atol=1e-7)
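# For reference, a minimal sketch of what utils.log_loss_from_logits is
# expected to compute, consistent with the expected values in the test above:
# per-example cross-entropy derived from a numerically stable log-softmax.
# This is an illustrative assumption, not the library's actual implementation.
import numpy as np
from scipy.special import logsumexp


def log_loss_from_logits_sketch(labels, logits):
  """Per-example cross-entropy: logsumexp over classes minus the true logit."""
  return logsumexp(logits, axis=-1) - logits[np.arange(len(labels)), labels]


logits = np.array([[1., 2., 0., -1.], [1., 2., 0., -1.], [-1., 3., 0., 0.]])
labels = np.array([0, 3, 1])
print(log_loss_from_logits_sketch(labels, logits))
# -> approximately [1.4401897, 3.4401897, 0.11144278]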
def get_loss_test(self):
  """Calculates (if needed) cross-entropy losses for the test set.

  Returns:
    Loss (or None if neither the loss nor the labels are present).
  """
  if self.loss_test is None:
    if self.labels_test is None:
      return None
    if self.logits_test is not None:
      self.loss_test = utils.log_loss_from_logits(self.labels_test,
                                                  self.logits_test)
    else:
      self.loss_test = utils.log_loss(self.labels_test, self.probs_test)
  return self.loss_test
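# A quick usage sketch of the lazy-caching pattern above: the first call
# computes the loss (preferring logits when present) and stores it in
# loss_test; later calls return the cached array. FakeData is a hypothetical
# stand-in for the real container class, and the snippet assumes get_loss_test
# and the utils module are importable as shown above.
import dataclasses
from typing import Optional

import numpy as np


@dataclasses.dataclass
class FakeData:
  """Hypothetical minimal container exposing the fields the getter reads."""
  labels_test: Optional[np.ndarray] = None
  logits_test: Optional[np.ndarray] = None
  probs_test: Optional[np.ndarray] = None
  loss_test: Optional[np.ndarray] = None


FakeData.get_loss_test = get_loss_test  # attach the getter defined above

data = FakeData(labels_test=np.array([0, 3]),
                logits_test=np.array([[1., 2., 0., -1.],
                                      [1., 2., 0., -1.]]))
losses = data.get_loss_test()          # computed from logits, then cached
assert losses is data.get_loss_test()  # the second call hits the cache

print(FakeData().get_loss_test())      # -> None (no labels available)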
def get_loss_train(self):
  """Calculates (if needed) cross-entropy losses for the training set.

  Returns:
    Loss (or None if neither the loss nor the labels are present).
  """
  if self.loss_train is None:
    if self.labels_train is None:
      return None
    if self.logits_train is not None:
      self.loss_train = utils.log_loss_from_logits(self.labels_train,
                                                   self.logits_train)
    else:
      self.loss_train = utils.log_loss(self.labels_train, self.probs_train)
  return self.loss_train
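# Both getters prefer logits over probabilities when both are available. A
# plausible reason (an assumption, but consistent with the test values above):
# cross-entropy computed from logits via logsumexp stays in log space, whereas
# softmax followed by log can underflow for confident predictions.
import numpy as np
from scipy.special import logsumexp, softmax

logits = np.array([[1000., 0., 0., 0.]])
labels = np.array([1])
rows = np.arange(len(labels))

# From logits: the logsumexp trick keeps everything finite.
print(logsumexp(logits, axis=-1) - logits[rows, labels])  # -> [1000.]

# From probabilities: softmax underflows the small classes to exactly 0,
# so the log blows up to inf instead of returning 1000.
probs = softmax(logits, axis=-1)
print(-np.log(probs[rows, labels]))  # -> [inf], with a divide-by-zero warning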