def predict(self, logits, temperature=None):
    """Scale logits by a temperature and return calibrated probabilities.

    Args:
        logits: raw (non-normalized) predictions that a classification model
            generates, shape=(num_examples, num_classes).
        temperature: optional temperature to scale logits by; when None,
            the fitted self.temperature is used instead.

    Returns:
        Calibrated softmax probabilities, shape=(num_samples, num_classes).
    """
    # Compare against None explicitly rather than relying on truthiness:
    # with `if not temperature`, an explicit caller-supplied temperature of
    # 0 or 0.0 would be silently replaced by self.temperature.
    if temperature is None:
        temperature = self.temperature
    return utils.to_softmax(logits / temperature)
def fit(self, logits, one_hot_labels):
    """Fit one isotonic regression model per class.

    Args:
        logits: raw (non-normalized) predictions that a classification model
            generates, shape=(num_examples, num_classes).
        one_hot_labels: one-hot-encoding of true labels,
            shape=(num_examples, num_classes).

    Raises:
        ValueError: if logits does not have self.num_classes columns, or if
            logits and one_hot_labels have mismatched shapes.
    """
    # Raise explicitly instead of using `assert`: asserts are stripped when
    # Python runs with -O, which would let malformed inputs through to the
    # per-class regressors and fail there with a much less clear error.
    if logits.shape[1] != self.num_classes:
        raise ValueError(
            f"logits has {logits.shape[1]} columns, expected "
            f"{self.num_classes}")
    if logits.shape != one_hot_labels.shape:
        raise ValueError(
            f"logits shape {logits.shape} does not match one_hot_labels "
            f"shape {one_hot_labels.shape}")
    softmax_probabilities = utils.to_softmax(logits)
    # Calibrate each class column independently against its binary label
    # column (one-vs-rest isotonic regression).
    for i in range(self.num_classes):
        self.ir_per_class[i].fit(softmax_probabilities[:, i],
                                 one_hot_labels[:, i])
def predict(self, logits):
    """Predict calibrated softmax probabilities from logit scores.

    Uses linear interpolation in the underlying scikit-learn call.

    Args:
        logits: raw (non-normalized) predictions that a classification model
            generates, shape=(num_examples, num_classes).

    Returns:
        Calibrated softmax probabilities, shape=(num_examples, num_classes).

    Raises:
        ValueError: if logits does not have self.num_classes columns.
    """
    # Raise explicitly instead of using `assert`: asserts are stripped
    # when Python runs with -O.
    if logits.shape[1] != self.num_classes:
        raise ValueError(
            f"logits has {logits.shape[1]} columns, expected "
            f"{self.num_classes}")
    input_probabilities = utils.to_softmax(logits)
    # np.empty instead of np.ones: every entry is overwritten in the loop
    # below, so filling with ones is wasted work. The shape-based call
    # keeps the original float64 dtype regardless of the input's dtype.
    new_probabilities = np.empty(np.shape(input_probabilities))
    for i in range(self.num_classes):
        new_probabilities[:, i] = self.ir_per_class[i].predict(
            input_probabilities[:, i])
    # Per-class isotonic outputs need not sum to 1, so renormalize each
    # row when in the multiclass setting.
    if self.num_classes > 1:
        row_sums = np.sum(new_probabilities, axis=1, keepdims=True)
        return new_probabilities / row_sums
    return new_probabilities