def test_np_inverse_softmax(self):
  """Round-trip check: softmax(np_inverse_softmax(p)) recovers p."""
  num_rows, num_classes = 4, 3
  # Start from random logits and normalize so we have valid probabilities.
  raw = np.random.rand(num_rows, num_classes)
  probs = scipy.special.softmax(raw, axis=-1)
  # Invert, then re-apply softmax; the result must match the originals
  # (inversion is only defined up to an additive constant per row, which
  # softmax cancels out).
  recovered = scipy.special.softmax(
      uq_utils.np_inverse_softmax(probs), axis=-1)
  self.assertAllClose(probs, recovered)
def apply_temperature_scaling(temperature, probs):
  """Apply temperature scaling to an array of probabilities.

  Args:
    temperature: Floating point temperature.
    probs: Array of probabilities with probabilities over axis=-1.

  Returns:
    Temperature-scaled probabilities; same shape as input probs.
  """
  logits = uq_utils.np_inverse_softmax(probs)
  # Divide on the transposed array: for a scalar temperature this is the
  # same as a plain division, and it presumably also lets a per-example
  # temperature vector broadcast over rows — keep the transposes intact.
  scaled_logits = (logits.T / temperature).T
  return scipy.special.softmax(scaled_logits, axis=-1)
def run(prediction_path):
  """Run temperature scaling."""
  stats = array_utils.load_stats_from_tfrecords(prediction_path)
  probs = stats['probs'].astype(np.float32)
  labels = stats['labels'].astype(np.int32)

  # Drop a trailing singleton dimension so labels are a flat vector.
  if labels.ndim > 1:
    labels = np.squeeze(labels, -1)

  # Fit the temperature on at most NUM_EXAMPLES examples.
  if probs.shape[0] > NUM_EXAMPLES:
    probs = probs[:NUM_EXAMPLES, :]
    labels = labels[:NUM_EXAMPLES]

  # Soften first so the inverse softmax below is numerically well-defined.
  probs = metrics_lib.soften_probabilities(probs=probs)
  logits = uq_utils.np_inverse_softmax(probs)
  temp = calibration_lib.find_scaling_temperature(labels, logits)

  # Persist the fitted temperature next to the predictions it was fit on.
  output_path = os.path.join(
      os.path.dirname(prediction_path), 'temperature_hparam.json')
  with gfile.GFile(output_path, 'w') as fh:
    fh.write(json.dumps({'temperature': temp}))