/
calibration_methods.py
66 lines (54 loc) · 1.89 KB
/
calibration_methods.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
"""Toy comparison of score-calibration methods.

Fits isotonic regression and logistic (Platt-style) regression as
calibration maps on a synthetic score/label set, then renders
reliability diagrams and maps through the project's PresentationTier.
"""
import numpy as np
from sklearn.isotonic import IsotonicRegression
from sklearn.linear_model import LogisticRegression
# TODO implement a MLP calibration method
#from sklearn.neural_network import MLPClassifier
from presentationtier import PresentationTier
import matplotlib.pyplot as plt

# Interactive plotting with a small grayscale default figure.
plt.ion()
plt.rcParams['image.cmap'] = 'gray'
plt.rcParams['figure.figsize'] = (5,3.5)

# Fixed seed so repeated runs produce identical figures.
np.random.seed(1234)

# Clip bound keeping isotonic predictions strictly inside (0, 1).
# NOTE(review): 10e-8 equals 1e-7 — confirm 1e-8 was not the intent.
_EPSILON=10e-8

# Laplace correction strength forwarded to the reliability plots.
alpha=0.1
# Synthetic calibration set: ten repetitions of ten evenly spaced
# scores in (0, 1), paired with binary labels.
S = np.tile(np.linspace(0.01, 0.99, 10), 10)

# Labels follow two repeating patterns: the first five repetitions mark
# only the top two scores positive; the last five also flag the lowest
# score and the top four.
_row_low = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1]
_row_high = [1, 0, 0, 0, 0, 0, 1, 1, 1, 1]
Y = np.asarray(_row_low * 5 + _row_high * 5)
# Fit the two calibration maps on the synthetic scores.
print('Learning Isotonic Regression')
ir = IsotonicRegression(y_min=_EPSILON, y_max=(1 - _EPSILON),
                        increasing=True, out_of_bounds='clip')
ir.fit(S, Y)

print('Learning Logistic Regression')
lr = LogisticRegression(C=1., solver='lbfgs')
S_col = S.reshape(-1, 1)
lr.fit(S_col, Y)

# Raw scores plus each model's calibrated version; all three share
# the same ground-truth labels.
scores_set = [S, ir.predict(S), lr.predict_proba(S_col)[:, 1]]
labels_set = [Y] * 3
legend = ['Y', 'IR', 'LR']
pt = PresentationTier()

# Reliability diagram on the training scores themselves.
fig = pt.plot_reliability_diagram(scores_set, labels_set, legend,
                                  original_first=True, alpha=alpha)

# Reliability map evaluated over a dense, evenly spaced score grid.
scores_lin = np.linspace(0, 1, 100)
grid_col = scores_lin.reshape(-1, 1)
scores_set = [S, scores_lin, scores_lin]
prob_set = [Y, ir.predict(scores_lin),
            lr.predict_proba(grid_col)[:, 1]]
fig = pt.plot_reliability_map(scores_set, prob_set, legend,
                              original_first=True, alpha=alpha)