-
Notifications
You must be signed in to change notification settings - Fork 0
/
AB.py
61 lines (43 loc) · 1.58 KB
/
AB.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
import numpy as np
import sys
from sklearn.tree import DecisionTreeClassifier
# Multiclass Adaboost Implementation (1 vs. All)
class AdaBoost:
    """Multiclass AdaBoost via one-vs-all binary boosting.

    One binary AdaBoost ensemble (decision trees as weak learners) is
    trained per class label; at prediction time each ensemble casts a
    weighted vote and the highest-scoring class wins.
    """

    def __init__(self, dep, itr, names):
        # dep: max_depth passed to each DecisionTreeClassifier weak learner
        # itr: number of boosting rounds per class
        # names: collection of class labels; Test() indexes it with a numpy
        #        array and reads .shape, so it is assumed to be an ndarray
        #        — TODO confirm against callers
        self.itr = itr
        self.dep = dep
        self.names = names
        self.C = []  # C[i][j]: j-th weak learner of the ensemble for names[i]
        self.A = []  # A[i][j]: vote weight (alpha) of that learner

    def Train(self, X, y):
        """Fit one binary ensemble per label in self.names.

        X: (N, d) feature matrix; y: (N,) label vector. For each label the
        targets are relabeled to +1 (that class) / -1 (rest) and self.itr
        boosting rounds are run, appending one list of learners to self.C
        and one list of alphas to self.A.
        """
        N = X.shape[0]
        New = np.zeros(N)
        for i in self.names:
            A = []
            C = []
            weight = np.ones(N) / N  # uniform initial sample distribution
            # Relabel for the one-vs-all subproblem: +1 current class, -1 rest.
            New[np.where(y == i)[0]] = 1
            New[np.where(y != i)[0]] = -1
            for j in range(self.itr):
                # Weak learner: shallow tree fit on the weighted samples.
                clf = DecisionTreeClassifier(max_depth=self.dep)
                clf.fit(X, New, sample_weight=weight)
                Pre = clf.predict(X)
                # Weighted training error of this round's learner.
                err = weight.dot((New != Pre).astype(int))
                if err != 0:
                    A.append(.5 * np.log((1 - err) / err))
                else:
                    # Perfect learner: use alpha = 1 rather than log(inf).
                    A.append(1)
                C.append(clf)
                # Up-weight misclassified samples, down-weight correct ones.
                weight *= np.exp(-A[j] * New * Pre)
                # BUG FIX: the original assigned the renormalized weights to a
                # misspelled variable ("weconight"), so the distribution was
                # never normalized and err drifted out of [0, 1] across rounds.
                weight = weight / np.sum(weight)
            self.C.append(C)
            self.A.append(A)

    def Test(self, X):
        """Return predicted labels for X: argmax of the per-class vote sums."""
        N = X.shape[0]
        M = self.names.shape[0]
        votes = np.zeros([N, M])
        for i in range(len(self.names)):
            for j in range(self.itr):
                # Each learner votes +/-1 scaled by its alpha.
                votes[:, i] += self.A[i][j] * self.C[i][j].predict(X)
        return self.names[np.argmax(votes, axis=1)]