Example #1
# Imports needed by this snippet (assumed at the top of the original file)
import numpy as np
from sklearn.ensemble import AdaBoostClassifier
from sklearn.model_selection import KFold
from sklearn.tree import DecisionTreeClassifier

# X_train, y_train, no_folds, no_base_classifiers and the custom AdaBoost
# instance f are assumed to be defined earlier in the original script.

# Reference scikit-learn AdaBoost (SAMME) built on depth-1 decision stumps
boost = AdaBoostClassifier(base_estimator=DecisionTreeClassifier(
    max_depth=1, max_leaf_nodes=2),
                           algorithm='SAMME',
                           n_estimators=no_base_classifiers,
                           learning_rate=1.0)

## Cross-validation: compare the custom AdaBoost (f) against the sklearn implementation (boost)
kf = KFold(n_splits=no_folds)
cv_acc_arr = []
cv_sk_acc_arr = []
for i, (train_ind, test_ind) in enumerate(kf.split(X_train)):
    print("cross split no", i)
    x_tr, x_te = X_train[train_ind], X_train[test_ind]
    y_tr, y_te = y_train[train_ind], y_train[test_ind]

    # Custom AdaBoost: re-initialise on the training fold, then run the boosting rounds
    f.init(x_tr, y_tr)
    f.train(no_base_classifiers)
    y_predict = f.predict(x_te)
    accuracy = np.mean(y_predict == y_te)
    cv_acc_arr.append(accuracy)

    ## Compare against the sklearn AdaBoost implementation
    boost.fit(x_tr, y_tr)
    y_pred = boost.predict(x_te)
    accuracy_sk = np.mean(y_pred == y_te)
    cv_sk_acc_arr.append(accuracy_sk)

# Mean CV accuracy: custom implementation vs. sklearn
print(np.mean(cv_acc_arr))
print(np.mean(cv_sk_acc_arr))
Example #2
File: tmp.py  Project: avadesh02/FML-A3
    # Tail of the plot_decision_boundary helper called below; xx, yy (a meshgrid
    # over the feature space) and Z (grid predictions over it) are computed
    # earlier in the function, which is not shown in this excerpt.
    # ListedColormap comes from matplotlib.colors.
    cm_bright = ListedColormap(['#FF0000', '#0000FF'])

    #Get current axis and plot
    if ax is None:
        ax = plt.gca()
    ax.contourf(xx, yy, Z, 2, cmap='RdBu', alpha=.5)
    ax.contour(xx, yy, Z, 2, cmap='RdBu')
    ax.scatter(X[:, 0], X[:, 1], c=y, cmap=cm_bright, s=scatter_weights * 40)
    ax.set_xlabel('$X_1$')
    ax.set_ylabel('$X_2$')
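
# The meshgrid and grid predictions that the excerpt above relies on are not
# shown; a minimal sketch of how xx, yy, Z are typically built (an assumption,
# not the project's actual code) could look like this:
import numpy as np

def decision_grid(classifier, X, N=50):
    x_min, x_max = X[:, 0].min() - 0.5, X[:, 0].max() + 0.5
    y_min, y_max = X[:, 1].min() - 0.5, X[:, 1].max() + 0.5
    xx, yy = np.meshgrid(np.linspace(x_min, x_max, N),
                         np.linspace(y_min, y_max, N))
    # Predict on every grid point, then reshape back onto the grid
    Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    return xx, yy, Z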


# Imports needed by this snippet (assumed at the top of tmp.py)
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn.ensemble import AdaBoostClassifier
from sklearn.tree import DecisionTreeClassifier

# Reference scikit-learn AdaBoost (SAMME) on depth-1 decision stumps
# (newer scikit-learn releases rename base_estimator to estimator)
boost = AdaBoostClassifier(base_estimator=DecisionTreeClassifier(
    max_depth=1, max_leaf_nodes=2),
                           algorithm='SAMME',
                           n_estimators=10,
                           learning_rate=1.0)
boost.fit(X, y)
# plot_decision_boundary(boost, X,y, N = 50)#, weights)
# plt.show()

print(boost.score(X, y))
### Custom AdaBoost implementation, for comparison with sklearn
h = DecisionTreeClassifier(max_depth=1, max_leaf_nodes=2)
f = AdaBoost(h)  # AdaBoost is the project's own class, defined elsewhere in the repo
f.init(X, y)
f.train(10)
y_pred = f.predict(X)  # evaluate the custom booster on the training data
accuracy = np.mean(y_pred == y)
print(accuracy)
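
# The custom AdaBoost class used above is not shown in these excerpts. A minimal
# SAMME-style sketch with the same init/train/predict interface might look like
# the following (names and details are assumptions, not the project's code):
import copy
import numpy as np

class AdaBoostSketch:
    def __init__(self, base_classifier):
        self.base_classifier = base_classifier
        self.classifiers = []  # fitted weak learners
        self.alphas = []       # their voting weights

    def init(self, X, y):
        # Keep the data and start from uniform example weights
        self.X, self.y = X, y
        self.w = np.full(len(y), 1.0 / len(y))
        self.classes = np.unique(y)

    def train(self, n_rounds):
        K = len(self.classes)
        for _ in range(n_rounds):
            h = copy.deepcopy(self.base_classifier)
            h.fit(self.X, self.y, sample_weight=self.w)
            miss = h.predict(self.X) != self.y
            err = np.dot(self.w, miss) / self.w.sum()
            if err >= 1.0 - 1.0 / K:  # weak learner no better than chance: stop
                break
            alpha = np.log((1.0 - err) / max(err, 1e-12)) + np.log(K - 1.0)
            # Misclassified points get exponentially more weight
            self.w *= np.exp(alpha * miss)
            self.w /= self.w.sum()
            self.classifiers.append(h)
            self.alphas.append(alpha)

    def predict(self, X):
        # Weighted vote of all weak learners
        votes = np.zeros((len(X), len(self.classes)))
        for alpha, h in zip(self.alphas, self.classifiers):
            pred = h.predict(X)
            for k, c in enumerate(self.classes):
                votes[:, k] += alpha * (pred == c)
        return self.classes[np.argmax(votes, axis=1)]

# Usage mirroring the calls above: f = AdaBoostSketch(h); f.init(X, y);
# f.train(10); f.predict(X)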