# Compare a single decision tree, sklearn gradient boosting, and XGBoost on the
# Hastie data splits. `tree`, the `*_hastie` arrays, and `print_loss` are
# defined earlier in the file.

# --- Decision-tree baseline ---
tree.fit(X_train_hastie, y_train_hastie)
print('Accuracy for the Decision Tree: {}'.format(
    tree.score(X_test_hastie, y_test_hastie)))

# --- Gradient boosting (sklearn) ---
gbc2_model = GradientBoostingClassifier(n_estimators=5000, learning_rate=0.01,
                                        max_depth=3, random_state=0)
gbc2_model.fit(X_train_hastie, y_train_hastie)
y_pred = gbc2_model.predict_proba(X_test_hastie)[:, 1]
print('Accuracy for Gradient Boosting: {}'.format(
    gbc2_model.score(X_test_hastie, y_test_hastie)))

# Decision-function value after each boosting stage; the trailing [:, :, 0]
# drops the singleton class axis, giving shape (n_stages, n_samples).
gbc2_cumulative_predictions = numpy.array(
    list(gbc2_model.staged_decision_function(X_test_hastie)))[:, :, 0]
print_loss(gbc2_cumulative_predictions, y_test_hastie)

# --- XGBoost ---
xg_model = XGBClassifier(n_estimators=5000, learning_rate=0.01)
xg_model.fit(X_train_hastie, y_train_hastie)
print('Accuracy for XGBoost: {}'.format(
    xg_model.score(X_test_hastie, y_test_hastie)))
y_pred = xg_model.predict_proba(X_test_hastie)[:, 1]

# NOTE: XGBClassifier has no staged_decision_function — the original call here
# raised AttributeError. The equivalent staged margins are obtained by
# predicting with only the first i+1 trees (`iteration_range`) and asking for
# the raw margin (`output_margin=True`). This re-predicts per stage, so it is
# O(n_estimators^2) tree evaluations — fine for analysis, not for production.
xg_cumulative_predictions = numpy.array(
    [xg_model.predict(X_test_hastie, output_margin=True,
                      iteration_range=(0, i + 1))
     for i in range(xg_model.n_estimators)])
# Fixed: the original passed an undefined name (`xg_cumulative_loss`) and
# omitted the labels argument that print_loss takes in the call above.
print_loss(xg_cumulative_predictions, y_test_hastie)