# NOTE(review): this chunk was whitespace-mangled onto one physical line and,
# because it began with '#', the WHOLE line was dead code (commented out).
# Reconstructed with conventional formatting; loop nesting is inferred from
# the parallel chunks in this file -- confirm against the original script.
#
# Tail of a training script: regression objective ('reg:linear') whose
# predictions are rounded back onto integer class labels for evaluation.
#
# Tail of the `param` list (its opening `param = [` lies outside this chunk,
# so the fragment is kept as a comment rather than guessed at):
#     #('gamma', 0.0),           # bigger -> more conservative
#     #('min_child_weight', 1),
#     ('objective', 'reg:linear'),
#     ('eval_metric', 'rmse'),
# ]

watchlist = [(dtrain, 'train'), (dtest, 'eval')]
num_round = 1000
evals_result = {}

# Sweep max_depth (here only depth 1; the sibling chunk sweeps 1..10).
for i_max_depth in range(1, 2):
    param[0] = ('max_depth', i_max_depth)  # change tree depth
    print(param)  # was Py2 `print param`; same output, parseable by Py3
    #bst = xgb.train(param, dtrain, num_round, watchlist,
    #                evals_result=evals_result, early_stopping_rounds=100)
    bst = xgb.train(param, dtrain, num_round, watchlist,
                    feval=common.feval_class_round,
                    evals_result=evals_result, early_stopping_rounds=100)
    # Predict with the best iteration found by early stopping.
    preds = bst.predict(dtest, ntree_limit=bst.best_ntree_limit)
    preds_class = common.round_all(preds)
    print(sk.metrics.classification_report(dtest.get_label(), preds_class))
    f1.write("{0}\n".format(
        round(sk.metrics.accuracy_score(dtest.get_label(), preds_class), 2)))
    f2.write('Regresja na klasach,' + str(i_max_depth) + ',' +
             common.prepare_file_output(dtest.get_label(), preds_class) + "\n")

f1.close()
f2.close()

# PLOT
#ax = xgb.plot_tree(bst,num_trees=0)
#xgb.plot_importance(bst)
#plt.show()
#bst.dump_model('dump_raw.txt');
# NOTE(review): whitespace-mangled one-liner reconstructed with conventional
# formatting; loop nesting inferred from the parallel chunks in this file --
# confirm against the original script.
#
# Tail of a training script: plain regression; both predictions and labels
# are mapped onto classes (common.value_to_class_all) before scoring.
num_round = 1000
evals_result = {}

# Sweep tree depth 1..10, scoring each depth on the held-out dtest.
for i_max_depth in range(1, 11):
    param[0] = ('max_depth', i_max_depth)  # change tree depth
    print(param)  # was Py2 `print param`; same output, parseable by Py3
    #bst = xgb.train(param, dtrain, num_round, watchlist,
    #                evals_result=evals_result, early_stopping_rounds=100)
    bst = xgb.train(param, dtrain, num_round, watchlist,
                    feval=common.feval_value,
                    evals_result=evals_result, early_stopping_rounds=100)
    # Predict with the best iteration found by early stopping.
    preds = bst.predict(dtest, ntree_limit=bst.best_ntree_limit)
    preds_class = common.value_to_class_all(preds)
    dtest_labels_class = common.value_to_class_all(dtest.get_label())
    f1.write("{0}\n".format(
        round(sk.metrics.accuracy_score(dtest_labels_class, preds_class), 2)))
    print(sk.metrics.classification_report(dtest_labels_class, preds_class))
    f2.write('Regresja,' + str(i_max_depth) + ',' +
             common.prepare_file_output(dtest_labels_class, preds_class) + "\n")

f1.close()
f2.close()

# PLOT
#ax = xgb.plot_tree(bst,num_trees=0)
#xgb.plot_importance(bst)
#plt.show()
#bst.dump_model('dump_raw.txt');
# NOTE(review): whitespace-mangled one-liner reconstructed with conventional
# formatting; loop nesting inferred from the parallel chunks in this file.
# The stray `]` closing the `param` list belongs to an opening outside this
# chunk and is kept as a comment rather than guessed at:
# ]

# Tail of a training script: regression scored on class-mapped labels,
# sweeping only the deeper trees (depth 4..10).
watchlist = [(dtrain, 'train'), (dtest, 'eval')]
num_round = 1000
evals_result = {}

for i_max_depth in range(4, 11):
    param[0] = ('max_depth', i_max_depth)  # change tree depth
    print(param)  # was Py2 `print param`; same output, parseable by Py3
    #bst = xgb.train(param, dtrain, num_round, watchlist,
    #                evals_result=evals_result, early_stopping_rounds=100)
    bst = xgb.train(param, dtrain, num_round, watchlist,
                    feval=common.feval_value,
                    evals_result=evals_result, early_stopping_rounds=100)
    # Predict with the best iteration found by early stopping.
    preds = bst.predict(dtest, ntree_limit=bst.best_ntree_limit)
    preds_class = common.value_to_class_all(preds)
    labels_class = common.value_to_class_all(dtest.get_label())
    print(sk.metrics.classification_report(labels_class, preds_class))
    f1.write("{0}\n".format(
        round(sk.metrics.accuracy_score(labels_class, preds_class), 2)))
    # NOTE(review): return value discarded -- the sibling chunks write this
    # string to f2, so a `f2.write(...)` may be missing here; f2 is never
    # opened/closed in this chunk, though. Confirm intent before changing.
    common.prepare_file_output(labels_class, preds_class)

f1.close()

# MAKE PREDICTION
#preds = bst.predict(dtest)

# PLOT
#ax = xgb.plot_tree(bst,num_trees=0)
#xgb.plot_importance(bst)
#plt.show()
#bst.dump_model('dump_raw.txt');
# NOTE(review): whitespace-mangled one-liner reconstructed with conventional
# formatting -- confirm against the original script.
#
# Tail of a training script: multiclass classification evaluated directly on
# the predicted class ids (no rounding/mapping step, no depth sweep).
#
# Tail of the `param` list (its opening `param = [` lies outside this chunk,
# so the fragment is kept as a comment rather than guessed at):
#     ('eval_metric', 'mlogloss'),
#     ('eval_metric', 'merror'),   # 2 metrics
# ]

watchlist = [(dtrain, 'train'), (dtest, 'eval')]
num_round = 1000
evals_result = {}

# Depth sweep disabled in the original -- a single configuration is trained.
#for i_max_depth in range(1, 11):
#    param[0] = ('max_depth', i_max_depth)  # change tree depth
print(param)  # was Py2 `print param`; same output, parseable by Py3
bst = xgb.train(param, dtrain, num_round, watchlist,
                evals_result=evals_result, early_stopping_rounds=100)
preds = bst.predict(dtest)
print(sk.metrics.classification_report(dtest.get_label(), preds))
f1.write("{0}\n".format(
    round(sk.metrics.accuracy_score(dtest.get_label(), preds), 2)))
f2.write('Klasyfikacja funkcja linowa,' + str(1) + ',' +
         common.prepare_file_output(dtest.get_label(), preds) + "\n")
f1.close()
f2.close()

# PLOT
#ax = xgb.plot_tree(bst,num_trees=0)
#xgb.plot_importance(bst)
#plt.show()
#bst.dump_model('dump_raw.txt');