def param2(X_train, X_test, y_train, y_test, model, param_grid2):
    """Two-stage hyperparameter tuning via myXGBoost.

    Stage 1 searches the tree-structure/subsampling grid below with 3-fold
    CV; the winning combination is fixed on the model, and stage 2 then
    searches ``param_grid2`` on that tuned model.

    Returns the stage-2 (accuracy, best-parameter dict) pair.
    """
    stage1_grid = {
        'max_depth': [2, 3, 4, 5, 6, 7, 9, 11],
        'min_child_weight': [4, 6, 7, 8],
        'subsample': [0.6, 0.7, 0.8, 0.9, 1],
        'colsample_bytree': [0.6, 0.7, 0.8, 0.9, 1],
    }
    # Stage-1 accuracy is not needed; only the winning parameters are kept.
    _, stage1_best = myXGBoost(X_train, X_test, y_train, y_test, model,
                               stage1_grid, KFold=3)
    tuned_model = model.set_params(**stage1_best)
    final_accuracy, final_best = myXGBoost(X_train, X_test, y_train, y_test,
                                           tuned_model, param_grid2, KFold=3)
    return final_accuracy, final_best
def param2(X_train, X_test, y_train, y_test, model, param_grid):
    """Two-stage grid search: structural parameters first, then param_grid.

    Stage 1 tunes the structural grid below with 3-fold CV.  Its winners
    are frozen as single-candidate lists and merged with ``param_grid``
    for stage 2.  Only the keys newly searched in stage 2 (i.e. those not
    decided in stage 1) are returned alongside the stage-2 accuracy.
    """
    structural_grid = {
        "max_depth": [2, 3, 4, 5, 6, 7, 9, 11],
        "min_child_weight": [4, 6, 7, 8],
        "subsample": [0.6, 0.7, 0.8, 0.9, 1],
        "colsample_bytree": [0.6, 0.7, 0.8, 0.9, 1],
    }
    _, stage1_best = myXGBoost(X_train, X_test, y_train, y_test,
                               model=model, param_grid=structural_grid,
                               KFold=3)
    # Freeze each stage-1 winner as a one-element list, then add the new grid.
    combined_grid = {name: [value] for name, value in stage1_best.items()}
    combined_grid.update(param_grid)
    # NOTE(review): stage 2 relies on myXGBoost's default KFold — confirm.
    stage2_acc, stage2_best = myXGBoost(X_train, X_test, y_train, y_test,
                                        model=model, param_grid=combined_grid)
    # Report only the parameters that came from the caller's grid.
    newly_tuned = {name: stage2_best[name]
                   for name in stage2_best if name not in stage1_best}
    return stage2_acc, newly_tuned
def param2(X_train, X_test, y_train, y_test, model, param_grid):
    """Tune structural XGBoost parameters, then search param_grid on top.

    Stage-1 winners are pinned as single-candidate lists and merged with
    ``param_grid`` before the second 3-fold search.  Returns the stage-2
    score as a plain Python scalar plus only the freshly searched
    parameters.
    """
    base_grid = {
        "max_depth": [2, 3, 4, 5, 6, 7, 9, 11],
        "min_child_weight": [4, 6, 7, 8],
        "subsample": [0.6, 0.7, 0.8, 0.9, 1],
        "colsample_bytree": [0.6, 0.7, 0.8, 0.9, 1],
    }
    _, winners = myXGBoost(X_train, X_test, y_train, y_test, model,
                           base_grid, 3)
    # Pin each stage-1 winner as a one-element candidate list.
    pinned = {name: [value] for name, value in winners.items()}
    merged = dict(pinned)
    merged.update(param_grid)
    score2, winners2 = myXGBoost(X_train, X_test, y_train, y_test, model,
                                 merged, 3)
    # Keep only the parameters the caller asked to search.
    fresh = {name: value for name, value in winners2.items()
             if name not in pinned}
    # .item() converts the score (presumably a numpy scalar) to a Python
    # scalar — TODO confirm myXGBoost's return type.
    return (score2.item(), fresh)
def param2(X_train, X_test, y_train, y_test, model, param_grid):
    """Fix the best structural parameters on the model, then search param_grid.

    Runs a 3-fold search over the structural grid below, applies the
    winners via ``set_params``, and returns the (accuracy, best-params)
    pair from a second 3-fold search over ``param_grid``.
    """
    structure_grid = {
        "max_depth": [2, 3, 4, 5, 6, 7, 9, 11],
        "min_child_weight": [4, 6, 7, 8],
        "subsample": [0.6, 0.7, 0.8, 0.9, 1],
        "colsample_bytree": [0.6, 0.7, 0.8, 0.9, 1],
    }
    # Stage-1 accuracy is discarded; only its winning parameters matter.
    _, best_structure = myXGBoost(X_train, X_test, y_train, y_test, model,
                                  structure_grid, KFold=3)
    refined_model = model.set_params(**best_structure)
    return myXGBoost(X_train, X_test, y_train, y_test, refined_model,
                     param_grid, KFold=3)
def param2(X_train, X_test, y_train, y_test, model, param_grid):
    """Search param_grid with earlier-round winners held fixed.

    The keyword values below are best settings found in a previous tuning
    round; they are passed straight through to myXGBoost.
    """
    return myXGBoost(
        X_train, X_test, y_train, y_test, model, param_grid,
        subsample=0.8,
        colsample_bytree=0.7,
        max_depth=2,
        min_child_weight=4,
    )
def param2(X_train, X_test, y_train, y_test, model, param_grid):
    """Delegate to myXGBoost with previously tuned parameters pinned."""
    # Winners from an earlier tuning round, forwarded as keyword arguments.
    fixed_settings = {
        'colsample_bytree': 0.7,
        'subsample': 0.8,
        'max_depth': 2,
        'min_child_weight': 4,
    }
    return myXGBoost(X_train, X_test, y_train, y_test, model, param_grid,
                     **fixed_settings)
def param2(X_train, X_test, y_train, y_test, model, param_grid2):
    """Thin wrapper: run myXGBoost over param_grid2 and pass results through."""
    score, winning_params = myXGBoost(X_train, X_test, y_train, y_test,
                                      model, param_grid2)
    return score, winning_params
def param2(X_train, X_test, y_train, y_test, xgb, param_grid, **kwargs):
    """Search param_grid while holding earlier-tuned parameters fixed.

    NOTE(review): the incoming ``**kwargs`` are accepted but never
    forwarded — the earlier-round winners are hard-coded below; confirm
    this is intentional.
    """
    # Parameters found in the previous tuning round.
    earlier_best = dict(colsample_bytree=0.7, subsample=0.8,
                        max_depth=2, min_child_weight=4)
    score, best = myXGBoost(X_train, X_test, y_train, y_test, xgb,
                            param_grid, **earlier_best)
    return score, best
def param2(X_train, X_test, y_train, y_test, model, param_grid2):
    """Run a 3-fold myXGBoost search with earlier-round winners pinned."""
    # Best settings from a previous tuning round, passed as keywords.
    prior_winners = {'subsample': 0.8, 'colsample_bytree': 0.7,
                     'max_depth': 2, 'min_child_weight': 4}
    score, best = myXGBoost(X_train, X_test, y_train, y_test, model,
                            param_grid2, 3, **prior_winners)
    return score, best
def param2(X_train, X_test, y_train, y_test, model, param_grid):
    """Run a 3-fold myXGBoost grid search and return its actual results.

    Bug fix: the original computed the accuracy and best parameters but
    then overwrote both with hard-coded literals
    (0.7967032 and {'reg_alpha': 0, 'reg_lambda': 1.0, 'gamma': 0}),
    so callers always received stale values regardless of the data.
    This version returns what the search actually found.

    Returns:
        (accuracy, best_params) from myXGBoost.
    """
    accuracy, best_params = myXGBoost(X_train, X_test, y_train, y_test,
                                      model, param_grid, 3)
    return accuracy, best_params