def train_gbtree(X_train, y_train):
    # NOTE(review): this definition is SHADOWED by the second `train_gbtree`
    # defined later in this file (same name, wider signature), so this version
    # is dead code at import time — consider deleting or renaming it.
    # NOTE(review): it also reads `args`, `cat_features`, and `shuffle` as free
    # variables (none are parameters) — presumably module-level globals/imports;
    # confirm they exist before reviving this version.

    # Training
    print('Training model...')
    # shuffle X and y
    X_train, y_train = shuffle(X_train, y_train, random_state=0)
    if args.gb_tool == 'xgboost':
        # XGBoost branch: fixed hyperparameters, binary logistic objective.
        model = XGBClassifier(
            objective='binary:logistic',
            booster='gbtree',
            learning_rate=0.05,
            n_estimators=200,
            max_depth=3,
            min_child_weight=6,
            verbosity=1,
        )
        model.fit(X_train, y_train)
        params = model.get_params()
    else:
        # CatBoost branch: relies on module-level `cat_features`;
        # class imbalance correction is left commented out.
        model = CatBoostClassifier(
            verbose=0,
            cat_features=cat_features,
            random_state=args.rs_model,
            # scale_pos_weight=(1 - pos_rate) / pos_rate
        )
        model.fit(X_train, y_train)
        # CatBoost exposes its resolved settings via get_all_params(),
        # not get_params().
        params = model.get_all_params()

    print('Parameters:', params)
    print('Done.')

    return model
def train_gbtree(X_train, y_train, pos_rate, args):
    """Train a gradient-boosted tree binary classifier.

    Parameters
    ----------
    X_train, y_train : training features and binary labels, passed
        straight through to the underlying library's ``fit``.
    pos_rate : positive-class rate. Currently UNUSED — kept for
        interface compatibility; only referenced by the commented-out
        ``scale_pos_weight`` option below.
    args : namespace (e.g. argparse) providing ``gb_tool`` ('xgboost'
        selects XGBoost, anything else selects CatBoost) and, for the
        CatBoost branch, ``lr``, ``depth``, ``l2``.

    Returns
    -------
    The fitted classifier (``XGBClassifier`` or ``CatBoostClassifier``).
    """
    print('Training model...')
    if args.gb_tool == 'xgboost':
        model = XGBClassifier(objective='binary:logistic',
                              booster='gbtree',
                              learning_rate=0.05,
                              n_estimators=200,
                              max_depth=3,
                              min_child_weight=6,
                              verbosity=1
                              )
        # XGBoost reports its settings via the sklearn-style get_params().
        get_effective_params = model.get_params
    else:
        model = CatBoostClassifier(verbose=0,
                                   # scale_pos_weight=(1 - pos_rate) / pos_rate,
                                   learning_rate=args.lr,
                                   depth=args.depth,
                                   l2_leaf_reg=args.l2
                                   )
        # CatBoost exposes resolved settings via get_all_params()
        # (only valid after fitting; called below, post-fit).
        get_effective_params = model.get_all_params

    model.fit(X_train, y_train)
    # Accessor was bound in the branch above — no need to re-test gb_tool here.
    print('Parameters:', get_effective_params())
    print('Done.')

    return model