def train_xgb_asha(config):
    """Train an XGBoost booster for an ASHA-scheduled Tune trial.

    Builds DMatrix wrappers around the module-level splits
    (``x_tr``/``y_tr`` train, ``x_te``/``y_te`` test), then trains for up
    to 100 boosting rounds with early stopping after 10 stagnant rounds.
    Metrics and a ``model.xgb`` checkpoint are reported to Tune each
    iteration via ``TuneReportCheckpointCallback``.

    Args:
        config: XGBoost parameter dict sampled from the Tune search space.
    """
    # NOTE(review): relies on x_tr/y_tr/x_te/y_te existing at module
    # level — confirm they are defined before trials launch.
    dtrain = xgb.DMatrix(x_tr, label=y_tr)
    dtest = xgb.DMatrix(x_te, label=y_te)
    xgb.train(
        config,
        dtrain,
        100,
        evals=[(dtest, 'eval')],
        verbose_eval=False,
        early_stopping_rounds=10,
        callbacks=[TuneReportCheckpointCallback(filename='model.xgb')],
    )
def train_breast_cancer(config: dict):
    """Simple training function to be passed into Tune.

    Loads the breast-cancer dataset, holds out 25% as a test set, and
    trains an XGBoost classifier. Metrics and a ``model.xgb`` checkpoint
    are reported back to Tune via ``TuneReportCheckpointCallback``.

    Args:
        config: XGBoost parameter dict sampled from the Tune search space.
    """
    # Load dataset
    data, labels = sklearn.datasets.load_breast_cancer(return_X_y=True)
    # Split into train and test set
    # (Fix: these variables were referenced below but never defined,
    # which raised NameError; the load/split steps are restored here
    # to match the sibling training functions in this file.)
    train_x, test_x, train_y, test_y = train_test_split(
        data, labels, test_size=0.25)
    # Build input matrices for XGBoost
    train_set = xgb.DMatrix(train_x, label=train_y)
    test_set = xgb.DMatrix(test_x, label=test_y)
    # Train the classifier, using the Tune callback
    xgb.train(
        config,
        train_set,
        evals=[(test_set, "eval")],
        verbose_eval=False,
        callbacks=[TuneReportCheckpointCallback(filename="model.xgb")])
def train_breast_cancer(config):
    """Train an XGBoost classifier on the breast-cancer dataset.

    Holds out 25% of the data for evaluation and reports metrics plus a
    ``model.xgb`` checkpoint to Tune through
    ``TuneReportCheckpointCallback``.

    Args:
        config: XGBoost parameter dict sampled from the Tune search space.
    """
    # Fetch the dataset and carve out a 75/25 train/test split.
    features, targets = sklearn.datasets.load_breast_cancer(return_X_y=True)
    tr_x, te_x, tr_y, te_y = train_test_split(
        features, targets, test_size=0.25)
    # Wrap both splits in XGBoost's native matrix format.
    dtrain = xgb.DMatrix(tr_x, label=tr_y)
    dtest = xgb.DMatrix(te_x, label=te_y)
    # Run training; the callback streams results to Tune each round.
    xgb.train(
        config,
        dtrain,
        evals=[(dtest, "eval")],
        verbose_eval=False,
        callbacks=[TuneReportCheckpointCallback(filename="model.xgb")])
def train_breast_cancer(config: dict):
    """Simple training function to be passed into Tune.

    Loads the breast-cancer dataset, splits off 25% for evaluation,
    forces the binary-logistic objective with logloss/error metrics,
    and trains an XGBoost model. Results and a ``model.xgb`` checkpoint
    flow back to Tune via ``TuneReportCheckpointCallback``.

    Args:
        config: XGBoost parameter dict sampled from the Tune search space
            (e.g. produced by HyperOpt); it is not mutated.
    """
    # Load and split the dataset (75% train / 25% test).
    features, targets = sklearn.datasets.load_breast_cancer(return_X_y=True)
    tr_x, te_x, tr_y, te_y = train_test_split(
        features, targets, test_size=0.25)
    # Build input matrices for XGBoost.
    dtrain = xgb.DMatrix(tr_x, label=tr_y)
    dtest = xgb.DMatrix(te_x, label=te_y)
    # Merge fixed task settings into a fresh dict so the caller's
    # search-space config is left untouched.
    params = {
        **config,
        "eval_metric": ["logloss", "error"],
        "objective": "binary:logistic",
    }
    # Train the classifier, using the Tune callback.
    xgb.train(
        params,
        dtrain,
        evals=[(dtest, "eval")],
        verbose_eval=False,
        callbacks=[TuneReportCheckpointCallback(filename="model.xgb")])
def train_breast_cancer(config: dict, checkpoint_dir=None):
    """Tune training function supporting dynamic resource allocation.

    Loads the breast-cancer dataset, optionally resumes from a prior
    checkpoint, sizes XGBoost's thread pool from the trial's current
    resource grant, and trains with per-iteration checkpointing.

    Args:
        config: XGBoost parameter dict from the Tune search space.
            NOTE: mutated in place (``config["nthread"]`` is set below).
        checkpoint_dir: Directory holding a previous checkpoint to
            resume from, or None to start fresh.
    """
    # This is a simple training function to be passed into Tune
    # Load dataset
    data, labels = sklearn.datasets.load_breast_cancer(return_X_y=True)
    # Split into train and test set
    train_x, test_x, train_y, test_y = train_test_split(data, labels, test_size=0.25)
    # Build input matrices for XGBoost
    train_set = xgb.DMatrix(train_x, label=train_y)
    test_set = xgb.DMatrix(test_x, label=test_y)
    # Checkpointing needs to be set up in order for dynamic
    # resource allocation to work as intended
    xgb_model = None
    if checkpoint_dir:
        # Resume boosting from the previously saved model so progress
        # is not lost when the trial is paused/rescaled.
        xgb_model = xgb.Booster()
        xgb_model.load_model(os.path.join(checkpoint_dir, CHECKPOINT_FILENAME))
    # we can obtain current trial resources through
    # tune.get_trial_resources()
    config["nthread"] = int(tune.get_trial_resources().head_cpus)
    print(f"nthreads: {config['nthread']} xgb_model: {xgb_model}")
    # Train the classifier, using the Tune callback
    xgb.train(
        config,
        train_set,
        evals=[(test_set, "eval")],
        verbose_eval=False,
        xgb_model=xgb_model,
        callbacks=[
            TuneReportCheckpointCallback(
                filename=CHECKPOINT_FILENAME,
                # checkpointing should happen every iteration
                # with dynamic resource allocation
                frequency=1,
            )
        ],
    )
def train_breast_cancer(config: dict):
    """Simple training function to be passed into Tune.

    Loads the breast-cancer dataset, holds out 25% for evaluation under
    the eval name "test", and trains an XGBoost model. Metrics and a
    ``model.xgb`` checkpoint are reported to Tune each iteration via
    ``TuneReportCheckpointCallback``.

    Args:
        config: XGBoost parameter dict sampled from the Tune search space.
    """
    # Load and split the dataset (75% train / 25% test).
    features, targets = sklearn.datasets.load_breast_cancer(return_X_y=True)
    tr_x, te_x, tr_y, te_y = train_test_split(features, targets, test_size=0.25)
    # Build input matrices for XGBoost.
    dtrain = xgb.DMatrix(tr_x, label=tr_y)
    dtest = xgb.DMatrix(te_x, label=te_y)
    # Train the classifier; the callback streams "test-*" metrics to Tune.
    xgb.train(
        config,
        dtrain,
        evals=[(dtest, "test")],
        verbose_eval=False,
        callbacks=[TuneReportCheckpointCallback(filename="model.xgb")],
    )