def fit_increment(self, X, y, num_boost_round=1, params=None):
    """Train the booster for `num_boost_round` additional rounds.

    Continues training from the existing `self._Booster` when one is
    present; otherwise starts a fresh booster.

    Parameters
    ----------
    X : array_like
        Feature matrix.
    y : array_like
        Labels.
    num_boost_round : int
        Number of extra boosting rounds to run.
    params : dict, optional
        Overrides applied on top of `self.get_xgb_params()`.

    Returns
    -------
    self
    """
    trainDmatrix = DMatrix(X, label=y, nthread=self.n_jobs,
                           missing=self.missing)

    extra_params = params
    params = self.get_xgb_params()
    if extra_params is not None:
        # Caller-supplied overrides win over the estimator's params.
        params.update(extra_params)

    # `n_estimators` is a sklearn-wrapper-only key; the native train()
    # call controls rounds via num_boost_round, so drop it if present.
    params.pop("n_estimators", None)

    if callable(self.objective):
        obj = _objective_decorator(self.objective)
        # Placeholder built-in objective; the custom objective is passed
        # to train() via `obj`.
        params["objective"] = "reg:linear"
    else:
        obj = None

    # Fix: replaced the obscure `"_Booster" not in dir(self)` membership
    # test with getattr, which covers both "attribute missing" and
    # "attribute is None" in one idiomatic check.
    if getattr(self, "_Booster", None) is None:
        self._Booster = train(params=params, dtrain=trainDmatrix,
                              num_boost_round=num_boost_round, obj=obj)
    else:
        # Pass the existing booster so training continues from it.
        self._Booster = train(params=params, dtrain=trainDmatrix,
                              num_boost_round=num_boost_round, obj=obj,
                              xgb_model=self._Booster)
    return self
def fit_increment(self, X, y, num_boost_round=1, params=None):
    """Train the booster for `num_boost_round` additional rounds.

    Continues training from `self._Booster` when one exists; otherwise
    starts a fresh booster from a small hard-coded parameter template.

    Parameters
    ----------
    X : array_like
        Feature matrix.
    y : array_like
        Labels.
    num_boost_round : int
        Number of extra boosting rounds to run.
    params : dict, optional
        Overrides applied on top of the default template below.

    Returns
    -------
    self
    """
    trainDmatrix = DMatrix(X, label=y, nthread=self.n_jobs,
                           missing=self.missing)

    extra_params = params
    # NOTE(review): unlike the other fit_increment variant in this file,
    # this one ignores self.get_xgb_params() and sends these None
    # placeholders to train() unless overridden — confirm intended.
    params = {
        'objective': 'reg:squarederror',
        'learning_rate': None,
        'max_depth': None,
        'min_child_weight': None,
        'n_jobs': None
    }
    # Fix: removed leftover debug `print(params, extra_params)`.
    if extra_params is not None:
        params.update(extra_params)

    if callable(self.objective):
        obj = _objective_decorator(self.objective)
        # Placeholder built-in objective; the custom objective is passed
        # to train() via `obj`.
        params["objective"] = "reg:linear"
    else:
        obj = None

    # Fix: getattr guard so a fresh instance that never defined
    # `_Booster` does not raise AttributeError (backward-compatible
    # with instances where the attribute is pre-set to None).
    if getattr(self, "_Booster", None) is None:
        self._Booster = train(params=params, dtrain=trainDmatrix,
                              num_boost_round=num_boost_round, obj=obj)
    else:
        self._Booster = train(params=params, dtrain=trainDmatrix,
                              num_boost_round=num_boost_round, obj=obj,
                              xgb_model=self._Booster)
    return self
def fit(self, X, y, sample_weight=None, eval_set=None, eval_metric=None,
        early_stopping_rounds=None, verbose=True, xgb_model=None):
    # pylint: disable = attribute-defined-outside-init,arguments-differ
    """
    Fit gradient boosting classifier

    Parameters
    ----------
    X : array_like
        Feature matrix
    y : array_like
        Labels
    sample_weight : array_like
        Weight for each instance
    eval_set : list, optional
        A list of (X, y) pairs to use as a validation set for
        early-stopping
    eval_metric : str, callable, optional
        If a str, should be a built-in evaluation metric to use. See
        doc/parameter.md. If callable, a custom evaluation metric. The call
        signature is func(y_predicted, y_true) where y_true will be a
        DMatrix object such that you may need to call the get_label
        method. It must return a str, value pair where the str is a name
        for the evaluation and value is the value of the evaluation
        function. This objective is always minimized.
    early_stopping_rounds : int, optional
        Activates early stopping. Validation error needs to decrease at
        least every <early_stopping_rounds> round(s) to continue training.
        Requires at least one item in evals. If there's more than one,
        will use the last. Returns the model from the last iteration
        (not the best one). If early stopping occurs, the model will
        have three additional fields: bst.best_score, bst.best_iteration
        and bst.best_ntree_limit.
        (Use bst.best_ntree_limit to get the correct value if
        num_parallel_tree and/or num_class appears in the parameters)
    verbose : bool
        If `verbose` and an evaluation set is used, writes the evaluation
        metric measured on the validation set to stderr.
    xgb_model : str
        file name of stored xgb model or 'Booster' instance Xgb model to be
        loaded before training (allows training continuation).
    """
    evals_result = {}
    self.classes_ = np.unique(y)
    self.n_classes_ = len(self.classes_)

    xgb_options = self.get_xgb_params()

    if callable(self.objective):
        obj = _objective_decorator(self.objective)
        # Placeholder built-in objective; the custom objective is passed
        # to train() via `obj`, so this value is presumably not used —
        # TODO confirm ("Use default value. Is it really not used ?").
        xgb_options["objective"] = "binary:logistic"
    else:
        obj = None

    if self.n_classes_ > 2:
        # Switch to using a multiclass objective in the underlying XGB
        # instance
        xgb_options["objective"] = "multi:softprob"
        xgb_options['num_class'] = self.n_classes_

    # A callable metric goes to train() as `feval`; a string metric is
    # forwarded through the native `eval_metric` parameter instead.
    feval = eval_metric if callable(eval_metric) else None
    if eval_metric is not None:
        if callable(eval_metric):
            eval_metric = None
        else:
            xgb_options.update({"eval_metric": eval_metric})

    # Encode arbitrary class labels into contiguous integers for XGBoost.
    self._le = XGBLabelEncoder().fit(y)
    training_labels = self._le.transform(y)

    if eval_set is not None:
        # TODO: use sample_weight if given?
        evals = list(
            DMatrix(x[0], label=self._le.transform(x[1]),
                    missing=self.missing, nthread=self.n_jobs)
            for x in eval_set)
        nevals = len(evals)
        eval_names = ["validation_{}".format(i) for i in range(nevals)]
        evals = list(zip(evals, eval_names))
    else:
        evals = ()

    self._features_count = X.shape[1]

    if sample_weight is not None:
        train_dmatrix = DMatrix(X, label=training_labels,
                                weight=sample_weight, missing=self.missing,
                                nthread=self.n_jobs)
    else:
        train_dmatrix = DMatrix(X, label=training_labels,
                                missing=self.missing, nthread=self.n_jobs)

    self._Booster = train(
        xgb_options, train_dmatrix, self.n_estimators, evals=evals,
        early_stopping_rounds=early_stopping_rounds,
        evals_result=evals_result, obj=obj, feval=feval,
        # Only the last kwarg in of this call was
        # changed in this file!!!
        verbose_eval=verbose, xgb_model=xgb_model)

    self.objective = xgb_options["objective"]
    if evals_result:
        # Flatten each validation entry to {name: {metric: history}}.
        for val in evals_result.items():
            evals_result_key = list(val[1].keys())[0]
            evals_result[
                val[0]][evals_result_key] = val[1][evals_result_key]
        self.evals_result_ = evals_result

    if early_stopping_rounds is not None:
        # Expose early-stopping outcome on the estimator.
        self.best_score = self._Booster.best_score
        self.best_iteration = self._Booster.best_iteration
        self.best_ntree_limit = self._Booster.best_ntree_limit
    return self
def fit(self, X, y, sample_weight=None, eval_set=None, eval_metric=None,
        early_stopping_rounds=None, verbose=True, xgb_model=None):
    # pylint: disable=missing-docstring,invalid-name,attribute-defined-outside-init
    """Fit the gradient boosting model.

    Parameters
    ----------
    X : array_like
        Feature matrix.
    y : array_like
        Labels.
    sample_weight : array_like, optional
        Per-instance weights.
    eval_set : list, optional
        (X, y) tuple pairs used as validation sets for early stopping.
    eval_metric : str or callable, optional
        Built-in metric name, or a custom metric with signature
        func(y_predicted, y_true) returning a (name, value) pair; the
        metric is always minimized.
    early_stopping_rounds : int, optional
        Stop when the validation error has not decreased for this many
        rounds; on early stop the fitted estimator exposes best_score,
        best_iteration and best_ntree_limit.
    verbose : bool
        Write the validation metric to stderr each round.
    xgb_model : str or Booster, optional
        Stored model to resume training from.

    Returns
    -------
    self
    """
    dmatrix_kwargs = {"label": y, "missing": self.missing,
                      "nthread": self.n_jobs}
    if sample_weight is not None:
        dmatrix_kwargs["weight"] = sample_weight
    trainDmatrix = DMatrix(X, **dmatrix_kwargs)

    evals_result = {}
    if eval_set is None:
        evals = ()
    else:
        evals = [
            (DMatrix(feats, label=labels, missing=self.missing,
                     nthread=self.n_jobs),
             "validation_{}".format(idx))
            for idx, (feats, labels) in enumerate(eval_set)
        ]

    params = self.get_xgb_params()
    obj = None
    if callable(self.objective):
        obj = _objective_decorator(self.objective)
        # Placeholder; the real objective is passed via `obj`.
        params["objective"] = "reg:linear"

    # Callable metrics travel as `feval`; string metrics as a parameter.
    feval = None
    if callable(eval_metric):
        feval = eval_metric
    elif eval_metric is not None:
        params['eval_metric'] = eval_metric

    self._Booster = train(params, trainDmatrix, self.n_estimators,
                          evals=evals,
                          early_stopping_rounds=early_stopping_rounds,
                          evals_result=evals_result, obj=obj, feval=feval,
                          verbose_eval=verbose, xgb_model=xgb_model)

    if evals_result:
        # Keep only the first metric's history per validation set.
        for set_name, metric_map in evals_result.items():
            first_metric = list(metric_map.keys())[0]
            evals_result[set_name][first_metric] = metric_map[first_metric]
        self.evals_result_ = evals_result

    if early_stopping_rounds is not None:
        self.best_score = self._Booster.best_score
        self.best_iteration = self._Booster.best_iteration
        self.best_ntree_limit = self._Booster.best_ntree_limit
    return self
def fit(
        self,
        X,
        y,
        *,
        sample_weight=None,
        base_margin=None,
        eval_set=None,
        eval_metric=None,
        early_stopping_rounds=None,
        verbose=True,
        xgb_model=None,
        sample_weight_eval_set=None,
        base_margin_eval_set=None,
        feature_weights=None,
        callbacks=None,
        ray_params: Union[None, RayParams, Dict] = None,
        _remote: Optional[bool] = None,
        ray_dmatrix_params: Optional[Dict] = None,
):
    """Fit the distributed (Ray-backed) gradient boosting classifier.

    X may either be array-like data (converted to a RayDMatrix here) or
    an already-constructed RayDMatrix, in which case labels must be
    pre-encoded and `num_class` must have been set at initialization.
    Returns self.
    """
    evals_result = {}
    ray_dmatrix_params = ray_dmatrix_params or {}

    params = self.get_xgb_params()

    # Detect whether X/eval_set are already RayDMatrix objects.
    train_dmatrix, evals = _check_if_params_are_ray_dmatrix(
        X, sample_weight, base_margin, eval_set, sample_weight_eval_set,
        base_margin_eval_set)

    if train_dmatrix is not None:
        # X came in as a RayDMatrix: no label encoding can be done here.
        if not hasattr(self, "use_label_encoder"):
            warnings.warn("If X is a RayDMatrix, no label encoding"
                          " will be performed. Ensure the labels are"
                          " encoded.")
        elif self.use_label_encoder:
            raise ValueError(
                "X cannot be a RayDMatrix if `use_label_encoder` "
                "is set to True")
        if "num_class" not in params:
            raise ValueError(
                "`num_class` must be set during initalization if X"
                " is a RayDMatrix")
        self.classes_ = list(range(0, params["num_class"]))
        self.n_classes_ = params["num_class"]
        if self.n_classes_ <= 2:
            # Binary problems must not carry num_class in native params.
            params.pop("num_class")
        label_transform = lambda x: x  # noqa: E731
    else:
        if len(X.shape) != 2:
            # Simply raise an error here since there might be many
            # different ways of reshaping
            raise ValueError(
                "Please reshape the input data X into 2-dimensional "
                "matrix.")
        label_transform = self._ray_fit_preprocess(y)

    if callable(self.objective):
        obj = _objective_decorator(self.objective)
        # Placeholder built-in objective; the custom objective is passed
        # to train() via `obj` — presumably not used. TODO confirm.
        params["objective"] = "binary:logistic"
    else:
        obj = None

    if self.n_classes_ > 2:
        # Switch to using a multiclass objective in the underlying
        # XGB instance
        params["objective"] = "multi:softprob"
        params["num_class"] = self.n_classes_

    # `_configure_fit`'s signature changed in XGBoost 1.6.0; probe the
    # old 3-tuple form first and fall back to the 5-tuple form.
    try:
        model, feval, params = self._configure_fit(xgb_model, eval_metric,
                                                   params)
    except TypeError:
        # XGBoost >= 1.6.0
        (model, feval, params, early_stopping_rounds,
         callbacks) = self._configure_fit(xgb_model, eval_metric, params,
                                          early_stopping_rounds, callbacks)

    if train_dmatrix is None:
        # Array-like inputs: wrap train and eval data as RayDMatrix.
        train_dmatrix, evals = _wrap_evaluation_matrices(
            missing=self.missing,
            X=X,
            y=y,
            group=None,
            qid=None,
            sample_weight=sample_weight,
            base_margin=base_margin,
            feature_weights=feature_weights,
            eval_set=eval_set,
            sample_weight_eval_set=sample_weight_eval_set,
            base_margin_eval_set=base_margin_eval_set,
            eval_group=None,
            eval_qid=None,
            # changed in xgboost-ray:
            create_dmatrix=lambda **kwargs: RayDMatrix(**{
                **kwargs,
                **ray_dmatrix_params
            }),
            **self._ray_get_wrap_evaluation_matrices_compat_kwargs(
                label_transform=label_transform))

    # remove those as they will be set in RayXGBoostActor
    params.pop("n_jobs", None)
    params.pop("nthread", None)

    ray_params = self._ray_set_ray_params_n_jobs(ray_params, self.n_jobs)

    additional_results = {}

    self._Booster = train(
        params,
        train_dmatrix,
        self.get_num_boosting_rounds(),
        evals=evals,
        early_stopping_rounds=early_stopping_rounds,
        evals_result=evals_result,
        obj=obj,
        feval=feval,
        verbose_eval=verbose,
        xgb_model=model,
        callbacks=callbacks,
        # changed in xgboost-ray:
        additional_results=additional_results,
        ray_params=ray_params,
        _remote=_remote,
    )

    if not callable(self.objective):
        self.objective = params["objective"]
    self.additional_results_ = additional_results
    self._set_evaluation_result(evals_result)
    return self
def fit(
        self,
        X,
        y,
        *,
        sample_weight=None,
        base_margin=None,
        eval_set=None,
        eval_metric=None,
        early_stopping_rounds=None,
        verbose=True,
        xgb_model: Optional[Union[Booster, str, "XGBModel"]] = None,
        sample_weight_eval_set=None,
        base_margin_eval_set=None,
        feature_weights=None,
        callbacks=None,
        ray_params: Union[None, RayParams, Dict] = None,
        _remote: Optional[bool] = None,
        ray_dmatrix_params: Optional[Dict] = None,
):
    """Fit the distributed (Ray-backed) gradient boosting regressor.

    X may be array-like data (wrapped into a RayDMatrix here) or an
    already-constructed RayDMatrix. Returns self.
    """
    evals_result = {}
    ray_dmatrix_params = ray_dmatrix_params or {}

    # Detect whether X/eval_set are already RayDMatrix objects.
    train_dmatrix, evals = _check_if_params_are_ray_dmatrix(
        X, sample_weight, base_margin, eval_set, sample_weight_eval_set,
        base_margin_eval_set)

    if train_dmatrix is None:
        # Array-like inputs: wrap train and eval data as RayDMatrix.
        train_dmatrix, evals = _wrap_evaluation_matrices(
            missing=self.missing,
            X=X,
            y=y,
            group=None,
            qid=None,
            sample_weight=sample_weight,
            base_margin=base_margin,
            feature_weights=feature_weights,
            eval_set=eval_set,
            sample_weight_eval_set=sample_weight_eval_set,
            base_margin_eval_set=base_margin_eval_set,
            eval_group=None,
            eval_qid=None,
            # changed in xgboost-ray:
            create_dmatrix=lambda **kwargs: RayDMatrix(**{
                **kwargs,
                **ray_dmatrix_params
            }),
            **self._ray_get_wrap_evaluation_matrices_compat_kwargs())

    params = self.get_xgb_params()

    if callable(self.objective):
        obj = _objective_decorator(self.objective)
        # Placeholder built-in objective; the custom objective is passed
        # to train() via `obj`.
        params["objective"] = "reg:squarederror"
    else:
        obj = None

    # `_configure_fit`'s signature changed in XGBoost 1.6.0; probe the
    # old 3-tuple form first and fall back to the 5-tuple form.
    try:
        model, feval, params = self._configure_fit(xgb_model, eval_metric,
                                                   params)
    except TypeError:
        # XGBoost >= 1.6.0
        (model, feval, params, early_stopping_rounds,
         callbacks) = self._configure_fit(xgb_model, eval_metric, params,
                                          early_stopping_rounds, callbacks)

    # remove those as they will be set in RayXGBoostActor
    params.pop("n_jobs", None)
    params.pop("nthread", None)

    ray_params = self._ray_set_ray_params_n_jobs(ray_params, self.n_jobs)

    additional_results = {}

    self._Booster = train(
        params,
        train_dmatrix,
        self.get_num_boosting_rounds(),
        evals=evals,
        early_stopping_rounds=early_stopping_rounds,
        evals_result=evals_result,
        obj=obj,
        feval=feval,
        verbose_eval=verbose,
        xgb_model=model,
        callbacks=callbacks,
        # changed in xgboost-ray:
        additional_results=additional_results,
        ray_params=ray_params,
        _remote=_remote,
    )

    self.additional_results_ = additional_results
    self._set_evaluation_result(evals_result)
    return self
def fit(self, X, y, sample_weight=None, eval_set=None, eval_metric=None,
        early_stopping_rounds=None, verbose=True, xgb_model=None,
        sample_weight_eval_set=None):
    """
    Fit the gradient boosting model

    Parameters
    ----------
    X : array_like
        Feature matrix
    y : array_like
        Labels
    sample_weight : array_like
        instance weights
    eval_set : list, optional
        A list of (X, y) tuple pairs to use as a validation set for
        early-stopping
    sample_weight_eval_set : list, optional
        A list of the form [L_1, L_2, ..., L_n], where each L_i is a list
        of instance weights on the i-th validation set.
    eval_metric : str, callable, optional
        If a str, should be a built-in evaluation metric to use. See
        doc/parameter.md. If callable, a custom evaluation metric. The
        call signature is func(y_predicted, y_true) where y_true will be
        a DMatrix object such that you may need to call the get_label
        method. It must return a str, value pair where the str is a name
        for the evaluation and value is the value of the evaluation
        function. This objective is always minimized.
    early_stopping_rounds : int
        Activates early stopping. Validation error needs to decrease at
        least every <early_stopping_rounds> round(s) to continue training.
        Requires at least one item in evals. If there's more than one,
        will use the last. Returns the model from the last iteration
        (not the best one). If early stopping occurs, the model will
        have three additional fields: bst.best_score, bst.best_iteration
        and bst.best_ntree_limit.
        (Use bst.best_ntree_limit to get the correct value if
        num_parallel_tree and/or num_class appears in the parameters)
    verbose : bool
        If `verbose` and an evaluation set is used, writes the evaluation
        metric measured on the validation set to stderr.
    xgb_model : str
        file name of stored xgb model or 'Booster' instance Xgb model to be
        loaded before training (allows training continuation).
    """
    # NOTE(review): `eval_set` and `sample_weight_eval_set` are accepted
    # and documented but never used below — train() is called without
    # `evals`, so early stopping has no validation set here. Confirm
    # whether this is intentional.
    X, y = check_X_y(X, y, accept_sparse=False, y_numeric=True)
    # Split the group indicator off X; `sizes` holds per-group counts.
    sizes, _, X_features, y, _ = self._preprare_data_in_groups(X, y)

    params = self.get_xgb_params()

    if callable(self.objective):
        obj = _objective_decorator(self.objective)
        # Dummy, Not used when custom objective is given
        params["objective"] = "binary:logistic"
    else:
        obj = None

    evals_result = {}
    # Callable metrics travel as `feval`; string metrics as a parameter.
    feval = eval_metric if callable(eval_metric) else None
    if eval_metric is not None:
        if callable(eval_metric):
            eval_metric = None
        else:
            params.update({'eval_metric': eval_metric})

    if sample_weight is not None:
        train_dmatrix = DMatrix(X_features, label=y, weight=sample_weight,
                                missing=self.missing)
    else:
        train_dmatrix = DMatrix(X_features, label=y, missing=self.missing)
    # Ranking objectives need the query-group boundaries on the DMatrix.
    train_dmatrix.set_group(sizes)

    self._Booster = train(params, train_dmatrix, self.n_estimators,
                          early_stopping_rounds=early_stopping_rounds,
                          evals_result=evals_result, obj=obj, feval=feval,
                          verbose_eval=verbose, xgb_model=xgb_model)

    if evals_result:
        # Flatten each validation entry to {name: {metric: history}}.
        for val in evals_result.items():
            evals_result_key = list(val[1].keys())[0]
            evals_result[
                val[0]][evals_result_key] = val[1][evals_result_key]
        self.evals_result = evals_result

    if early_stopping_rounds is not None:
        self.best_score = self._Booster.best_score
        self.best_iteration = self._Booster.best_iteration
        self.best_ntree_limit = self._Booster.best_ntree_limit
    return self
def fit(self, X, y, sample_weight=None, eval_set=None, eval_metric=None,
        early_stopping_rounds=None, verbose=True, xgb_model=None,
        callbacks=None, learning_rates=None):
    """
    Fit the gradient boosting model

    Parameters
    ----------
    X : array_like
        Feature matrix with the first feature containing a group indicator
    y : array_like
        Labels
    sample_weight : array_like
        instance weights
    eval_set : list, optional
        A list of (X, y) tuple pairs to use as a validation set for
        early-stopping
    eval_metric : str, callable, optional
        If a str, should be a built-in evaluation metric to use. See
        doc/parameter.md. If callable, a custom evaluation metric. The
        call signature is func(y_predicted, y_true) where y_true will be
        a DMatrix object such that you may need to call the get_label
        method. It must return a str, value pair where the str is a name
        for the evaluation and value is the value of the evaluation
        function. This objective is always minimized.
    early_stopping_rounds : int
        Activates early stopping. Validation error needs to decrease at
        least every <early_stopping_rounds> round(s) to continue training.
        Requires at least one item in evals. If there's more than one,
        will use the last. Returns the model from the last iteration
        (not the best one). If early stopping occurs, the model will
        have three additional fields: bst.best_score, bst.best_iteration
        and bst.best_ntree_limit.
        (Use bst.best_ntree_limit to get the correct value if
        num_parallel_tree and/or num_class appears in the parameters)
    verbose : bool
        If `verbose` and an evaluation set is used, writes the evaluation
        metric measured on the validation set to stderr.
    learning_rates: list or function (deprecated - use callback API
        instead)
        List of learning rate for each boosting round or a customized
        function that calculates eta in terms of current number of round
        and the total number of boosting round (e.g. yields learning rate
        decay)
    xgb_model :
        file name of stored xgb model or 'Booster' instance Xgb model to
        be loaded before training (allows training continuation).
    callbacks : list of callback functions
        List of callback functions that are applied at end of each
        iteration. It is possible to use predefined callbacks by using
        xgb.callback module. Example:
        [xgb.callback.reset_learning_rate(custom_rates)]
    """
    #X, y = check_X_y(X, y, accept_sparse=False, y_numeric=True)
    # Split the group indicator off X; `sizes` holds per-group counts.
    sizes, _, X_features, y, _, _ = self._preprare_data_in_groups(X, y)

    params = self.get_xgb_params()

    if callable(self.objective):
        obj = _objective_decorator(self.objective)
        # Dummy, Not used when custom objective is given
        params["objective"] = "binary:logistic"
    else:
        obj = None

    evals_result = {}
    # Callable metrics travel as `feval`; string metrics as a parameter.
    feval = eval_metric if callable(eval_metric) else None
    if eval_metric is not None:
        if callable(eval_metric):
            eval_metric = None
        else:
            params.update({'eval_metric': eval_metric})

    if sample_weight is not None:
        train_dmatrix = DMatrix(X_features, label=y, weight=sample_weight,
                                missing=self.missing)
    else:
        train_dmatrix = DMatrix(X_features, label=y, missing=self.missing)
    # Ranking objectives need the query-group boundaries on the DMatrix.
    train_dmatrix.set_group(sizes)

    def _dmat_init(group, **params):
        # Build a DMatrix and attach its query-group boundaries.
        ret = DMatrix(**params)
        ret.set_group(group)
        return ret

    eval_group = []
    neval_set = []
    if eval_set:
        # NOTE(review): this loop rebinds `X_features` and `y`, clobbering
        # the training arrays above — harmless only because they are no
        # longer used past this point.
        for i in range(len(eval_set)):
            seval_group, _, X_features, y, _, _ = self._preprare_data_in_groups(
                eval_set[i][0], eval_set[i][1])
            eval_group.append(seval_group)
            neval_set.append([X_features, y])
    if neval_set != []:
        # NOTE(review): eval-set weights are always None here — there is
        # no sample_weight_eval_set parameter on this fit(); verify.
        sample_weight_eval_set = [None] * len(eval_set)
        evals = [
            _dmat_init(eval_group[i],
                       data=neval_set[i][0],
                       label=neval_set[i][1],
                       missing=self.missing,
                       weight=sample_weight_eval_set[i],
                       nthread=self.n_jobs) for i in range(len(neval_set))
        ]
        nevals = len(evals)
        eval_names = ["eval_{}".format(i) for i in range(nevals)]
        evals = list(zip(evals, eval_names))
    else:
        evals = ()

    self._Booster = train(params, train_dmatrix, self.n_estimators,
                          evals=evals,
                          early_stopping_rounds=early_stopping_rounds,
                          evals_result=evals_result, obj=obj, feval=feval,
                          verbose_eval=verbose, xgb_model=xgb_model,
                          learning_rates=learning_rates,
                          callbacks=callbacks)

    if evals_result:
        # Flatten each validation entry to {name: {metric: history}}.
        for val in evals_result.items():
            evals_result_key = list(val[1].keys())[0]
            evals_result[
                val[0]][evals_result_key] = val[1][evals_result_key]
        self.evals_result = evals_result

    if early_stopping_rounds is not None:
        self.best_score = self._Booster.best_score
        self.best_iteration = self._Booster.best_iteration
        self.best_ntree_limit = self._Booster.best_ntree_limit
    return self
def fit(self, X, y, sample_weight=None, eval_set=None, eval_metric=None,
        early_stopping_rounds=None, verbose=True, xgb_model=None):
    # pylint: disable=missing-docstring,invalid-name,attribute-defined-outside-init
    """Train the gradient boosting model on (X, y).

    Parameters
    ----------
    X : array_like
        Feature matrix.
    y : array_like
        Labels.
    sample_weight : array_like, optional
        Per-instance weights.
    eval_set : list, optional
        (X, y) tuple pairs used as validation sets for early stopping.
    eval_metric : str or callable, optional
        Built-in metric name, or a custom metric with signature
        func(y_predicted, y_true) returning a (name, value) pair; the
        metric is always minimized.
    early_stopping_rounds : int, optional
        Stop when the validation error has not decreased for this many
        rounds; on early stop the fitted estimator exposes best_score,
        best_iteration and best_ntree_limit.
    verbose : bool
        Write the validation metric to stderr each round.
    xgb_model : str or Booster, optional
        Stored model to resume training from.

    Returns
    -------
    self
    """
    if sample_weight is None:
        trainDmatrix = DMatrix(X, label=y, missing=self.missing,
                               nthread=self.n_jobs)
    else:
        trainDmatrix = DMatrix(X, label=y, weight=sample_weight,
                               missing=self.missing, nthread=self.n_jobs)

    evals_result = {}
    evals = ()
    if eval_set is not None:
        evals = []
        for idx, pair in enumerate(eval_set):
            holdout = DMatrix(pair[0], label=pair[1], missing=self.missing,
                              nthread=self.n_jobs)
            evals.append((holdout, "validation_{}".format(idx)))

    params = self.get_xgb_params()
    obj = None
    if callable(self.objective):
        obj = _objective_decorator(self.objective)
        # Placeholder; the real objective is passed via `obj`.
        params["objective"] = "reg:linear"

    # Callable metrics travel as `feval`; string metrics as a parameter.
    feval = None
    if callable(eval_metric):
        feval = eval_metric
    elif eval_metric is not None:
        params['eval_metric'] = eval_metric

    self._Booster = train(params, trainDmatrix, self.n_estimators,
                          evals=evals,
                          early_stopping_rounds=early_stopping_rounds,
                          evals_result=evals_result, obj=obj, feval=feval,
                          verbose_eval=verbose, xgb_model=xgb_model)

    if evals_result:
        # Keep only the first metric's history per validation set.
        for set_name, metric_map in evals_result.items():
            first_metric = list(metric_map.keys())[0]
            evals_result[set_name][first_metric] = metric_map[first_metric]
        self.evals_result_ = evals_result

    if early_stopping_rounds is not None:
        self.best_score = self._Booster.best_score
        self.best_iteration = self._Booster.best_iteration
        self.best_ntree_limit = self._Booster.best_ntree_limit
    return self
def fit(self, X, y, sample_weight=None, eval_set=None, eval_metric=None,
        early_stopping_rounds=None, verbose=True, xgb_model=None):
    # pylint: disable = attribute-defined-outside-init,arguments-differ
    """
    Fit gradient boosting classifier

    Parameters
    ----------
    X : array_like
        Feature matrix
    y : array_like
        Labels
    sample_weight : array_like
        Weight for each instance
    eval_set : list, optional
        A list of (X, y) pairs to use as a validation set for
        early-stopping
    eval_metric : str, callable, optional
        If a str, should be a built-in evaluation metric to use. See
        doc/parameter.md. If callable, a custom evaluation metric. The call
        signature is func(y_predicted, y_true) where y_true will be a
        DMatrix object such that you may need to call the get_label
        method. It must return a str, value pair where the str is a name
        for the evaluation and value is the value of the evaluation
        function. This objective is always minimized.
    early_stopping_rounds : int, optional
        Activates early stopping. Validation error needs to decrease at
        least every <early_stopping_rounds> round(s) to continue training.
        Requires at least one item in evals. If there's more than one,
        will use the last. Returns the model from the last iteration
        (not the best one). If early stopping occurs, the model will
        have three additional fields: bst.best_score, bst.best_iteration
        and bst.best_ntree_limit.
        (Use bst.best_ntree_limit to get the correct value if
        num_parallel_tree and/or num_class appears in the parameters)
    verbose : bool
        If `verbose` and an evaluation set is used, writes the evaluation
        metric measured on the validation set to stderr.
    xgb_model : str
        file name of stored xgb model or 'Booster' instance Xgb model to be
        loaded before training (allows training continuation).
    """
    evals_result = {}
    self.classes_ = np.unique(y)
    self.n_classes_ = len(self.classes_)

    xgb_options = self.get_xgb_params()

    if callable(self.objective):
        obj = _objective_decorator(self.objective)
        # Placeholder built-in objective; the custom objective is passed
        # to train() via `obj`, so this value is presumably not used —
        # TODO confirm ("Use default value. Is it really not used ?").
        xgb_options["objective"] = "binary:logistic"
    else:
        obj = None

    if self.n_classes_ > 2:
        # Switch to using a multiclass objective in the underlying XGB
        # instance
        xgb_options["objective"] = "multi:softprob"
        xgb_options['num_class'] = self.n_classes_

    # A callable metric goes to train() as `feval`; a string metric is
    # forwarded through the native `eval_metric` parameter instead.
    feval = eval_metric if callable(eval_metric) else None
    if eval_metric is not None:
        if callable(eval_metric):
            eval_metric = None
        else:
            xgb_options.update({"eval_metric": eval_metric})

    # Encode arbitrary class labels into contiguous integers for XGBoost.
    self._le = XGBLabelEncoder().fit(y)
    training_labels = self._le.transform(y)

    if eval_set is not None:
        # TODO: use sample_weight if given?
        evals = list(
            DMatrix(x[0], label=self._le.transform(x[1]),
                    missing=self.missing, nthread=self.n_jobs)
            for x in eval_set
        )
        nevals = len(evals)
        eval_names = ["validation_{}".format(i) for i in range(nevals)]
        evals = list(zip(evals, eval_names))
    else:
        evals = ()

    self._features_count = X.shape[1]

    if sample_weight is not None:
        train_dmatrix = DMatrix(X, label=training_labels,
                                weight=sample_weight, missing=self.missing,
                                nthread=self.n_jobs)
    else:
        train_dmatrix = DMatrix(X, label=training_labels,
                                missing=self.missing, nthread=self.n_jobs)

    self._Booster = train(xgb_options, train_dmatrix, self.n_estimators,
                          evals=evals,
                          early_stopping_rounds=early_stopping_rounds,
                          evals_result=evals_result, obj=obj, feval=feval,
                          # Only the last kwarg in of this call was
                          # changed in this file!!!
                          verbose_eval=verbose, xgb_model=xgb_model)

    self.objective = xgb_options["objective"]
    if evals_result:
        # Flatten each validation entry to {name: {metric: history}}.
        for val in evals_result.items():
            evals_result_key = list(val[1].keys())[0]
            evals_result[val[0]][evals_result_key] = val[1][
                evals_result_key]
        self.evals_result_ = evals_result

    if early_stopping_rounds is not None:
        # Expose early-stopping outcome on the estimator.
        self.best_score = self._Booster.best_score
        self.best_iteration = self._Booster.best_iteration
        self.best_ntree_limit = self._Booster.best_ntree_limit
    return self