Example #1
 def output_params(self, ret, clf, fit_params):
     amp = {"resolved": ret, "other": {}}
     params = clf.get_params()
     logging.info("Selected XGBoost Params are %s " % params)
     safe_del(ret, "xgboost")
     ret["xgboost"] = {}
     ret["xgboost"]["max_depth"] = params["max_depth"]
     ret["xgboost"]["learning_rate"] = params["learning_rate"]
     ret["xgboost"]["n_estimators"] = params["n_estimators"]
     ret["xgboost"]["nthread"] = params["n_jobs"] if params[
         "n_jobs"] > 0 else -1  # TODO: change => migration ?
     ret["xgboost"]["gamma"] = params["gamma"]
     ret["xgboost"]["min_child_weight"] = params["min_child_weight"]
     ret["xgboost"]["max_delta_step"] = params["max_delta_step"]
     ret["xgboost"]["subsample"] = params["subsample"]
     ret["xgboost"]["colsample_bytree"] = params["colsample_bytree"]
     ret["xgboost"]["colsample_bylevel"] = params["colsample_bylevel"]
     ret["xgboost"]["alpha"] = params["reg_alpha"]
     ret["xgboost"]["lambda"] = params["reg_lambda"]
     ret["xgboost"]["seed"] = params[
         "random_state"]  # TODO: change => migration ?
     ret["xgboost"]["impute_missing"] = True if params["missing"] else False
     ret["xgboost"]["missing"] = params["missing"]
     ret["xgboost"]["base_score"] = params["base_score"]
     ret["xgboost"]["scale_pos_weight"] = params["scale_pos_weight"]
     ret["xgboost"]["enable_early_stopping"] = fit_params.get(
         'early_stopping_rounds') is not None
     ret["xgboost"]["early_stopping_rounds"] = fit_params.get(
         'early_stopping_rounds')
     ret["xgboost"]["booster"] = params.get("booster")
     ret["xgboost"]["objective"] = params.get("objective").replace(":", "_")
     return amp
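
All of these hooks assume a module-level import of logging and a safe_del helper defined elsewhere in the enclosing module. A minimal sketch of what safe_del is assumed to do (drop a key if it is present, otherwise do nothing) is shown below; the actual implementation in the original module may differ:

    def safe_del(dct, key):
        # assumed behaviour: remove the grid entry if present, no-op otherwise
        dct.pop(key, None)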
Example #2
 def output_params(self, ret, clf, fit_params):
     amp = {"resolved": ret, "other": {}}
     params = clf.get_params()
     logging.info("Selected Ordinary Least Squares Params are %s " % params)
     safe_del(ret, "least_squares")
     ret["n_jobs"] = params["n_jobs"]
     return amp
Example #3
 def output_params(self, ret, clf, fit_params):
     amp = {"resolved": ret, "other": {}}
     safe_del(ret, "ridge_grid")
     params = clf.get_params()
     ret["lasso"] = {}
     if hasattr(clf, "alpha_"):
         ret["lasso"]["alpha"] = params.get("alpha", clf.alpha_)
     else:
         ret["lasso"]["alpha"] = params.get("alpha", 0)
     return amp
Example #4
 def output_params(self, ret, clf, fit_params):
     amp = {"resolved": ret, "other": {}}
     safe_del(ret, "logit_grid")
     params = clf.get_params()
     logging.info("LR Params are %s " % params)
     ret["logit"] = {
         "penalty": params["penalty"],
         "multi_class": params["multi_class"],
         "C": params["C"]
     }
     return amp
Example #5
 def output_params(self, ret, clf, fit_params):
     amp = {"resolved": ret, "other": {}}
     safe_del(ret, "dtc_classifier_grid")
     params = clf.get_params()
     logging.info("DT params are %s " % params)
     ret["dt"] = {
         "max_depth": params["max_depth"],
         "criterion": params["criterion"],
         "min_samples_leaf": params["min_samples_leaf"],
         "splitter": params["splitter"]
     }
     return amp
Example #6
 def output_params(self, ret, clf, fit_params):
     amp = {"resolved": ret, "other": {}}
     params = clf.get_params()
     logging.info("Selected KNN Params are %s " % params)
     safe_del(ret, "knn_grid")
     ret["knn"] = {
         "k": params["n_neighbors"],
         "distance_weighting": params["weights"] == "distance",
         "algorithm": params["algorithm"],
         "p": params["p"],
         "leaf_size": params["leaf_size"],
     }
     return amp
Example #7
 def output_params(self, ret, clf, fit_params):
     amp = {"resolved": ret, "other": {}}
     params = clf.get_params()
     logging.info("Selected SGD Params are %s " % params)
     safe_del(ret, "sgd_grid")
     ret["sgd"] = {
         "loss": params["loss"],
         "penalty": params["penalty"],
         "alpha": params["alpha"],
         "l1_ratio": params["l1_ratio"],
         "n_jobs": params["n_jobs"],
         "n_iter": clf.n_iter_
     }
     return amp
Example #8
    def output_params(self, ret, clf, fit_params):
        amp = {"resolved": ret, "other": {}}
        params = clf.get_params()
        logging.info("Selected SVC Params are %s " % params)
        safe_del(ret, "svc_grid")

        ret["svm"] = {
            "C": params["C"],
            "kernel": params["kernel"],
            "gamma": 0.0 if params["gamma"] == 'auto' else params["gamma"],
            "tol": params["tol"],
            "max_iter": params["max_iter"],
            "coef0": params["coef0"]
        }
        return amp
Example #9
 def output_params(self, ret, clf, fit_params):
     amp = {"resolved": ret, "other": {}}
     safe_del(ret, "extra_trees_grid")
     params = clf.get_params()
     logging.info("Extra trees Params are %s " % params)
     ret["extra_trees"] = {
         "estimators": len(clf.estimators_),
         "njobs": params["n_jobs"] if params["n_jobs"] > 0 else -1,
         "max_tree_depth": params["max_depth"],
         "min_samples_leaf": params["min_samples_leaf"],
         "selection_mode": get_selection_mode(params["max_features"]),
     }
     if ret["extra_trees"]["selection_mode"] == "number":
         ret["extra_trees"]["max_features"] = clf.max_features
     if ret["extra_trees"]["selection_mode"] == "prop":
         ret["extra_trees"]["max_feature_prop"] = clf.max_features
     amp["other"]["rf_min_samples_split"] = params["min_samples_split"]
     return amp
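
Examples #9 through #11 also call get_selection_mode, which is not defined in these snippets. A plausible sketch, under the assumption that it classifies scikit-learn's max_features value into the "number" / "prop" / named cases used above, would be:

    def get_selection_mode(max_features):
        # assumed mapping: an int means an absolute feature count ("number"),
        # a float means a proportion of the features ("prop"), and anything else
        # ("sqrt", "log2", "auto", None, ...) is passed through as a named mode
        if isinstance(max_features, int):
            return "number"
        if isinstance(max_features, float):
            return "prop"
        return max_features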
Example #10
    def output_params(self, ret, clf, fit_params):
        amp = {"resolved": ret, "other": {}}
        safe_del(ret, "gbt_classifier_grid")
        params = clf.get_params()
        logging.info("GBT Params are %s " % params)

        ret["gbt"] = {
            "n_estimators": len(clf.estimators_),
            "max_depth": params["max_depth"],
            "learning_rate": params["learning_rate"],
            "min_samples_leaf": params["min_samples_leaf"],
            "selection_mode": get_selection_mode(params["max_features"]),
            "loss": params["loss"]
        }
        if ret["gbt"]["selection_mode"] == "number":
            ret["gbt"]["max_features"] = ret["gbt_selection_mode"]
        if ret["gbt"]["selection_mode"] == "prop":
            ret["gbt"]["max_feature_prop"] = ret["gbt_selection_mode"]

        return amp
Example #11
    def output_params(self, ret, clf, fit_params):
        amp = {"resolved": ret, "other": {}}
        safe_del(ret, "rf_classifier_grid")
        params = clf.get_params()
        logging.info("Obtained RF CLF params: %s " % params)

        ret["rf"] = {
            "estimators": len(clf.estimators_),
            "max_tree_depth": params["max_depth"],
            "min_samples_leaf": params["min_samples_leaf"],
            "selection_mode": get_selection_mode(params["max_features"]),
        }

        if ret["rf"]["selection_mode"] == "number":
            ret["rf"]["max_features"] = clf.max_features
        if ret["rf"]["selection_mode"] == "prop":
            ret["rf"]["max_feature_prop"] = clf.max_features

        amp["other"]["rf_min_samples_split"] = params["min_samples_split"]

        return amp
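
A short usage sketch may help tie these hooks together. The DecisionTreeHook wrapper, the safe_del stand-in, and the initial ret dictionary below are assumptions made for illustration; in the original codebase output_params is a method on an algorithm-specific class and ret is the resolved-parameters dictionary built during training:

    import logging
    from sklearn.tree import DecisionTreeClassifier

    def safe_del(dct, key):
        # stand-in for the project helper sketched earlier
        dct.pop(key, None)

    class DecisionTreeHook:
        # hypothetical wrapper reproducing the body of Example #5
        def output_params(self, ret, clf, fit_params):
            amp = {"resolved": ret, "other": {}}
            safe_del(ret, "dtc_classifier_grid")
            params = clf.get_params()
            logging.info("DT params are %s", params)
            ret["dt"] = {
                "max_depth": params["max_depth"],
                "criterion": params["criterion"],
                "min_samples_leaf": params["min_samples_leaf"],
                "splitter": params["splitter"],
            }
            return amp

    clf = DecisionTreeClassifier(max_depth=5, criterion="gini")
    ret = {"dtc_classifier_grid": {"max_depth": [3, 5, 7]}}  # hypothetical search grid
    amp = DecisionTreeHook().output_params(ret, clf, fit_params={})
    # ret["dt"] is now {"max_depth": 5, "criterion": "gini",
    #                   "min_samples_leaf": 1, "splitter": "best"}
    # and amp == {"resolved": ret, "other": {}}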