Example #1
    def __init__(self, **kwargs):
        super().__init__()
        tune_grid = {
            "n_estimators": np_list_arange(10, 300, 10, inclusive=True),
            "learning_rate": np_list_arange(0.001, 0.5, 0.001, inclusive=True),
            "subsample": np_list_arange(0.2, 1, 0.05, inclusive=True),
            "min_samples_split": [2, 4, 5, 7, 9, 10],
            "min_samples_leaf": [1, 2, 3, 4, 5],
            "max_depth": np_list_arange(1, 11, 1, inclusive=True),
            "min_impurity_decrease": [
                0, 0.0001, 0.0002, 0.0005, 0.001, 0.002, 0.005,
                0.01, 0.02, 0.05, 0.1, 0.2, 0.3, 0.4, 0.5,
            ],
            "max_features": [1.0, "sqrt", "log2"],
        }
        tune_distributions = {
            "n_estimators": IntUniformDistribution(10, 300),
            "learning_rate": UniformDistribution(0.000001, 0.5, log=True),
            "subsample": UniformDistribution(0.2, 1),
            "min_samples_split": IntUniformDistribution(2, 10),
            "min_samples_leaf": IntUniformDistribution(1, 5),
            "max_depth": IntUniformDistribution(1, 11),
            "max_features": UniformDistribution(0.4, 1),
            "min_impurity_decrease": UniformDistribution(0.000000001, 0.5, log=True),
        }

        self.tune_grid = tune_grid
        self.tune_distributions = tune_distributions
        self.estimator = GradientBoostingRegressor(**kwargs)
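
These grids rely on a np_list_arange helper that none of the snippets define. A minimal sketch of a compatible implementation, assuming it is essentially np.arange with an optional inclusive endpoint and rounding to suppress float drift (the original helper may differ in detail):

import numpy as np

def np_list_arange(start, stop, step, inclusive=False):
    # Sketch of the helper used throughout these examples: a plain-list
    # np.arange that can optionally include the stop value.
    if inclusive:
        stop = stop + step / 2.0  # nudge past the endpoint so it is kept
    values = np.arange(start, stop, step)
    if isinstance(start, int) and isinstance(step, int):
        return [int(v) for v in values]
    return [round(float(v), 10) for v in values]  # round away float drift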
Example #2
    def __init__(self, **kwargs):
        super().__init__()
        tune_grid = {
            "n_estimators": np_list_arange(10, 300, 10, inclusive=True),
            "criterion": ["gini", "entropy"],
            "max_depth": np_list_arange(1, 11, 1, inclusive=True),
            "min_impurity_decrease": [
                0, 0.0001, 0.0002, 0.0005, 0.001, 0.002, 0.005,
                0.01, 0.02, 0.05, 0.1, 0.2, 0.3, 0.4, 0.5,
            ],
            "max_features": [1.0, "sqrt", "log2"],
            "bootstrap": [True, False],
            "min_samples_split": [2, 5, 7, 9, 10],
            "min_samples_leaf": [2, 3, 4, 5, 6],
            "class_weight": ["balanced", "balanced_subsample", {}],
        }
        tune_distributions = {
            "n_estimators": IntUniformDistribution(10, 300),
            "max_depth": IntUniformDistribution(1, 11),
            "min_samples_split": IntUniformDistribution(2, 10),
            "min_samples_leaf": IntUniformDistribution(1, 5),
            "max_features": UniformDistribution(0.4, 1),
            "min_impurity_decrease": UniformDistribution(0.000000001, 0.5, log=True),
        }

        self.tune_grid = tune_grid
        self.tune_distributions = tune_distributions
        self.estimator = ExtraTreesClassifier(**kwargs)
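
UniformDistribution, IntUniformDistribution, and CategoricalDistribution are likewise used without a definition. They behave like thin records describing a search space; a plausible stand-in (the real classes, e.g. PyCaret's, carry more validation and tuner integration):

from dataclasses import dataclass, field
from typing import Any, List

@dataclass
class UniformDistribution:
    low: float
    high: float
    log: bool = False  # sample on a log scale when True

@dataclass
class IntUniformDistribution:
    low: int
    high: int
    log: bool = False

@dataclass
class CategoricalDistribution:
    values: List[Any] = field(default_factory=list)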
Example #3
    def __init__(self,
                 tol=0.001,
                 loss='hinge',
                 penalty='l2',
                 eta0=0.001,
                 **kwargs):
        super().__init__()
        tune_grid = {
            "penalty": ["elasticnet", "l2", "l1"],
            "l1_ratio": np_list_arange(0.0000000001, 1, 0.01, inclusive=False),
            "alpha": [
                0.0000001, 0.000001, 0.0001, 0.0002, 0.0005, 0.001, 0.002,
                0.005, 0.01, 0.02, 0.05, 0.1, 0.15, 0.2, 0.3, 0.4, 0.5,
            ],
            "fit_intercept": [True, False],
            "learning_rate": ["constant", "invscaling", "adaptive", "optimal"],
            "eta0": [0.001, 0.01, 0.05, 0.1, 0.2, 0.3, 0.4, 0.5],
        }
        tune_distributions = {
            "l1_ratio": UniformDistribution(0.0000000001, 0.9999999999),
            "alpha": UniformDistribution(0.0000000001, 0.9999999999, log=True),
            "eta0": UniformDistribution(0.001, 0.5, log=True),
        }

        self.tune_grid = tune_grid
        self.tune_distributions = tune_distributions
        self.estimator = SGDClassifier(tol=tol,
                                       loss=loss,
                                       penalty=penalty,
                                       eta0=eta0,
                                       **kwargs)
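
Note that for SGDClassifier, l1_ratio only matters when penalty is "elasticnet", and eta0 is ignored under the "optimal" learning-rate schedule. A conditional sampler, sketched here against optuna's Trial API, avoids spending trials on inert parameters:

def sample_sgd_params(trial):
    # Only suggest l1_ratio / eta0 when the sampled settings actually use them.
    params = {
        "penalty": trial.suggest_categorical("penalty", ["elasticnet", "l2", "l1"]),
        "alpha": trial.suggest_float("alpha", 1e-10, 0.5, log=True),
        "learning_rate": trial.suggest_categorical(
            "learning_rate", ["constant", "invscaling", "adaptive", "optimal"]),
    }
    if params["penalty"] == "elasticnet":
        params["l1_ratio"] = trial.suggest_float("l1_ratio", 1e-10, 1 - 1e-10)
    if params["learning_rate"] != "optimal":
        params["eta0"] = trial.suggest_float("eta0", 0.001, 0.5, log=True)
    return params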
Example #4
    def __init__(self, **kwargs):
        super().__init__()
        tune_grid = {
            "fit_intercept": [True, False],
            "normalize": [True, False],
            "eps": [
                0.00001, 0.00005, 0.0001, 0.0005, 0.001, 0.005,
                0.007, 0.01, 0.02, 0.05, 0.1,
            ],
        }
        tune_distributions = {
            "eps": UniformDistribution(0.00001, 0.1),
        }

        self.tune_grid = tune_grid
        self.tune_distributions = tune_distributions
        self.estimator = Lars(**kwargs)
Example #5
    def __init__(self, **kwargs):
        super().__init__()
        tune_grid = {
            "solver": ["lsqr", "eigen"],
            "shrinkage": [
                None, "auto", 0.0001, 0.0005, 0.001, 0.005, 0.01, 0.05,
                0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1,
            ],
        }
        tune_distributions = {
            "solver": CategoricalDistribution(["lsqr", "eigen"]),
            "shrinkage": UniformDistribution(0.0001, 1, log=True),
        }

        self.tune_grid = tune_grid
        self.tune_distributions = tune_distributions
        self.estimator = LinearDiscriminantAnalysis(**kwargs)
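
If these tune_distributions dicts are handed to an optuna study, each wrapper has a direct counterpart in the Trial API. A sketch of the translation, assuming the dataclass stand-ins sketched earlier:

def suggest_from(trial, name, dist):
    # Map a distribution record onto the matching optuna suggest_* call.
    if isinstance(dist, IntUniformDistribution):
        return trial.suggest_int(name, dist.low, dist.high, log=dist.log)
    if isinstance(dist, UniformDistribution):
        return trial.suggest_float(name, dist.low, dist.high, log=dist.log)
    if isinstance(dist, CategoricalDistribution):
        return trial.suggest_categorical(name, dist.values)
    raise TypeError(f"unsupported distribution: {dist!r}")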
Example #6
    def __init__(self, **kwargs):
        super().__init__()
        tune_grid = {
            "learning_rate": ["constant", "invscaling", "adaptive"],
            "alpha": [
                0.0000001, 0.000001, 0.0001, 0.0005, 0.001, 0.005, 0.01,
                0.05, 0.1, 0.15, 0.2, 0.3, 0.4, 0.5, 0.7, 0.9,
            ],
            "activation": ["tanh", "identity", "logistic", "relu"],
        }
        tune_distributions = {
            "alpha": UniformDistribution(0.0000000001, 0.9999999999, log=True),
            "activation": CategoricalDistribution(["tanh", "identity", "logistic", "relu"]),
            "learning_rate": CategoricalDistribution(["constant", "invscaling", "adaptive"]),
        }

        self.tune_grid = tune_grid
        self.tune_distributions = tune_distributions
        self.estimator = MLPClassifier(**kwargs)
Example #7
    def __init__(self, **kwargs):
        super().__init__()
        tune_grid = {"reg_param": np_list_arange(0, 1, 0.01, inclusive=True)}
        tune_distributions = {"reg_param": UniformDistribution(0, 1)}

        self.tune_grid = tune_grid
        self.tune_distributions = tune_distributions
        self.estimator = QuadraticDiscriminantAnalysis(**kwargs)
Example #8
    def __init__(self, **kwargs):
        super().__init__()
        tune_grid = {
            "max_depth": np_list_arange(1, 16, 1, inclusive=True),
            "max_features": [1.0, "sqrt", "log2"],
            "min_samples_leaf": [2, 3, 4, 5, 6],
            "min_samples_split": [2, 5, 7, 9, 10],
            "criterion": ["gini", "entropy"],
            "min_impurity_decrease": [
                0, 0.0001, 0.0002, 0.0005, 0.001, 0.002, 0.005,
                0.01, 0.02, 0.05, 0.1, 0.2, 0.3, 0.4, 0.5,
            ],
        }
        tune_distributions = {
            "max_depth": IntUniformDistribution(1, 16),
            "max_features": UniformDistribution(0.4, 1),
            "min_samples_leaf": IntUniformDistribution(2, 6),
            "min_samples_split": IntUniformDistribution(2, 10),
            "min_impurity_decrease": UniformDistribution(0.000000001, 0.5, log=True),
        }

        self.tune_grid = tune_grid
        self.tune_distributions = tune_distributions
        self.estimator = DecisionTreeClassifier(**kwargs)
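
Because tune_grid is a plain dict of lists, it can be passed straight to scikit-learn's searchers. A usage sketch (the container variable is hypothetical; any of the classes above would do):

from sklearn.model_selection import RandomizedSearchCV

search = RandomizedSearchCV(
    estimator=container.estimator,            # e.g. DecisionTreeClassifier()
    param_distributions=container.tune_grid,  # lists are sampled uniformly
    n_iter=50,
    cv=5,
    random_state=0,
)
# search.fit(X_train, y_train); search.best_params_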
Example #9
    def __init__(self, **kwargs):
        super().__init__()
        tune_grid = {
            "alpha": np_list_arange(0.01, 10, 0.01, inclusive=True),
            "l1_ratio": np_list_arange(0.01, 1, 0.001, inclusive=False),
            "fit_intercept": [True, False],
            "normalize": [True, False],
        }
        tune_distributions = {
            "alpha": UniformDistribution(0, 1),
            "l1_ratio": UniformDistribution(0.01, 0.9999999999),
        }

        self.tune_grid = tune_grid
        self.tune_distributions = tune_distributions
        self.estimator = ElasticNet(**kwargs)
Example #10
    def __init__(self, **kwargs):
        super().__init__()
        tune_grid = {}
        tune_distributions = {}
        tune_grid["penalty"] = ["l2", "none"]
        # C must be strictly positive for LogisticRegression, so the search
        # starts just above zero rather than at 0.
        tune_grid["C"] = [0.001] + np_list_arange(1, 10, 1.0, inclusive=True)
        tune_distributions["C"] = UniformDistribution(0.001, 10)

        self.tune_grid = tune_grid
        self.tune_distributions = tune_distributions
        self.estimator = LogisticRegression(**kwargs)
Example #11
    def __init__(self, **kwargs):
        super().__init__()
        # C must be strictly positive for SVC, so the range starts at 0.01.
        tune_grid = {
            "C": np_list_arange(0.01, 50, 0.01, inclusive=True),
            "class_weight": ["balanced", {}],
        }
        tune_distributions = {
            "C": UniformDistribution(0.01, 50),
        }

        self.tune_grid = tune_grid
        self.tune_distributions = tune_distributions
        self.estimator = SVC(**kwargs)
Example #12
    def __init__(self, **kwargs):
        super().__init__()
        tune_grid = {
            "n_estimators": np_list_arange(10, 300, 10, inclusive=True),
            "learning_rate": np_list_arange(0.001, 0.5, 0.001, inclusive=True),
            "algorithm": ["SAMME", "SAMME.R"],
        }
        tune_distributions = {
            "n_estimators": IntUniformDistribution(10, 300),
            "learning_rate": UniformDistribution(0.000001, 0.5, log=True),
        }

        self.tune_grid = tune_grid
        self.tune_distributions = tune_distributions
        self.estimator = AdaBoostClassifier(**kwargs)
Example #13
    def __init__(self, **kwargs):
        super().__init__()
        tune_grid = {
            "normalize": [True, False],
            "alpha": np_list_arange(0.01, 10, 0.01, inclusive=False),
            "fit_intercept": [True, False],
        }
        tune_distributions = {
            "alpha": UniformDistribution(0.001, 10),
        }

        self.tune_grid = tune_grid
        self.tune_distributions = tune_distributions
        self.estimator = RidgeClassification(**kwargs)
Example #14
    def __init__(self, **kwargs):
        super().__init__()
        tune_grid = {
            "var_smoothing": [
                1e-09, 2e-09, 5e-09, 8e-09, 9e-09,
                1e-07, 2e-07, 3e-07, 5e-07, 7e-07, 9e-07,
                1e-05, 0.001, 0.002, 0.003, 0.004, 0.005, 0.006,
                0.007, 0.008, 0.009, 0.01, 0.1, 1,
            ]
        }
        tune_distributions = {
            "var_smoothing": UniformDistribution(1e-09, 1, log=True)
        }

        self.tune_grid = tune_grid
        self.tune_distributions = tune_distributions
        self.estimator = GaussianNB(**kwargs)
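
The hand-written var_smoothing grid is roughly log-spaced over nine orders of magnitude. If the exact values don't matter, an equivalent grid can be generated far more compactly:

import numpy as np

# 10 points log-spaced from 1e-9 to 1, matching the span of the grid above.
var_smoothing_grid = np.logspace(-9, 0, num=10).tolist()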
Example #15
    def __init__(self, **kwargs):
        super().__init__()
        tune_grid = {
            "depth": list(range(1, 12)),
            "n_estimators": np_list_arange(10, 300, 10, inclusive=True),
            "random_strength": np_list_arange(0, 0.8, 0.1, inclusive=True),
            "l2_leaf_reg": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 20, 30, 50, 100, 200],
        }
        tune_distributions = {
            "depth": IntUniformDistribution(1, 11),
            "n_estimators": IntUniformDistribution(10, 300),
            "random_strength": UniformDistribution(0, 0.8),
            "l2_leaf_reg": IntUniformDistribution(1, 200, log=True),
        }

        self.tune_grid = tune_grid
        self.tune_distributions = tune_distributions
        self.estimator = CatBoostRegressor(**kwargs)
Example #16
    def __init__(self, **kwargs):
        super().__init__()
        tune_grid = {
            "learning_rate": ["constant", "invscaling", "adaptive"],
            "alpha": [
                0.0000001, 0.000001, 0.0001, 0.0005, 0.001, 0.005, 0.01,
                0.05, 0.1, 0.15, 0.2, 0.3, 0.4, 0.5, 0.7, 0.9,
            ],
            "hidden_layer_size_0": [50, 100],
            "hidden_layer_size_1": [0, 50, 100],
            "hidden_layer_size_2": [0, 50, 100],
            "activation": ["tanh", "identity", "logistic", "relu"],
        }
        tune_distributions = {
            "alpha": UniformDistribution(0.0000000001, 0.9999999999, log=True),
            "hidden_layer_size_0": IntUniformDistribution(50, 100),
            "hidden_layer_size_1": IntUniformDistribution(0, 100),
            "hidden_layer_size_2": IntUniformDistribution(0, 100),
        }

        self.tune_grid = tune_grid
        self.tune_distributions = tune_distributions
        self.estimator = MLPRegressor(**kwargs)
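
hidden_layer_size_0/1/2 are not native MLPRegressor parameters; scikit-learn expects a single hidden_layer_sizes tuple. Splitting it into three scalar knobs keeps the space grid-friendly, with a size of 0 meaning the layer is dropped. A sketch of the mapping presumably applied by a wrapper around the estimator:

def to_hidden_layer_sizes(size_0, size_1, size_2):
    # Drop zero-sized layers and pack the rest into the tuple sklearn expects.
    return tuple(s for s in (size_0, size_1, size_2) if s > 0)

# to_hidden_layer_sizes(100, 50, 0) -> (100, 50)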
Example #17
    def __init__(self, **kwargs):
        super().__init__()
        tune_grid = {
            "num_leaves": [10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 150, 200],
            "learning_rate": np_list_arange(0.001, 0.5, 0.001, inclusive=True),
            "n_estimators": np_list_arange(10, 300, 10, inclusive=True),
            "min_split_gain": [0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9],
            "reg_alpha": [
                0.0000001, 0.000001, 0.0001, 0.0005, 0.001, 0.005, 0.01,
                0.05, 0.1, 0.15, 0.2, 0.3, 0.4, 0.5, 0.7, 1, 2, 3, 4, 5, 10,
            ],
            "reg_lambda": [
                0.0000001, 0.000001, 0.0001, 0.0005, 0.001, 0.005, 0.01,
                0.05, 0.1, 0.15, 0.2, 0.3, 0.4, 0.5, 0.7, 1, 2, 3, 4, 5, 10,
            ],
            "feature_fraction": np_list_arange(0.4, 1, 0.1, inclusive=True),
            "bagging_fraction": np_list_arange(0.4, 1, 0.1, inclusive=True),
            "bagging_freq": [1, 2, 3, 4, 5, 6, 7],
            "min_child_samples": np_list_arange(5, 100, 5, inclusive=True),
        }
        tune_distributions = {
            "num_leaves": IntUniformDistribution(10, 200),
            "learning_rate": UniformDistribution(0.000001, 0.5, log=True),
            "n_estimators": IntUniformDistribution(10, 300),
            "min_split_gain": UniformDistribution(0, 1),
            "reg_alpha": UniformDistribution(0.0000000001, 10, log=True),
            "reg_lambda": UniformDistribution(0.0000000001, 10, log=True),
            "min_data_in_leaf": IntUniformDistribution(10, 10000),
            "feature_fraction": UniformDistribution(0.4, 1),
            "bagging_fraction": UniformDistribution(0.4, 1),
            "bagging_freq": IntUniformDistribution(1, 7),
            "min_child_samples": IntUniformDistribution(5, 100),
        }

        self.tune_grid = tune_grid
        self.tune_distributions = tune_distributions
        self.estimator = LGBMRegressor(**kwargs)
Example #18
    def __init__(self, **kwargs):
        super().__init__()
        tune_grid = {
            "learning_rate": np_list_arange(0.001, 0.5, 0.001, inclusive=True),
            "n_estimators": np_list_arange(10, 300, 10, inclusive=True),
            "subsample": [0.2, 0.3, 0.5, 0.7, 0.9, 1],
            "max_depth": np_list_arange(1, 11, 1, inclusive=True),
            "colsample_bytree": [0.5, 0.7, 0.9, 1],
            "min_child_weight": [1, 2, 3, 4],
            "reg_alpha": [
                0.0000001, 0.000001, 0.0001, 0.0005, 0.001, 0.005, 0.01,
                0.05, 0.1, 0.15, 0.2, 0.3, 0.4, 0.5, 0.7, 1, 2, 3, 4, 5, 10,
            ],
            "reg_lambda": [
                0.0000001, 0.000001, 0.0001, 0.0005, 0.001, 0.005, 0.01,
                0.05, 0.1, 0.15, 0.2, 0.3, 0.4, 0.5, 0.7, 1, 2, 3, 4, 5, 10,
            ],
            "scale_pos_weight": np_list_arange(0, 50, 0.1, inclusive=True),
        }
        tune_distributions = {
            "learning_rate": UniformDistribution(0.000001, 0.5, log=True),
            "n_estimators": IntUniformDistribution(10, 300),
            "subsample": UniformDistribution(0.2, 1),
            "max_depth": IntUniformDistribution(1, 11),
            "colsample_bytree": UniformDistribution(0.5, 1),
            "min_child_weight": IntUniformDistribution(1, 4),
            "reg_alpha": UniformDistribution(0.0000000001, 10, log=True),
            "reg_lambda": UniformDistribution(0.0000000001, 10, log=True),
            "scale_pos_weight": UniformDistribution(1, 50),
        }

        self.tune_grid = tune_grid
        self.tune_distributions = tune_distributions
        self.estimator = XGBRegressor(**kwargs)
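
All eighteen __init__ methods follow the same container pattern: build tune_grid and tune_distributions, then store them alongside the wrapped estimator. A hypothetical minimal base class consistent with what each constructor sets (the real base class in a library like PyCaret carries far more metadata):

class ModelContainer:
    """Minimal sketch of the base class the snippets' super().__init__() implies."""

    def __init__(self):
        self.tune_grid = {}           # discrete search space for grid search
        self.tune_distributions = {}  # continuous space for random/Bayesian tuners
        self.estimator = None         # set by each subclass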