def __init__(self, binner=None, estimator=None, n_jobs=None, verbose=False):
        """
        @param      binner              transformer or predictor which creates the buckets
        @param      estimator           predictor trained on every bucket
        @param      n_jobs              number of parallel jobs (for training and predicting)
        @param      verbose             boolean or use ``'tqdm'`` to use :epkg:`tqdm`
                                        to fit the estimators

        *binner* allows the following values:

        - ``tree``: the model is :epkg:`sklearn:tree:DecisionTreeRegressor`
        - ``'bins'``: the model :epkg:`sklearn:preprocessing:KBinsDiscretizer`
        - any instantiated model

        *estimator* allows the following values:

        - ``None``: the model is :epkg:`sklearn:linear_model:LinearRegression`
        - any instantiated model
        """
        # Resolve default components before delegating to the parent class.
        binner = (DecisionTreeRegressor(min_samples_leaf=2)
                  if binner in ('tree', None) else binner)
        estimator = LinearRegression() if estimator is None else estimator
        RegressorMixin.__init__(self)
        PiecewiseEstimator.__init__(self, binner=binner, estimator=estimator,
                                    n_jobs=n_jobs, verbose=verbose)
 def __init__(self, num_inputs, mxseed=0, epochs=5000, net_type=1):
     """
     @param      num_inputs      number of inputs of the network
     @param      mxseed          seed value (presumably for the random
                                 generator used to build the network —
                                 confirm against the fit method)
     @param      epochs          number of training epochs
     @param      net_type        identifier of the network architecture
     """
     BaseEstimator.__init__(self)
     RegressorMixin.__init__(self)
     # The network is built later (self.net stays None here); only the
     # hyperparameters are stored, following scikit-learn conventions.
     # The redundant trailing ``return`` was removed: __init__ must
     # return None anyway.
     self.net = None
     self.num_inputs = num_inputs
     self.mxseed = mxseed
     self.epochs = epochs
     self.net_type = net_type
 def __init__(self, model='SIR', t=0, max_iter=100,
              learning_rate_init=0.1, lr_schedule='constant',
              momentum=0.9, power_t=0.5, early_th=None,
              min_threshold='auto', max_threshold='auto',
              verbose=False, init=None):
     """
     @param      model               name of the epidemic model, one of
                                     'SIR', 'SIRD', 'SIRC', 'SIRDC'
     @param      t                   initial time
     @param      max_iter            maximum number of iterations
     @param      learning_rate_init  initial learning rate
     @param      lr_schedule         learning rate schedule ('constant', ...)
     @param      momentum            momentum used by the optimisation
     @param      power_t             exponent for the learning rate schedule
     @param      early_th            early stopping threshold, ``None`` to disable
     @param      min_threshold       'auto' or lower bound for the coefficients
     @param      max_threshold       'auto' or upper bound for the coefficients
     @param      verbose             verbosity
     @param      init                ``None``, a dictionary, or a fitted
                                     EpidemicRegressor whose coefficients
                                     are reused as the starting point
     """
     if init is not None:
         if isinstance(init, EpidemicRegressor):
             # Reuse coefficients of an already fitted regressor.
             if hasattr(init, 'coef_'):
                 init = init.coef_.copy()
             else:
                 init = None  # pragma: no cover
         elif not isinstance(init, dict):
             raise TypeError(
                 f"init must be a dictionary not {type(init)}.")
     BaseEstimator.__init__(self)
     RegressorMixin.__init__(self)
     self.t = t
     self.model = model
     self.max_iter = max_iter
     self.learning_rate_init = learning_rate_init
     self.lr_schedule = lr_schedule
     self.momentum = momentum
     self.power_t = power_t
     self.early_th = early_th
     self.verbose = verbose
     if min_threshold == 'auto':
         if model.upper() in ('SIR', 'SIRD'):
             min_threshold = 0.0001
         elif model.upper() in ('SIRC', ):
             pmin = dict(beta=0.001, nu=0.0001, mu=0.0001,
                         a=-1., b=0., c=0.)
             min_threshold = numpy.array(
                 [pmin[k[0]] for k in CovidSIRDc.P0])
         elif model.upper() in ('SIRDC', ):
             # BUG FIX: the previous ``in ('SIRDC')`` tested membership in a
             # plain string (missing comma), i.e. a substring check — any
             # substring such as 'IRD' would have matched. Now a 1-tuple.
             pmin = dict(beta=0.001, nu=0.001, mu=0.001,
                         a=-1., b=0., c=0.)
             min_threshold = numpy.array(
                 [pmin[k[0]] for k in CovidSIRDc.P0])
     if max_threshold == 'auto':
         if model.upper() in ('SIR', 'SIRD'):
             max_threshold = 1.
         elif model.upper() in ('SIRC', 'SIRDC'):
             pmax = dict(beta=1., nu=0.5, mu=0.5,
                         a=0., b=4., c=2.)
             max_threshold = numpy.array(
                 [pmax[k[0]] for k in CovidSIRDc.P0])
     self.min_threshold = min_threshold
     self.max_threshold = max_threshold
     self._get_model()
     self.init = init
     if init is not None:
         self.coef_ = init
# Example #4 (score: 0) -- snippet-site separator, not code
 def __init__(self, force_positive=False, **kwargs):
     """
     Stores every keyword argument as an attribute; *kwargs* should
     contain parameters for :epkg:`sklearn:decomposition:NMF`.
     If *force_positive* is True, negative predictions are
     replaced by zero.
     """
     BaseEstimator.__init__(self)
     RegressorMixin.__init__(self)
     MultiOutputMixin.__init__(self)
     self.force_positive = force_positive
     # Expose each NMF parameter as an attribute of this estimator.
     for name, value in kwargs.items():
         setattr(self, name, value)
 def __init__(self, rf_estimator=None, lasso_estimator=None):
     """
     @param  rf_estimator    random forest estimator,
                             :epkg:`sklearn:ensemble:RandomForestRegressor`
                             by default
     @param  lasso_estimator Lasso estimator,
                             :epkg:`sklearn:linear_model:Lasso`
                             by default
     """
     BaseEstimator.__init__(self)
     RegressorMixin.__init__(self)
     # Build the default models only when the caller did not provide any.
     if rf_estimator is None:
         rf_estimator = RandomForestRegressor()
     if lasso_estimator is None:
         lasso_estimator = Lasso()
     self.rf_estimator = rf_estimator
     self.lasso_estimator = lasso_estimator
# Example #6 (score: 0) -- snippet-site separator, not code
 def __init__(self, estimator=None, n_estimators=10, n_jobs=None,
              alpha=1., verbose=False):
     """
     @param      estimator           predictor trained on every bucket,
                                     mandatory
     @param      n_estimators        number of estimators to train
     @param      n_jobs              number of parallel jobs
                                     (for training and predicting)
     @param      alpha               proportion of samples resampled
                                     for each training
     @param      verbose             boolean or use ``'tqdm'`` to use
                                     :epkg:`tqdm` to fit the estimators
     """
     BaseEstimator.__init__(self)
     RegressorMixin.__init__(self)
     # A base estimator is mandatory, there is no sensible default here.
     if estimator is None:
         raise ValueError("estimator cannot be null.")
     self.estimator = estimator
     self.n_estimators = n_estimators
     self.n_jobs = n_jobs
     self.alpha = alpha
     self.verbose = verbose
# Example #7 (score: 0) -- snippet-site separator, not code
 def __init__(self):
     """Initializes the scikit-learn mixin base classes; stores nothing."""
     RegressorMixin.__init__(self)
     BaseEstimator.__init__(self)
 def __init__(self, base_estimator):
     """
     @param  base_estimator  estimator this instance delegates to,
                             stored as-is (no default, no validation)
     """
     RegressorMixin.__init__(self)
     BaseEstimator.__init__(self)
     self.base_estimator = base_estimator