def __init__(self,
              base_estimator=None,
              n_estimators=10,
              max_samples=1.0,
              max_features=1.0,
              bootstrap=True,
              bootstrap_features=False,
              oob_score=False,
              warm_start=False,
              n_jobs=None,
              random_state=None,
              verbose=0):
      """Wrap a bagging-style scikit-learn ensemble estimator.

      Records every constructor argument in ``self._hyperparams`` and
      instantiates the underlying scikit-learn model (``SKLModel``) from
      that dict.  ``base_estimator`` is passed through
      ``make_sklearn_compat`` so that a lale operator can be used where
      scikit-learn expects an estimator.

      The parameter list mirrors sklearn's bagging estimator signature
      (``n_estimators``, ``max_samples``, ``bootstrap``, ...) --
      presumably ``sklearn.ensemble.Bagging*``; confirm against the
      module's ``SKLModel`` alias.
      """
      # Keep the raw hyperparameters so they can be reported/cloned later.
      self._hyperparams = {
          'base_estimator': make_sklearn_compat(base_estimator),
          'n_estimators': n_estimators,
          'max_samples': max_samples,
          'max_features': max_features,
          'bootstrap': bootstrap,
          'bootstrap_features': bootstrap_features,
          'oob_score': oob_score,
          'warm_start': warm_start,
          'n_jobs': n_jobs,
          'random_state': random_state,
          'verbose': verbose
      }
      self._wrapped_model = SKLModel(**self._hyperparams)
# Example #2
    def test_clone_operator_choice(self):
        """Cloning a wrapped pipeline must not change cross-validation scores."""
        from sklearn.base import clone
        from sklearn.datasets import load_iris
        from sklearn.metrics import accuracy_score, make_scorer
        from sklearn.model_selection import cross_val_score

        data = load_iris()
        features, labels = data.data, data.target

        # Build a simple PCA -> LogisticRegression pipeline and wrap it for
        # scikit-learn compatibility, then clone the wrapper.
        pipeline = PCA() >> LogisticRegression()
        wrapped = make_sklearn_compat(pipeline)
        wrapped_clone = clone(wrapped)

        scorer = make_scorer(accuracy_score)
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            scores_original = cross_val_score(
                wrapped, features, labels, scoring=scorer, cv=2)
            scores_cloned = cross_val_score(
                wrapped_clone, features, labels, scoring=scorer, cv=2)

        # Original and clone must produce identical fold-by-fold scores.
        for score_orig, score_clone in zip(scores_original, scores_cloned):
            self.assertEqual(score_orig, score_clone)
# Example #3
 def __init__(
     self,
     base_estimator=None,
     n_estimators=10,
     max_samples=1.0,
     max_features=1.0,
     bootstrap=True,
     bootstrap_features=False,
     oob_score=False,
     warm_start=False,
     n_jobs=None,
     random_state=None,
     verbose=0,
 ):
     """Wrap a bagging-style scikit-learn ensemble estimator.

     Records every constructor argument in ``self._hyperparams`` and
     builds the underlying ``SKLModel`` from that dict.
     ``base_estimator`` is passed through ``make_sklearn_compat`` so a
     lale operator can be used where scikit-learn expects an estimator.
     The parameter list mirrors sklearn's bagging estimator signature --
     presumably ``sklearn.ensemble.Bagging*``; confirm against the
     module's ``SKLModel`` alias.
     """
     # Keep the raw hyperparameters so they can be inspected/cloned later.
     self._hyperparams = {
         "base_estimator": make_sklearn_compat(base_estimator),
         "n_estimators": n_estimators,
         "max_samples": max_samples,
         "max_features": max_features,
         "bootstrap": bootstrap,
         "bootstrap_features": bootstrap_features,
         "oob_score": oob_score,
         "warm_start": warm_start,
         "n_jobs": n_jobs,
         "random_state": random_state,
         "verbose": verbose,
     }
     self._wrapped_model = SKLModel(**self._hyperparams)
# Example #4
    def test_clone_operator_choice(self):
        """sklearn.base.clone must accept an operator choice, wrapped or raw."""
        from sklearn.base import clone

        # Pipeline containing an operator choice (PCA or NoOp) before LR.
        pipeline = (PCA() | NoOp) >> LogisticRegression()
        # Both the compat wrapper and the bare lale pipeline must be clonable.
        _ = clone(make_sklearn_compat(pipeline))
        _ = clone(pipeline)
# Example #5
def lale_trainable_op_from_config(op: 'Ops.PlannedOperator',
                                  cfg) -> 'Ops.TrainableOperator':
    """Turn a planned operator plus a SMAC configuration into a trainable one.

    The operator is wrapped for scikit-learn compatibility and cloned (so
    the caller's object is left untouched), then configured via
    ``set_params`` with the fixed-up SMAC parameters.
    """
    from sklearn.base import clone

    fixed_params = smac_fixup_params(cfg)
    compat_clone = clone(make_sklearn_compat(op))
    return compat_clone.set_params(**fixed_params)
# Example #6
 def test_wrap_from_instance(self):
     """make_operator must wrap a plain estimator instance into a
     TrainableIndividualOp, and the wrapper must survive a
     sklearn.base.clone round trip with its hyperparameters intact."""
     from lale.operators import make_operator, TrainableIndividualOp
     from lale.sklearn_compat import make_sklearn_compat
     from sklearn.base import clone
     # Neither the raw class nor a raw instance is a lale operator ...
     self.assertFalse(isinstance(UnknownOp, TrainableIndividualOp))
     instance = UnknownOp(n_neighbors=3)
     self.assertFalse(isinstance(instance, TrainableIndividualOp))
     # ... but wrapping the instance yields one, keeping its hyperparameters.
     wrapped = make_operator(instance)
     self.assertTrue(isinstance(wrapped, TrainableIndividualOp))
     self.assertEqual(wrapped.hyperparams(), {'n_neighbors': 3})
     # Round trip: compat wrapper -> sklearn clone -> back to lale must
     # preserve both the operator type and the hyperparameters.
     cloned = clone(make_sklearn_compat(wrapped)).to_lale()
     self.assertTrue(isinstance(cloned, TrainableIndividualOp))
     self.assertEqual(cloned.hyperparams(), {'n_neighbors': 3})
# Example #7
def lale_op_smac_tae(op: 'Ops.PlannedOperator', f_min):
    """Build a SMAC target-algorithm-executor callable for *op*.

    The returned function takes a SMAC configuration, clones the
    sklearn-compatible wrapper of *op*, applies the fixed-up configuration
    via ``set_params``, and evaluates ``f_min`` on the resulting trainable.
    """
    # TODO: we can probably do this in a different way, but get_smac_configuration_space
    # we already have these sklearn compatibility wrappers it is easier for now to use them
    compat_op = make_sklearn_compat(op)

    def evaluate(cfg):
        from sklearn.base import clone
        params = smac_fixup_params(cfg)
        candidate = clone(compat_op).set_params(**params)
        return f_min(candidate)

    return evaluate
# Example #8
 def test_sklearn_get_param_ranges_and_dist(self):
     """Param ranges and distributions must validate through the compat wrapper."""
     operators = (
         ConcatFeatures,
         KNeighborsClassifier,
         LogisticRegression,
         MLPClassifier,
         Nystroem,
         OneHotEncoder,
         PCA,
         RandomForestClassifier,
     )
     for operator in operators:
         compat = make_sklearn_compat(operator)
         self.validate_get_param_ranges(compat)
         self.validate_get_param_dist(compat)
# Example #9
    def test_wrap_from_instance(self):
        """make_operator must wrap a plain estimator instance into a
        TrainableIndividualOp, and the wrapper must survive a
        sklearn.base.clone round trip with its hyperparameters intact."""
        from sklearn.base import clone

        from lale.operators import TrainableIndividualOp, make_operator
        from lale.sklearn_compat import make_sklearn_compat

        # Neither the raw class nor a raw instance is a lale operator ...
        self.assertFalse(isinstance(UnknownOp, TrainableIndividualOp))
        instance = UnknownOp(n_neighbors=3)
        self.assertFalse(isinstance(instance, TrainableIndividualOp))
        # ... but wrapping the instance yields one, keeping its hyperparameters.
        wrapped = make_operator(instance)
        self.assertTrue(isinstance(wrapped, TrainableIndividualOp))
        assert isinstance(
            wrapped, TrainableIndividualOp
        )  # help type checkers that don't know about assertTrue
        self.assertEqual(wrapped.hyperparams(), {"n_neighbors": 3})
        # Round trip: compat wrapper -> sklearn clone -> back to lale must
        # preserve both the operator type and the hyperparameters.
        cloned = clone(make_sklearn_compat(wrapped)).to_lale()
        self.assertTrue(isinstance(cloned, TrainableIndividualOp))
        self.assertEqual(cloned.hyperparams(), {"n_neighbors": 3})
# Example #10
def LaleGridSearchCV(
        op:'PlannedOperator', 
        lale_num_samples:Optional[int]=None, 
        lale_num_grids:Optional[float]=None, 
        lale_pgo:Optional[PGO]=None,
        **kwargs):
    """Create a GridSearchCV over the hyperparameter grids of a lale operator.

    Parameters
    ----------
    op : The lale PlannedOperator
    lale_num_samples : integer, optional
        If set, will limit the number of samples for each distribution
    lale_num_grids : integer or float, optional
        If set to an integer >= 1, it will determine how many parameter
        grids will be returned (at most).
        If set to a float between 0 and 1, it will determine what fraction
        should be returned.
        Note that setting it to 1 is treated as an integer.  To return all
        results, use None.
    lale_pgo : PGO, optional
        If set, passed to the grid generation to bias it with
        profile-guided-optimization data.
    kwargs : Additional keyword arguments, forwarded to GridSearchCV.
    """

    params = get_parameter_grids(op, num_samples=lale_num_samples, num_grids=lale_num_grids, pgo=lale_pgo)
    # An individual operator with no generated grids still needs one grid of
    # its default hyperparameters so the grid search has something to fit.
    if not params and isinstance(op, Ops.IndividualOp):
        params = [get_defaults_as_param_grid(op)]
    return get_lale_gridsearchcv_op(make_sklearn_compat(op), params, **kwargs)
# Example #11
def get_lale_gridsearchcv_op(op, params, **kwargs):
    """Construct a GridSearchCV over the sklearn-compat wrapper of *op*."""
    return GridSearchCV(make_sklearn_compat(op), params, **kwargs)
# Example #12
def fit_clone_fit(op):
    """Fit the sklearn-compat wrapper of *op*, clone it, refit the clone,
    and print the refit result."""
    compat = make_sklearn_compat(op)
    compat.fit(X=[1, 2], y=[1, 2])
    refit_result = clone(compat).fit(X=[3, 4], y=[3, 4])
    print(refit_result)
# Example #13
def get_lale_gridsearchcv_op(op, params, **kwargs):
    """Construct a sklearn GridSearchCV over the compat wrapper of *op*."""
    wrapped = make_sklearn_compat(op)
    return sklearn.model_selection.GridSearchCV(wrapped, params, **kwargs)
# Example #14
 def __init__(self, estimator, **hyperparams):
     """Record *estimator* (made sklearn-compatible) together with any
     extra keyword hyperparameters, and build the underlying ``SKLModel``
     from the resulting dict."""
     self._hyperparams = {"estimator": make_sklearn_compat(estimator), **hyperparams}
     self._wrapped_model = SKLModel(**self._hyperparams)
# Example #15
 def __init__(self, operator=None, resampler=None):
     """Keep the resampler as-is and store *operator* wrapped for
     scikit-learn compatibility."""
     self.resampler = resampler
     self.operator = make_sklearn_compat(operator)