Example #1
    def test_regressor(self):
        # `clf_name` is the fully qualified name of the regressor class under
        # test (e.g. "lale.lib.sklearn.Ridge"); it is supplied by the test
        # harness that generates one such method per operator.
        import importlib

        # Used below; in the original suite these are presumably module-level imports.
        import lale.type_checking
        from lale.lib.lale import NoOp
        from lale.lib.sklearn import SGDRegressor

        X_train, y_train = self.X_train, self.y_train

        module_name = ".".join(clf_name.split(".")[0:-1])
        class_name = clf_name.split(".")[-1]
        module = importlib.import_module(module_name)

        class_ = getattr(module, class_name)
        regr = None
        if class_name in ["StackingRegressor", "VotingRegressor"]:
            regr = class_(estimators=[("base", SGDRegressor())])
        else:
            regr = class_()

        # test_schemas_are_schemas
        lale.type_checking.validate_is_schema(regr.input_schema_fit())
        lale.type_checking.validate_is_schema(regr.input_schema_predict())
        lale.type_checking.validate_is_schema(regr.output_schema_predict())
        lale.type_checking.validate_is_schema(regr.hyperparam_schema())

        # test_init_fit_predict
        trained = regr.fit(self.X_train, self.y_train)
        _ = trained.predict(self.X_test)

        # test score
        _ = trained.score(self.X_test, self.y_test)

        # test_predict_on_trainable
        trained = regr.fit(X_train, y_train)
        regr.predict(X_train)

        # test_to_json
        regr.to_json()

        # test_in_a_pipeline
        pipeline = NoOp() >> regr
        trained = pipeline.fit(self.X_train, self.y_train)
        _ = trained.predict(self.X_test)

        # test_with_hyperopt
        from lale.lib.sklearn.ridge import Ridge

        if isinstance(regr, Ridge):  # type: ignore
            from lale.lib.lale import Hyperopt

            hyperopt = Hyperopt(estimator=pipeline, max_evals=1)
            trained = hyperopt.fit(self.X_train, self.y_train)
            _ = trained.predict(self.X_test)
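
Example #1 references a free variable `clf_name`, so it is meant to be wrapped by a test-generation harness rather than run as-is. Below is a minimal sketch of such a harness, using hypothetical names (`_make_test_regressor`, `TestCoreRegressors`) that are not part of the example; the real suite may wire this up differently.

import unittest


def _make_test_regressor(clf_name):
    # Bind clf_name via a closure and return a method with a unique name,
    # so unittest discovers one test per regressor.
    def test_regressor(self):
        ...  # body of Example #1, with clf_name taken from the enclosing scope

    test_regressor.__name__ = "test_" + clf_name.split(".")[-1]
    return test_regressor


class TestCoreRegressors(unittest.TestCase):
    pass


for name in ["lale.lib.sklearn.Ridge", "lale.lib.sklearn.SGDRegressor"]:
    method = _make_test_regressor(name)
    setattr(TestCoreRegressors, method.__name__, method)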
Example #2
    def test_sgd_regressor_3(self):
        # Assumed import, mirroring Example #6 below (raw scikit-learn estimator).
        from sklearn.linear_model import SGDRegressor

        reg = SGDRegressor(l1_ratio=0.2, penalty="l1")
        reg.fit(self.X_train, self.y_train)
Example #3
    def test_sgd_regressor_1(self):
        # Assumed import, mirroring Example #8 below (the lale wrapper).
        from lale.lib.sklearn import SGDRegressor

        reg = SGDRegressor(learning_rate="optimal", eta0=0.2)
        reg.fit(self.X_train, self.y_train)
Example #4
    def test_sgd_regressor_2(self):
        # Assumed import, mirroring Example #7 below (the lale wrapper).
        from lale.lib.sklearn import SGDRegressor

        reg = SGDRegressor(early_stopping=False, validation_fraction=0.2)
        reg.fit(self.X_train, self.y_train)
Example #5
    def test_sgd_regressor(self):
        # Assumed import, mirroring Example #9 below (the lale wrapper).
        from lale.lib.sklearn import SGDRegressor

        reg = SGDRegressor(loss="squared_loss", epsilon=0.2)
        reg.fit(self.X_train, self.y_train)
Example #6
    def test_sgd_regressor_3(self):
        from sklearn.linear_model import SGDRegressor

        reg = SGDRegressor(l1_ratio=0.2, penalty='l1')
        reg.fit(self.X_train, self.y_train)
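
Example #6 constructs the raw scikit-learn estimator, while Examples #7 through #9 use the lale wrapper of the same name. A minimal sketch of what the wrapper adds, assuming lale is installed; these are the same schema and pipeline APIs already exercised in Example #1.

import lale.type_checking
from lale.lib.lale import NoOp
from lale.lib.sklearn import SGDRegressor

# The lale operator carries a JSON hyperparameter schema that can be inspected
# and validated, and it composes into pipelines with the `>>` combinator.
lale.type_checking.validate_is_schema(SGDRegressor.hyperparam_schema())
pipeline = NoOp() >> SGDRegressor()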
Example #7
    def test_sgd_regressor_2(self):
        from lale.lib.sklearn import SGDRegressor

        reg = SGDRegressor(early_stopping=False, validation_fraction=0.2)
        reg.fit(self.X_train, self.y_train)
Example #8
    def test_sgd_regressor_1(self):
        from lale.lib.sklearn import SGDRegressor

        reg = SGDRegressor(learning_rate='optimal', eta0=0.2)
        reg.fit(self.X_train, self.y_train)
Example #9
    def test_sgd_regressor(self):
        from lale.lib.sklearn import SGDRegressor

        # Note: scikit-learn later renamed "squared_loss" to "squared_error"
        # and removed the old name in version 1.2.
        reg = SGDRegressor(loss='squared_loss', epsilon=0.2)
        reg.fit(self.X_train, self.y_train)
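
All of the examples assume that `self.X_train`, `self.X_test`, `self.y_train`, and `self.y_test` are prepared by the test fixture. A minimal sketch of such a fixture, assuming a scikit-learn toy regression dataset; the dataset and split used by the original suite may differ.

import unittest

from sklearn.datasets import load_diabetes
from sklearn.model_selection import train_test_split


class RegressorTestBase(unittest.TestCase):
    def setUp(self):
        # Hypothetical fixture: any small regression dataset works here.
        X, y = load_diabetes(return_X_y=True)
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(
            X, y, test_size=0.2, random_state=42
        )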