def test_boxcox_transform():
    """Test TabularToSeriesAdaptor(PowerTransformer) against scipy's boxcox."""
    y = load_airline()
    adaptor = TabularToSeriesAdaptor(
        PowerTransformer(method="box-cox", standardize=False)
    )
    transformed = adaptor.fit_transform(y)
    # scipy.stats.boxcox returns (transformed_data, fitted_lambda)
    expected, _ = boxcox(np.asarray(y))
    np.testing.assert_array_equal(transformed, expected)
def compute_expected_y_pred(y_train, fh):
    """Compute expected forecast by applying each pipeline step manually.

    Fits ExponentTransformer, then MinMaxScaler (adapted), then a
    NaiveForecaster, and inverts the transformations on the prediction
    in reverse order.
    """
    # fit: exponent transform -> min-max scaling -> naive forecaster
    exponent = ExponentTransformer()
    scaler = TabularToSeriesAdaptor(MinMaxScaler())
    transformed = exponent.fit_transform(y_train.copy())
    transformed = scaler.fit_transform(transformed)
    forecaster = NaiveForecaster()
    forecaster.fit(transformed, fh=fh)

    # predict, then undo the transformations in reverse order
    y_pred = forecaster.predict()
    y_pred = scaler.inverse_transform(y_pred)
    y_pred = exponent.inverse_transform(y_pred)
    return y_pred
def test_pipeline():
    """Test results of TransformedTargetForecaster.

    Compares the pipeline's prediction against the expected result computed
    by applying each step manually via the module-level
    ``compute_expected_y_pred`` helper.
    """
    y = load_airline()
    y_train, y_test = temporal_train_test_split(y)
    forecaster = TransformedTargetForecaster(
        [
            ("t1", ExponentTransformer()),
            ("t2", TabularToSeriesAdaptor(MinMaxScaler())),
            ("forecaster", NaiveForecaster()),
        ]
    )
    # forecast horizon covering the full test period
    fh = np.arange(len(y_test)) + 1
    forecaster.fit(y_train, fh=fh)
    actual = forecaster.predict()

    # use the module-level helper; the previous nested copy of
    # compute_expected_y_pred duplicated it verbatim and is removed
    expected = compute_expected_y_pred(y_train, fh)
    np.testing.assert_array_equal(actual, expected)
def get_test_params(cls, parameter_set="default"):
    """Return testing parameter settings for the estimator.

    Parameters
    ----------
    parameter_set : str, default="default"
        Name of the set of test parameters to return, for use in tests. If
        no special parameters are defined for a value, will return
        `"default"` set.

    Returns
    -------
    params : dict or list of dict, default = {}
        Parameters to create testing instances of the class
        Each dict are parameters to construct an "interesting" test instance,
        i.e., `MyClass(**params)` or `MyClass(**params[i])` creates a valid
        test instance.
        `create_test_instance` uses the first (or only) dictionary in `params`
    """
    from sklearn.preprocessing import StandardScaler

    from sktime.forecasting.naive import NaiveForecaster
    from sktime.transformations.series.adapt import TabularToSeriesAdaptor
    from sktime.transformations.series.boxcox import BoxCoxTransformer

    # pipeline with a tabular sklearn transformer wrapped in an adaptor
    STEPS1 = [
        ("transformer", TabularToSeriesAdaptor(StandardScaler())),
        ("forecaster", NaiveForecaster()),
    ]
    params1 = {"steps": STEPS1}

    # pipeline with a native sktime series transformer
    STEPS2 = [
        ("transformer", BoxCoxTransformer()),
        ("forecaster", NaiveForecaster()),
    ]
    params2 = {"steps": STEPS2}

    return [params1, params2]
def get_test_params(cls, parameter_set="default"):
    """Return testing parameter settings for the estimator.

    Parameters
    ----------
    parameter_set : str, default="default"
        Name of the set of test parameters to return, for use in tests. If
        no special parameters are defined for a value, will return
        `"default"` set.

    Returns
    -------
    params : dict or list of dict, default = {}
        Parameters to create testing instances of the class
        Each dict are parameters to construct an "interesting" test instance,
        i.e., `MyClass(**params)` or `MyClass(**params[i])` creates a valid
        test instance.
        `create_test_instance` uses the first (or only) dictionary in `params`
    """
    from sklearn.preprocessing import StandardScaler

    from sktime.forecasting.arima import ARIMA
    from sktime.forecasting.naive import NaiveForecaster
    from sktime.transformations.series.adapt import TabularToSeriesAdaptor
    from sktime.transformations.series.exponent import ExponentTransformer

    # StandardScaler does not skip fit, NaiveForecaster is not probabilistic
    scaler_steps = [
        ("transformer", TabularToSeriesAdaptor(StandardScaler())),
        ("forecaster", NaiveForecaster()),
    ]
    # ARIMA has probabilistic methods, ExponentTransformer skips fit
    exponent_steps = [
        ("transformer", ExponentTransformer()),
        ("forecaster", ARIMA()),
    ]
    return [{"steps": scaler_steps}, {"steps": exponent_steps}]
def __rmul__(self, other):
    """Magic * method, return concatenated ClassifierPipeline, transformers on left.

    Implemented for `other` being a transformer, otherwise returns `NotImplemented`.

    Parameters
    ----------
    other: `sktime` transformer, must inherit from BaseTransformer
        otherwise, `NotImplemented` is returned

    Returns
    -------
    ClassifierPipeline object, concatenation of `other` (first) with `self` (last).
    """
    from sktime.classification.compose import ClassifierPipeline
    from sktime.transformations.base import BaseTransformer
    from sktime.transformations.compose import TransformerPipeline
    from sktime.transformations.series.adapt import TabularToSeriesAdaptor

    # behaviour is implemented only if other inherits from BaseTransformer
    # in that case, distinctions arise from whether self or other is a pipeline
    # todo: this can probably be simplified further with "zero length" pipelines
    if isinstance(other, BaseTransformer):
        if isinstance(self, ClassifierPipeline):
            # ClassifierPipeline already has the dunder method defined
            return other * self
        if isinstance(other, TransformerPipeline):
            # other is a pipeline but self is not: unwrap other's steps
            return ClassifierPipeline(classifier=self, transformers=other.steps)
        # neither self nor other is a pipeline: wrap other as a single step
        return ClassifierPipeline(classifier=self, transformers=[other])
    if is_sklearn_transformer(other):
        # raw sklearn transformer: adapt it to a series transformer first
        return TabularToSeriesAdaptor(other) * self
    return NotImplemented
"transformer2", SeriesToSeriesRowTransformer(SERIES_TO_SERIES_TRANSFORMER, check_transformer=False), ), ] REGRESSOR = LinearRegression() ANOMALY_DETECTOR = KNN() TIME_SERIES_CLASSIFIER = TSFC(n_estimators=3) TIME_SERIES_CLASSIFIERS = [ ("tsf1", TIME_SERIES_CLASSIFIER), ("tsf2", TIME_SERIES_CLASSIFIER), ] FORECASTER = NaiveForecaster() FORECASTERS = [("f1", FORECASTER), ("f2", FORECASTER)] STEPS = [ ("transformer", TabularToSeriesAdaptor(StandardScaler())), ("forecaster", NaiveForecaster()), ] ESTIMATOR_TEST_PARAMS = { ColumnEnsembleForecaster: { "forecasters": FORECASTER }, OnlineEnsembleForecaster: { "forecasters": FORECASTERS }, FeatureUnion: { "transformer_list": TRANSFORMERS }, DirectTabularRegressionForecaster: { "estimator": REGRESSOR },