Example #1
 def test_transform(self):
     ri = ReversibleImputer()
     X = np.array([1, 1, 100, 0, 2], dtype=np.float64)[:, np.newaxis]
     X[2] = np.nan
     Xt = ri.fit_transform(X)
     expected = np.array([1, 1, 1, 0, 2], dtype=np.float64)[:, np.newaxis]
     assert np.allclose(expected, Xt)
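
The examples on this page exercise the skits time-series library alongside scikit-learn, but none of the snippets show their imports. The following is a sketch of what they would need; the skits module paths are my assumption from the package layout, not something stated in the snippets.

import numpy as np

from sklearn.linear_model import LinearRegression, LogisticRegression
from sklearn.multioutput import MultiOutputRegressor
from sklearn.pipeline import FeatureUnion
from sklearn.preprocessing import MinMaxScaler, StandardScaler

# skits module paths below are an assumption, not shown in the snippets
from skits.feature_extraction import AutoregressiveTransformer, SeasonalTransformer
from skits.pipeline import ClassifierPipeline, ForecasterPipeline
from skits.preprocessing import DifferenceTransformer, HorizonTransformer, ReversibleImputer

# Example #4 additionally relies on xgboost, torch and skorch, plus a user-defined
# TemporalConvNet module that comes from neither package:
# import torch
# from xgboost import XGBRegressor
# from skorch import NeuralNetRegressor
# from skorch.callbacks import GradientNormClipping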
Example #2

def make_pipeline(model):
    # Difference twice, impute the gaps each differencing creates, standardize,
    # build autoregressive and seasonal features, rescale, then fit the supplied model.
    pipeline = ForecasterPipeline([
        ('pre_differencer', DifferenceTransformer(period=1)),
        ('pre_diff_imputer', ReversibleImputer()),
        ('pre_day_differencer', DifferenceTransformer(period=1)),
        ('pre_day_diff_imputer', ReversibleImputer()),
        ('pre_scaler', StandardScaler()),
        ('features', FeatureUnion([
            ('ar_features', AutoregressiveTransformer(num_lags=1)),
            ('seasonal_features', SeasonalTransformer(seasonal_period=1)),
        ])),
        ('post_feature_imputer', ReversibleImputer()),
        ('post_feature_scaler', StandardScaler()),
        # use the model passed in rather than a hard-coded regressor
        ('model', model),
    ])

    return pipeline
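
A minimal usage sketch for make_pipeline, following the fit/predict conventions used by the test examples further down; the synthetic series here is my own illustration, not part of the original function.

pipeline = make_pipeline(LinearRegression(fit_intercept=False))

t = np.linspace(0, 1, 100)
y = np.sin(2 * np.pi * 5 * t)

# skits pipelines take the series as a column vector for X and the raw 1-D series for y
pipeline.fit(y[:, np.newaxis], y)
y_pred = pipeline.predict(y[:, np.newaxis], to_scale=True, refit=True)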
Example #3
    def test_multioutput_prediction(self):
        # TODO: Make this a real test

        steps = [('pre_horizon', HorizonTransformer(horizon=4)),
                 ('pre_imputer', ReversibleImputer(y_only=True)),
                 ('features',
                  FeatureUnion([('ar_transformer',
                                 AutoregressiveTransformer(num_lags=3))])),
                 ('post_lag_imputer', ReversibleImputer()),
                 ('regressor', LinearRegression())]

        pipeline = ForecasterPipeline(steps)

        l = np.linspace(0, 1, 100)
        y = np.sin(2 * np.pi * 5 * l) + np.random.normal(0, .1, size=100)

        pipeline.fit(y[:, np.newaxis], y)

        pipeline.predict(y[:, np.newaxis], to_scale=True, refit=True)
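
For context, HorizonTransformer(horizon=4) reshapes the 1-D target into a matrix of 4 consecutive future values per time step, which is what makes LinearRegression act as a multi-output model here. A hand-rolled sketch of that windowing idea, as an illustration only rather than the skits implementation (which also produces the NaN padding that ReversibleImputer(y_only=True) later fills in):

y = np.arange(8, dtype=float)
horizon = 4
# row i holds the window y[i:i + horizon]; the trailing rows that lack enough
# future values are the ones the imputer would have to deal with
Y = np.vstack([y[i:i + horizon] for i in range(len(y) - horizon + 1)])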
Example #4
 def get_pipeline(self):
     regressor = None
     if self.learning_method == "linear":
         regressor = MultiOutputRegressor(LinearRegression(fit_intercept=self.fit_intercept),
                                          n_jobs=6)
     elif self.learning_method == "booster":
         regressor = MultiOutputRegressor(XGBRegressor(n_jobs=12,
                                                       n_estimators=self.no_estimators))
     elif self.learning_method == "deep":
         regressor = NeuralNetRegressor(
             module=TemporalConvNet,
             module__num_inputs=1,
             module__num_channels=[2] * self.no_channels,
             module__output_sz=self.horizon,
             module__kernel_size=5,
             module__dropout=0.0,
             max_epochs=60,
             batch_size=256,
             lr=2e-3,
             optimizer=torch.optim.Adam,
             device='cpu',
             iterator_train__shuffle=True,
             callbacks=[GradientNormClipping(gradient_clip_value=1,
                                             gradient_clip_norm_type=2)],
             train_split=None,
         )
     return ForecasterPipeline([
         # Convert the `y` target into a horizon
         ('pre_horizon', HorizonTransformer(horizon=self.horizon)),
         ('pre_reversible_imputer', ReversibleImputer(y_only=True)),
         ('features', FeatureUnion([
             # Generate a week's worth of autoregressive features
             ('ar_features', AutoregressiveTransformer(
                 num_lags=int(self.horizon * self.num_lags), pred_stride=self.pred_stride)),
         ])),
         ('post_feature_imputer', ReversibleImputer()),
         ('regressor', regressor)
     ])
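
A hedged usage sketch for the pipeline returned by get_pipeline; model_config and load_series are hypothetical stand-ins for the owning object and the data-loading code, neither of which appears in the snippet.

# model_config: an instance of the class that defines get_pipeline, with
# learning_method, horizon, num_lags, pred_stride, etc. already set
forecaster = model_config.get_pipeline()

y = load_series()                        # hypothetical 1-D numpy array of observations
forecaster.fit(y[:, np.newaxis], y)
y_pred = forecaster.predict(y[:, np.newaxis], to_scale=True)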
Example #5
    def test_multioutput_forecast(self):
        # TODO: Make this a real test

        steps = [
            ("pre_horizon", HorizonTransformer(horizon=4)),
            ("pre_imputer", ReversibleImputer(y_only=True)),
            (
                "features",
                FeatureUnion([("ar_transformer",
                               AutoregressiveTransformer(num_lags=3))]),
            ),
            ("post_lag_imputer", ReversibleImputer()),
            ("regressor", LinearRegression()),
        ]

        pipeline = ForecasterPipeline(steps)

        l = np.linspace(0, 1, 100)
        y = np.sin(2 * np.pi * 5 * l) + np.random.normal(0, 0.1, size=100)

        pipeline.fit(y[:, np.newaxis], y)

        pipeline.forecast(y[:, np.newaxis], 20)
Example #6
class TestPipelines:

    steps = [
        ('pre_differencer', DifferenceTransformer(period=1)),
        ('pre_imputer_1', ReversibleImputer()),
        ('features',
         FeatureUnion([
             ('ar_transformer', AutoregressiveTransformer(num_lags=3)),
             ('seasonal_transformer', SeasonalTransformer(seasonal_period=4))
         ])),
        ('post_lag_imputer_2', ReversibleImputer()),
    ]

    dt = DifferenceTransformer(period=1)
    ri1 = ReversibleImputer()
    fe = FeatureUnion([
        ('ar_transformer', AutoregressiveTransformer(num_lags=3)),
        ('seasonal_transformer', SeasonalTransformer(seasonal_period=4))
    ])
    ri2 = ReversibleImputer()

    def test_predict(self):
        # Let's just see if it works
        # TODO: Make this a real test
        np.random.seed(SEED)
        l = np.linspace(0, 1, 100)
        y = np.sin(2 * np.pi * 5 * l) + np.random.normal(0, .1, size=100)

        # Ignore the DifferenceTransformer. It's actually bad.
        steps = list(self.steps[1:])
        steps.append(('regressor', LinearRegression(fit_intercept=False)))

        pipeline = ForecasterPipeline(steps)

        pipeline.fit(y[:, np.newaxis], y)
        y_pred = pipeline.predict(y[:, np.newaxis], to_scale=True, refit=True)
        assert np.mean((y_pred - y.squeeze())**2) < 0.05

    def test_forecast(self):
        # Let's just see if it works
        # TODO: Make this a real test

        l = np.linspace(0, 1, 100)
        y = np.sin(2 * np.pi * 5 * l) + np.random.normal(0, .1, size=100)

        steps = list(self.steps)
        steps.append(('regressor', LinearRegression(fit_intercept=False)))

        pipeline = ForecasterPipeline(steps)
        pipeline.fit(y[:, np.newaxis], y)

        pipeline.forecast(y[:, np.newaxis], 20)

    def test_classifier(self):
        # Let's just see if it works
        # TODO: Make this a real test
        np.random.seed(SEED)

        l = np.linspace(0, 1, 100)
        y = np.sin(2 * np.pi * 5 * l) + np.random.normal(0, .1, size=100)

        steps = list(self.steps)
        steps.append(('classifier',
                      LogisticRegression(solver='lbfgs', fit_intercept=False)))

        pipeline = ClassifierPipeline(steps)

        y_true = y > 0
        pipeline.fit(y[:, np.newaxis], y_true)
        y_pred = pipeline.predict(y[:, np.newaxis])
        assert (y_pred == y_true).mean() > 0.75

    def test_multioutput_prediction(self):
        # TODO: Make this a real test

        steps = [('pre_horizon', HorizonTransformer(horizon=4)),
                 ('pre_imputer', ReversibleImputer(y_only=True)),
                 ('features',
                  FeatureUnion([('ar_transformer',
                                 AutoregressiveTransformer(num_lags=3))])),
                 ('post_lag_imputer', ReversibleImputer()),
                 ('regressor', LinearRegression())]

        pipeline = ForecasterPipeline(steps)

        l = np.linspace(0, 1, 100)
        y = np.sin(2 * np.pi * 5 * l) + np.random.normal(0, .1, size=100)

        pipeline.fit(y[:, np.newaxis], y)

        pipeline.predict(y[:, np.newaxis], to_scale=True, refit=True)

    def test_multioutput_forecast(self):
        # TODO: Make this a real test

        steps = [('pre_horizon', HorizonTransformer(horizon=4)),
                 ('pre_imputer', ReversibleImputer(y_only=True)),
                 ('features',
                  FeatureUnion([('ar_transformer',
                                 AutoregressiveTransformer(num_lags=3))])),
                 ('post_lag_imputer', ReversibleImputer()),
                 ('regressor', LinearRegression())]

        pipeline = ForecasterPipeline(steps)

        l = np.linspace(0, 1, 100)
        y = np.sin(2 * np.pi * 5 * l) + np.random.normal(0, .1, size=100)

        pipeline.fit(y[:, np.newaxis], y)

        pipeline.forecast(y[:, np.newaxis], 20)
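
The test class references a module-level SEED constant that is not included in the snippet; running it standalone would need a definition such as the following, where the value is an arbitrary placeholder.

SEED = 42  # placeholder; the original module defines SEED elsewhere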
Example #7
 def test_inverse_transform(self):
     ri = ReversibleImputer()
     X = np.random.random(20)[:, np.newaxis]
     X[[0, 5, 13], :] = np.nan
     X_inv = ri.inverse_transform(ri.fit_transform(X))
     assert np.allclose(X, X_inv, equal_nan=True)
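
The round trip works because the imputer remembers which positions it filled. A hand-rolled sketch of that idea, as an illustration only rather than the skits implementation:

class TinyReversibleImputer:
    """Mean-impute NaNs and remember where they were so they can be restored."""

    def fit_transform(self, X):
        self.mask_ = np.isnan(X)
        self.values_ = X[self.mask_]            # the original (NaN) entries
        Xt = X.copy()
        Xt[self.mask_] = np.nanmean(X)
        return Xt

    def inverse_transform(self, X):
        X = X.copy()
        X[self.mask_] = self.values_            # put the originals back
        return X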