Example No. 1
def create_transformation(self) -> transform.Transformation:
    return transform.Chain(
        trans=[
            transform.AsNumpyArray(
                field=FieldName.TARGET, expected_ndim=1
            ),
            transform.AddTimeFeatures(
                start_field=FieldName.START,
                target_field=FieldName.TARGET,
                output_field=FieldName.FEAT_TIME,
                time_features=time_features_from_frequency_str(self.freq),
                pred_length=self.prediction_length,
            ),
            transform.VstackFeatures(
                output_field=FieldName.FEAT_DYNAMIC_REAL,
                input_fields=[FieldName.FEAT_TIME],
            ),
            transform.SetFieldIfNotPresent(
                field=FieldName.FEAT_STATIC_CAT, value=[0.0]
            ),
            transform.AsNumpyArray(
                field=FieldName.FEAT_STATIC_CAT, expected_ndim=1
            ),
            transform.InstanceSplitter(
                target_field=FieldName.TARGET,
                is_pad_field=FieldName.IS_PAD,
                start_field=FieldName.START,
                forecast_start_field=FieldName.FORECAST_START,
                train_sampler=ExpectedNumInstanceSampler(num_instances=1),
                past_length=self.context_length,
                future_length=self.prediction_length,
                time_series_fields=[FieldName.FEAT_DYNAMIC_REAL],
            ),
        ]
    )
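These `create_transformation` methods are shown without their surrounding estimator class or imports. A plausible set of imports for the GluonTS releases these snippets appear to target is sketched below; the exact module paths vary between versions, so treat them as assumptions rather than the definitive layout.

# Assumed imports for the snippets in this listing (module paths may differ per GluonTS release).
import numpy as np

from gluonts import transform
from gluonts.dataset.field_names import FieldName
from gluonts.time_feature import time_features_from_frequency_str
from gluonts.transform import ExpectedNumInstanceSampler

The returned `transform.Chain` is applied lazily, using the calling pattern shown in Example No. 4 below: `transformation(iter(dataset), is_train=True)` yields the transformed entries, and the trailing `InstanceSplitter` cuts them into past/future training windows.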
Example No. 2
def create_transformation(self) -> transform.Transformation:
    return transform.Chain(
        trans=[
            transform.AsNumpyArray(
                field=FieldName.TARGET, expected_ndim=1
            ),
            transform.AddTimeFeatures(
                start_field=FieldName.START,
                target_field=FieldName.TARGET,
                output_field=FieldName.FEAT_TIME,
                time_features=time_features_from_frequency_str(self.freq),
                pred_length=self.prediction_length,
            ),
            transform.VstackFeatures(
                output_field=FieldName.FEAT_DYNAMIC_REAL,
                input_fields=[FieldName.FEAT_TIME],
            ),
            transform.SetFieldIfNotPresent(
                field=FieldName.FEAT_STATIC_CAT, value=[0.0]
            ),
            transform.AsNumpyArray(
                field=FieldName.FEAT_STATIC_CAT, expected_ndim=1
            ),
        ]
    )
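This variant stops before the `InstanceSplitter`, so it only decorates each series with time features, stacks them into FEAT_DYNAMIC_REAL, and fills in a default static categorical field; no past/future windows are produced. Below is a minimal sketch of running such a chain over a toy dataset, assuming the imports above and that `chain` holds the `transform.Chain` returned by the method.

from gluonts.dataset.common import ListDataset

ds = ListDataset(
    [{"start": "2020-01-01", "target": [1.0, 2.0, 3.0, 4.0, 5.0]}],
    freq="1D",
)

# `chain` is assumed to be the Transformation returned by create_transformation().
for entry in chain(iter(ds), is_train=True):
    print(entry[FieldName.FEAT_DYNAMIC_REAL].shape)  # (num_time_features, series_length)
    print(entry[FieldName.FEAT_STATIC_CAT])          # default [0.0] added by SetFieldIfNotPresent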
Example No. 3
def test_map_transformation():
    tran = transform.VstackFeatures(
        output_field="dynamic_feat",
        input_fields=["age", "time_feat"],
        drop_inputs=True,
    )

    assert equals(tran, clone(tran))
    assert not equals(tran, clone(tran, {"drop_inputs": False}))
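`equals` and `clone` are helpers from the GluonTS test suite (structural equality, and re-construction with overridden constructor arguments) and are not reproduced here. To see what the transformation itself does, here is a minimal, assumed usage sketch that applies `tran` to a hand-built entry, using the calling pattern from Example No. 4.

import numpy as np

# Toy entry: "age" has shape (1, 5), "time_feat" has shape (2, 5).
entry = {
    "age": np.arange(5, dtype=float).reshape(1, 5),
    "time_feat": np.ones((2, 5)),
}

out = next(tran(iter([entry]), is_train=True))
print(out["dynamic_feat"].shape)          # (3, 5): input arrays stacked along the feature axis
print("age" in out, "time_feat" in out)   # False False: drop_inputs=True removes the input fields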
Example No. 4
def test_Transformation():
    train_length = 100
    ds = gluonts.dataset.common.ListDataset(
        [{"start": "2012-01-01", "target": [0.2] * train_length}], freq="1D"
    )

    pred_length = 10

    t = transform.Chain(
        trans=[
            transform.AddTimeFeatures(
                start_field=transform.FieldName.START,
                target_field=transform.FieldName.TARGET,
                output_field="time_feat",
                time_features=[
                    time_feature.DayOfWeek(),
                    time_feature.DayOfMonth(),
                    time_feature.MonthOfYear(),
                ],
                pred_length=pred_length,
            ),
            transform.AddAgeFeature(
                target_field=transform.FieldName.TARGET,
                output_field="age",
                pred_length=pred_length,
                log_scale=True,
            ),
            transform.AddObservedValuesIndicator(
                target_field=transform.FieldName.TARGET,
                output_field="observed_values",
            ),
            transform.VstackFeatures(
                output_field="dynamic_feat",
                input_fields=["age", "time_feat"],
                drop_inputs=True,
            ),
            transform.InstanceSplitter(
                target_field=transform.FieldName.TARGET,
                is_pad_field=transform.FieldName.IS_PAD,
                start_field=transform.FieldName.START,
                forecast_start_field=transform.FieldName.FORECAST_START,
                train_sampler=transform.ExpectedNumInstanceSampler(
                    num_instances=4
                ),
                past_length=train_length,
                future_length=pred_length,
                time_series_fields=["dynamic_feat", "observed_values"],
            ),
        ]
    )

    assert_serializable(t)

    for u in t(iter(ds), is_train=True):
        print(u)
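`assert_serializable` is another helper from the GluonTS test suite and is not shown in this snippet. A hypothetical stand-in, assuming the `dump_json`/`load_json` round-trip in `gluonts.core.serde` and the `equals` check in `gluonts.core.component` (verify both names against the installed version), could look like this:

from gluonts.core import serde
from gluonts.core.component import equals


def assert_serializable(x):
    # Hypothetical helper: serialize to JSON, rebuild, and require structural equality.
    roundtripped = serde.load_json(serde.dump_json(x))
    assert equals(x, roundtripped)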
Example No. 5
@pytest.mark.parametrize("is_train", [True, False])  # pytest supplies is_train for both branches below
def test_multi_dim_transformation(is_train):
    train_length = 10

    first_dim = np.arange(1, 11, 1).tolist()
    first_dim[-1] = "NaN"

    second_dim = np.arange(11, 21, 1).tolist()
    second_dim[0] = "NaN"

    ds = gluonts.dataset.common.ListDataset(
        data_iter=[{"start": "2012-01-01", "target": [first_dim, second_dim]}],
        freq="1D",
        one_dim_target=False,
    )
    pred_length = 2

    # Looks weird - but this is necessary to assert the nan entries correctly.
    first_dim[-1] = np.nan
    second_dim[0] = np.nan

    t = transform.Chain(
        trans=[
            transform.AddTimeFeatures(
                start_field=transform.FieldName.START,
                target_field=transform.FieldName.TARGET,
                output_field="time_feat",
                time_features=[
                    time_feature.DayOfWeek(),
                    time_feature.DayOfMonth(),
                    time_feature.MonthOfYear(),
                ],
                pred_length=pred_length,
            ),
            transform.AddAgeFeature(
                target_field=transform.FieldName.TARGET,
                output_field="age",
                pred_length=pred_length,
                log_scale=True,
            ),
            transform.AddObservedValuesIndicator(
                target_field=transform.FieldName.TARGET,
                output_field="observed_values",
                convert_nans=False,
            ),
            transform.VstackFeatures(
                output_field="dynamic_feat",
                input_fields=["age", "time_feat"],
                drop_inputs=True,
            ),
            transform.InstanceSplitter(
                target_field=transform.FieldName.TARGET,
                is_pad_field=transform.FieldName.IS_PAD,
                start_field=transform.FieldName.START,
                forecast_start_field=transform.FieldName.FORECAST_START,
                train_sampler=transform.ExpectedNumInstanceSampler(
                    num_instances=4
                ),
                past_length=train_length,
                future_length=pred_length,
                time_series_fields=["dynamic_feat", "observed_values"],
                output_NTC=False,
            ),
        ]
    )

    assert_serializable(t)

    if is_train:
        for u in t(iter(ds), is_train=True):
            assert_shape(u["past_target"], (2, 10))
            assert_shape(u["past_dynamic_feat"], (4, 10))
            assert_shape(u["past_observed_values"], (2, 10))
            assert_shape(u["future_target"], (2, 2))

            assert_padded_array(
                u["past_observed_values"],
                np.array([[1.0] * 9 + [0.0], [0.0] + [1.0] * 9]),
                u["past_is_pad"],
            )
            assert_padded_array(
                u["past_target"],
                np.array([first_dim, second_dim]),
                u["past_is_pad"],
            )
    else:
        for u in t(iter(ds), is_train=False):
            assert_shape(u["past_target"], (2, 10))
            assert_shape(u["past_dynamic_feat"], (4, 10))
            assert_shape(u["past_observed_values"], (2, 10))
            assert_shape(u["future_target"], (2, 0))

            assert_padded_array(
                u["past_observed_values"],
                np.array([[1.0] * 9 + [0.0], [0.0] + [1.0] * 9]),
                u["past_is_pad"],
            )
            assert_padded_array(
                u["past_target"],
                np.array([first_dim, second_dim]),
                u["past_is_pad"],
            )
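`assert_shape` and `assert_padded_array` are likewise local test helpers. A hypothetical version consistent with how they are used above (exact shape match, and comparison of the non-padded part of each left-padded past window against the corresponding prefix of the reference series) might be:

def assert_shape(array, expected_shape):
    # Hypothetical helper: exact shape equality.
    assert array.shape == expected_shape, f"{array.shape} != {expected_shape}"


def assert_padded_array(sampled, reference, is_pad):
    # Hypothetical helper: drop the left-padded columns, then compare against the
    # matching prefix of the reference array (NaN positions compare as equal).
    num_pad = int(is_pad.sum())
    observed = sampled[..., num_pad:]
    np.testing.assert_array_equal(observed, reference[..., : observed.shape[-1]])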