def test_inference_quantile_prediction(quantiles, inference_quantiles):
    """Train an IQF-enabled MQCNN and validate inference-time quantiles.

    Checks that (a) each requested inference quantile yields a prediction of
    length ``prediction_length`` and (b) predictions are monotonically
    non-decreasing across sorted quantile levels (no quantile crossing).

    Parameters (injected by pytest parametrization):
        quantiles: quantile levels the model is trained on.
        inference_quantiles: quantile levels queried at prediction time.
    """
    hps = {
        "seed": 42,
        "freq": "D",
        "prediction_length": 3,
        "quantiles": quantiles,
        "epochs": 3,
        "num_batches_per_epoch": 3,
        "is_iqf": True,
    }
    dataset_train, dataset_test = make_dummy_datasets_with_features(
        cardinality=[3, 10],
        num_feat_dynamic_real=2,
        freq=hps["freq"],
        prediction_length=hps["prediction_length"],
    )
    estimator = MQCNNEstimator.from_inputs(dataset_train, **hps)
    predictor = estimator.train(dataset_train, num_workers=None)
    forecasts = list(predictor.predict(dataset_test))
    assert len(forecasts) == len(dataset_test)

    item_id = 0
    for inference_quantile in inference_quantiles:
        assert (
            len(forecasts[item_id].quantile(inference_quantile))
            == hps["prediction_length"]
        )

    # Fix: the original iterated the *unsorted* quantile list and never
    # advanced the "previous" prediction, so every quantile was compared
    # only against the lowest one. Compare consecutive *sorted* quantiles
    # instead, advancing the baseline each step.
    sorted_quantiles = sorted(inference_quantiles)
    previous_prediction = forecasts[item_id].quantile(sorted_quantiles[0])
    for quantile_level in sorted_quantiles[1:]:
        current_prediction = forecasts[item_id].quantile(quantile_level)
        assert all(
            prev <= curr
            for prev, curr in zip(previous_prediction, current_prediction)
        ), "quantile-crossing occurred"
        previous_prediction = current_prediction
def test_mqcnn_covariate_smoke_test(
    use_feat_dynamic_real,
    add_time_feature,
    add_age_feature,
    enable_decoder_dynamic_feature,
    hybridize,
):
    """Smoke-test MQCNN training/prediction over covariate-flag combinations.

    Only asserts that training runs and one forecast is produced per test
    series; no accuracy or shape checks beyond the forecast count.
    """
    hyperparams = dict(
        seed=42,
        freq="D",
        prediction_length=3,
        quantiles=[0.5, 0.1],
        epochs=3,
        num_batches_per_epoch=3,
        use_feat_dynamic_real=use_feat_dynamic_real,
        add_time_feature=add_time_feature,
        add_age_feature=add_age_feature,
        enable_decoder_dynamic_feature=enable_decoder_dynamic_feature,
        hybridize=hybridize,
    )
    train_ds, test_ds = make_dummy_datasets_with_features(
        cardinality=[3, 10],
        num_feat_dynamic_real=2,
        freq=hyperparams["freq"],
        prediction_length=hyperparams["prediction_length"],
    )
    # num_workers=0 keeps data loading single-process for the smoke test.
    trained = MQCNNEstimator.from_hyperparameters(**hyperparams).train(
        train_ds, num_workers=0
    )
    predictions = list(trained.predict(test_ds))
    assert len(predictions) == len(test_ds)
def test_mqcnn_covariate_smoke_test(
    use_past_feat_dynamic_real,
    use_feat_dynamic_real,
    add_time_feature,
    add_age_feature,
    enable_encoder_dynamic_feature,
    enable_decoder_dynamic_feature,
    hybridize,
    quantiles,
    distr_output,
    is_iqf,
):
    """Smoke-test MQCNN across encoder/decoder covariate flags and outputs.

    Exercises past dynamic features, distribution outputs, and IQF; only
    verifies that training completes and one forecast per test series is
    emitted.
    """
    hyperparams = dict(
        seed=42,
        freq="Y",
        context_length=5,
        prediction_length=3,
        quantiles=quantiles,
        distr_output=distr_output,
        epochs=3,
        num_batches_per_epoch=3,
        use_past_feat_dynamic_real=use_past_feat_dynamic_real,
        use_feat_dynamic_real=use_feat_dynamic_real,
        add_time_feature=add_time_feature,
        add_age_feature=add_age_feature,
        enable_encoder_dynamic_feature=enable_encoder_dynamic_feature,
        enable_decoder_dynamic_feature=enable_decoder_dynamic_feature,
        hybridize=hybridize,
        is_iqf=is_iqf,
    )
    train_ds, test_ds = make_dummy_datasets_with_features(
        cardinality=[3, 10],
        num_feat_dynamic_real=2,
        num_past_feat_dynamic_real=4,
        freq=hyperparams["freq"],
        prediction_length=hyperparams["prediction_length"],
    )
    trained = MQCNNEstimator.from_hyperparameters(**hyperparams).train(
        train_ds, num_workers=None
    )
    predictions = list(trained.predict(test_ds))
    assert len(predictions) == len(test_ds)
def test_backwards_compatibility():
    """MQCNN accepts datasets using the legacy ``dynamic_feat`` field name.

    Renames ``feat_dynamic_real`` to the deprecated ``dynamic_feat`` key in
    both train and test datasets, then checks training and prediction still
    succeed end to end.
    """
    hyperparams = dict(
        freq="D",
        context_length=5,
        num_forking=4,
        prediction_length=3,
        quantiles=[0.5, 0.1],
        epochs=3,
        num_batches_per_epoch=3,
        use_feat_dynamic_real=True,
        use_past_feat_dynamic_real=True,
        enable_encoder_dynamic_feature=True,
        enable_decoder_dynamic_feature=True,
        scaling=True,
        scaling_decoder_dynamic_feature=True,
    )
    train_ds, test_ds = make_dummy_datasets_with_features(
        cardinality=[3, 10],
        num_feat_dynamic_real=2,
        num_past_feat_dynamic_real=4,
        freq=hyperparams["freq"],
        prediction_length=hyperparams["prediction_length"],
    )
    # Move each entry's dynamic features under the legacy key; pop() both
    # copies the value and removes the modern key in one step.
    for dataset in (train_ds, test_ds):
        for entry in dataset.list_data:
            entry["dynamic_feat"] = entry.pop("feat_dynamic_real")

    estimator = MQCNNEstimator.from_inputs(train_ds, **hyperparams)
    predictor = estimator.train(train_ds, num_workers=None)
    predictions = list(predictor.predict(test_ds))
    assert len(predictions) == len(test_ds)
def test_feat_static_cat_smoke_test(use_feat_static_cat, cardinality):
    """Smoke-test MQCNN with static categorical features toggled on/off.

    Parameters (injected by pytest parametrization):
        use_feat_static_cat: whether the model consumes static cat features.
        cardinality: per-feature category counts for the dummy dataset.
    """
    hyperparams = dict(
        seed=42,
        freq="D",
        prediction_length=3,
        quantiles=[0.5, 0.1],
        epochs=3,
        num_batches_per_epoch=3,
        use_feat_static_cat=use_feat_static_cat,
    )
    train_ds, test_ds = make_dummy_datasets_with_features(
        cardinality=cardinality,
        num_feat_dynamic_real=2,
        freq=hyperparams["freq"],
        prediction_length=hyperparams["prediction_length"],
    )
    predictor = MQCNNEstimator.from_inputs(train_ds, **hyperparams).train(
        train_ds, num_workers=None
    )
    predictions = list(predictor.predict(test_ds))
    assert len(predictions) == len(test_ds)
def test_mqcnn_scaling_smoke_test(scaling):
    """Smoke-test MQCNN with target scaling enabled or disabled.

    Parameters (injected by pytest parametrization):
        scaling: whether the estimator scales the target series.
    """
    hyperparams = dict(
        seed=42,
        freq="D",
        prediction_length=3,
        quantiles=[0.5, 0.1],
        epochs=3,
        num_batches_per_epoch=3,
        scaling=scaling,
    )
    train_ds, test_ds = make_dummy_datasets_with_features(
        cardinality=[3, 10],
        num_feat_dynamic_real=2,
        freq=hyperparams["freq"],
        prediction_length=hyperparams["prediction_length"],
    )
    # num_workers=0 keeps data loading single-process for the smoke test.
    predictor = MQCNNEstimator.from_inputs(train_ds, **hyperparams).train(
        train_ds, num_workers=0
    )
    predictions = list(predictor.predict(test_ds))
    assert len(predictions) == len(test_ds)
from gluonts.testutil.dummy_datasets import make_dummy_datasets_with_features common_estimator_hps = dict( freq="D", prediction_length=3, trainer=Trainer(epochs=3, num_batches_per_epoch=2, batch_size=4), ) @pytest.mark.parametrize( "estimator, datasets", [ # No features ( partial(DeepAREstimator, **common_estimator_hps), make_dummy_datasets_with_features(), ), # Single static categorical feature ( partial( DeepAREstimator, **common_estimator_hps, use_feat_static_cat=True, cardinality=[5], ), make_dummy_datasets_with_features(cardinality=[5]), ), # Multiple static categorical features ( partial( DeepAREstimator,