def test_inference_quantile_prediction(quantiles, inference_quantiles):
    """Train an MQCNN in IQF mode and verify inference-time quantiles.

    Checks that (a) each requested inference quantile yields one prediction
    per horizon step and (b) predictions are monotonically non-decreasing
    across increasing quantile levels (no quantile crossing).
    """
    hps = {
        "seed": 42,
        "freq": "D",
        "prediction_length": 3,
        "quantiles": quantiles,
        "epochs": 3,
        "num_batches_per_epoch": 3,
        "is_iqf": True,
    }
    dataset_train, dataset_test = make_dummy_datasets_with_features(
        cardinality=[3, 10],
        num_feat_dynamic_real=2,
        freq=hps["freq"],
        prediction_length=hps["prediction_length"],
    )
    estimator = MQCNNEstimator.from_inputs(dataset_train, **hps)
    predictor = estimator.train(dataset_train, num_workers=None)
    forecasts = list(predictor.predict(dataset_test))
    assert len(forecasts) == len(dataset_test)

    item_id = 0
    # Every requested quantile must produce one value per horizon step.
    for inference_quantile in inference_quantiles:
        assert (
            len(forecasts[item_id].quantile(inference_quantile))
            == hps["prediction_length"]
        )

    # Monotonicity check. BUG FIX: the original sorted the quantiles but then
    # iterated the *unsorted* input list, and never advanced the reference
    # prediction, so every level was only compared against the smallest one.
    # Iterate in sorted order and compare each level against its predecessor.
    sorted_quantiles = sorted(inference_quantiles)
    previous_quantile_prediction = forecasts[item_id].quantile(
        sorted_quantiles[0]
    )
    for inference_quantile in sorted_quantiles[1:]:
        current_quantile_prediction = forecasts[item_id].quantile(
            inference_quantile
        )
        assert all(
            prev <= curr
            for prev, curr in zip(
                previous_quantile_prediction, current_quantile_prediction
            )
        ), "quantile-crossing occurred"
        previous_quantile_prediction = current_quantile_prediction
def test_backwards_compatibility():
    """Smoke-test MQCNN end-to-end when the dynamic real features are
    supplied under the legacy field name ``dynamic_feat`` instead of
    ``feat_dynamic_real``."""
    hps = {
        "freq": "D",
        "context_length": 5,
        "num_forking": 4,
        "prediction_length": 3,
        "quantiles": [0.5, 0.1],
        "epochs": 3,
        "num_batches_per_epoch": 3,
        "use_feat_dynamic_real": True,
        "use_past_feat_dynamic_real": True,
        "enable_encoder_dynamic_feature": True,
        "enable_decoder_dynamic_feature": True,
        "scaling": True,
        "scaling_decoder_dynamic_feature": True,
    }
    dataset_train, dataset_test = make_dummy_datasets_with_features(
        cardinality=[3, 10],
        num_feat_dynamic_real=2,
        num_past_feat_dynamic_real=4,
        freq=hps["freq"],
        prediction_length=hps["prediction_length"],
    )
    # Rename the feature field to its pre-rename (legacy) key in both splits;
    # pop performs the assign-then-delete in one step.
    for dataset in (dataset_train, dataset_test):
        for entry in dataset.list_data:
            entry["dynamic_feat"] = entry.pop("feat_dynamic_real")
    estimator = MQCNNEstimator.from_inputs(dataset_train, **hps)
    predictor = estimator.train(dataset_train, num_workers=None)
    forecasts = list(predictor.predict(dataset_test))
    assert len(forecasts) == len(dataset_test)
def test_feat_static_cat_smoke_test(use_feat_static_cat, cardinality):
    """Smoke-test MQCNN training and prediction with static categorical
    features toggled via ``use_feat_static_cat``."""
    hps = dict(
        seed=42,
        freq="D",
        prediction_length=3,
        quantiles=[0.5, 0.1],
        epochs=3,
        num_batches_per_epoch=3,
        use_feat_static_cat=use_feat_static_cat,
    )
    dataset_train, dataset_test = make_dummy_datasets_with_features(
        cardinality=cardinality,
        num_feat_dynamic_real=2,
        freq=hps["freq"],
        prediction_length=hps["prediction_length"],
    )
    estimator = MQCNNEstimator.from_inputs(dataset_train, **hps)
    predictor = estimator.train(dataset_train, num_workers=None)
    forecasts = list(predictor.predict(dataset_test))
    # One forecast per test series confirms the pipeline ran end to end.
    assert len(forecasts) == len(dataset_test)
def test_mqcnn_scaling_smoke_test(scaling):
    """Smoke-test MQCNN training and prediction with target scaling
    enabled or disabled via ``scaling``."""
    hps = dict(
        seed=42,
        freq="D",
        prediction_length=3,
        quantiles=[0.5, 0.1],
        epochs=3,
        num_batches_per_epoch=3,
        scaling=scaling,
    )
    dataset_train, dataset_test = make_dummy_datasets_with_features(
        cardinality=[3, 10],
        num_feat_dynamic_real=2,
        freq=hps["freq"],
        prediction_length=hps["prediction_length"],
    )
    estimator = MQCNNEstimator.from_inputs(dataset_train, **hps)
    # NOTE: num_workers=0 (not None) here, unlike the sibling tests —
    # preserved as-is; presumably exercises the synchronous loader path.
    predictor = estimator.train(dataset_train, num_workers=0)
    forecasts = list(predictor.predict(dataset_test))
    # One forecast per test series confirms the pipeline ran end to end.
    assert len(forecasts) == len(dataset_test)