def test_predict(self, tmp_path, use_pred_months, use_latlons, experiment):
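        """Train a LinearNetwork on a small synthetic dataset and check that
        predict() returns results keyed by the single test folder."""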
        x, _, _ = _make_dataset(size=(5, 5), const=True)
        y = x.isel(time=[-1])

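        # the data loaders expect features/{experiment}/{train,test}/{year}_{month}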
        train_features = tmp_path / f"features/{experiment}/train/1980_1"
        train_features.mkdir(parents=True)

        test_features = tmp_path / f"features/{experiment}/test/1980_1"
        test_features.mkdir(parents=True)

        # static
        x_static, _, _ = _make_dataset(size=(5, 5), add_times=False)
        static_features = tmp_path / "features/static"
        static_features.mkdir(parents=True)
        x_static.to_netcdf(static_features / "data.nc")

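        # an identity normalization (mean 0, std 1) leaves the constant data unchanged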
        static_norm_dict = {"VHI": {"mean": 0.0, "std": 1.0}}
        with (tmp_path /
              "features/static/normalizing_dict.pkl").open("wb") as f:
            pickle.dump(static_norm_dict, f)

        # the nowcast experiment needs additional x features
        if experiment == "nowcast":
            x_add1, _, _ = _make_dataset(size=(5, 5),
                                         const=True,
                                         variable_name="precip")
            x_add2, _, _ = _make_dataset(size=(5, 5),
                                         const=True,
                                         variable_name="temp")
            x = xr.merge([x, x_add1, x_add2])

            norm_dict = {
                "VHI": {"mean": 0, "std": 1},
                "precip": {"mean": 0, "std": 1},
                "temp": {"mean": 0, "std": 1},
            }
        else:
            norm_dict = {"VHI": {"mean": 0, "std": 1}}

        with (tmp_path /
              f"features/{experiment}/normalizing_dict.pkl").open("wb") as f:
            pickle.dump(norm_dict, f)

        x.to_netcdf(test_features / "x.nc")
        y.to_netcdf(test_features / "y.nc")

        x.to_netcdf(train_features / "x.nc")
        y.to_netcdf(train_features / "y.nc")

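        # a single hidden layer of 10 units keeps the test model small and fast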
        layer_sizes = [10]
        dropout = 0.25

        model = LinearNetwork(
            data_folder=tmp_path,
            layer_sizes=layer_sizes,
            dropout=dropout,
            experiment=experiment,
            include_pred_month=use_pred_months,
            include_latlons=use_latlons,
        )
        model.train()
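        # predict() returns the true test arrays and the predictions,
        # each as a dict keyed by test folder name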
        test_arrays_dict, pred_dict = model.predict()

        # the folder name "1980_1" should be the only key in both dictionaries
        assert ("1980_1" in test_arrays_dict) and (len(test_arrays_dict) == 1)
        assert ("1980_1" in pred_dict) and (len(pred_dict) == 1)

        # _make_dataset with const=True returns all ones
        assert (test_arrays_dict["1980_1"]["y"] == 1).all()