    def test_get_ensemble_forecast_collection(self):
        EPSG = 32633
        upper_left_x = 436100.0
        upper_left_y = 7417800.0
        nx = 74
        ny = 94
        dx = 1000.0
        dy = 1000.0
        t0 = api.YMDhms(2015, 7, 26, 0)
        n_hours = 30
        utc = api.Calendar()  # No offset gives Utc
        period = api.UtcPeriod(utc.time(t0), utc.time(t0) + api.deltahours(n_hours))
        t_c = utc.time(t0) + api.deltahours(1)

        base_dir = path.join(shyftdata_dir, "netcdf", "arome")
        pattern = "fc2015072600.nc"
        bbox = ([upper_left_x, upper_left_x + nx * dx,
                 upper_left_x + nx * dx, upper_left_x],
                [upper_left_y, upper_left_y,
                 upper_left_y - ny * dy, upper_left_y - ny * dy])
        try:
            ar1 = AromeDataRepository(EPSG, base_dir, filename=pattern, bounding_box=bbox)
            ar2 = AromeDataRepository(EPSG, base_dir, filename=pattern, bounding_box=bbox)
            repos = GeoTsRepositoryCollection([ar1, ar2])
            data_names = ("temperature", "wind_speed", "relative_humidity")
            ensemble = repos.get_forecast_ensemble(data_names, period, t_c, None)
            self.assertTrue(isinstance(ensemble, list))
            self.assertEqual(len(ensemble), 10)
            with self.assertRaises(GeoTsRepositoryCollectionError) as context:
                repos = GeoTsRepositoryCollection([ar1, ar2], reduce_type="add")
                repos.get_forecast_ensemble(data_names, period, t_c, None)
            self.assertEqual("Only replace is supported yet", context.exception.args[0])
        except AromeDataRepositoryError as adre:
            self.skipTest("(test inconclusive- missing arome-data {0})".format(adre))

    def test_get_forecast_collection(self):
        n_hours = 30
        dt = api.deltahours(1)
        utc = api.Calendar()  # No offset gives Utc
        tc = api.YMDhms(2015, 8, 24, 6)
        t0 = utc.time(tc)
        period = api.UtcPeriod(t0, t0 + api.deltahours(n_hours))
        date_str = "{}{:02}{:02}_{:02}".format(tc.year, tc.month, tc.day, tc.hour)

        epsg, bbox = self.arome_epsg_bbox

        base_dir = path.join(shyftdata_dir, "repository", "arome_data_repository")
        f1 = "arome_metcoop_red_default2_5km_{}.nc".format(date_str)
        f2 = "arome_metcoop_red_test2_5km_{}.nc".format(date_str)

        ar1 = AromeDataRepository(epsg, base_dir, filename=f1, allow_subset=True)
        ar2 = AromeDataRepository(epsg, base_dir, filename=f2, elevation_file=f1, allow_subset=True)

        geo_ts_repository = GeoTsRepositoryCollection([ar1, ar2])
        source_names = ("temperature", "radiation")
        sources = geo_ts_repository.get_forecast(source_names, period, t0,
                                                 geo_location_criteria=bbox)
        self.assertTrue(all([x in source_names for x in sources]))

        geo_ts_repository = GeoTsRepositoryCollection([ar1, ar2], reduce_type="add")
        with self.assertRaises(GeoTsRepositoryCollectionError) as context:
            sources = geo_ts_repository.get_forecast(("temperature", "radiation"),
                                                     period, t0, geo_location_criteria=bbox)
Example #3
        def arome_repository(self):
            """ 
            This shows how we
            join together two arome repositories to a repository that gives us all the variabes that we need.
             arome_4: all except radiation
             arome_rad: ratiation
            Return
            ------
            Arome repository (inside a GeoTsRepositoryCollection), with all datafiles that matches
            the filepattrns we currently use for arome downloads.

            """
            base_dir = path.join(self.statkraft_data_dir, "repository",
                                 "arome_data_repository")

            epsg = self.grid_spec.epsg()
            bbox = self.grid_spec.bounding_box(epsg)
            f1 = "arome_metcoop_default2_5km_20151001_00.nc"
            f2 = "arome_metcoop_test2_5km_20151001_00.nc"
            arome_4 = AromeDataRepository(epsg,
                                          base_dir,
                                          filename=f1,
                                          bounding_box=bbox,
                                          allow_subset=True)
            arome_rad = AromeDataRepository(epsg,
                                            base_dir,
                                            filename=f2,
                                            elevation_file=f1,
                                            bounding_box=bbox,
                                            allow_subset=True)
            return GeoTsRepositoryCollection([arome_4, arome_rad])
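
        # A hedged usage sketch, not from the original source: how the joined
        # collection returned by arome_repository() above might be queried.
        # The variable names and query period are illustrative assumptions.
        def arome_repository_usage_sketch(self):
            utc = api.Calendar()
            t0 = utc.time(api.YMDhms(2015, 10, 1, 0))  # matches the file date above
            period = api.UtcPeriod(t0, t0 + api.deltahours(30))
            repo = self.arome_repository()
            # One call serves variables coming from both underlying files
            return repo.get_timeseries(("temperature", "radiation"),
                                       period, geo_location_criteria=None)
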
    def test_compute_lwc_percentiles(self):
        # Simulation time axis
        year, month, day, hour = 2010, 9, 1, 0
        dt = api.deltahours(24)
        n_steps = 400
        utc = api.Calendar()  # No offset gives Utc
        t0 = utc.time(api.YMDhms(year, month, day, hour))
        time_axis = api.Timeaxis(t0, dt, n_steps)

        # Some fake ids
        region_id = 0
        interpolation_id = 0

        # Simulation coordinate system
        epsg = "32633"

        # Model
        model_t = pt_gs_k.PTGSKModel

        # Configs and repositories
        dataset_config_file = path.join(path.dirname(__file__), "netcdf",
                                        "atnsjoen_datasets.yaml")
        region_config_file = path.join(path.dirname(__file__), "netcdf",
                                       "atnsjoen_calibration_region.yaml")
        region_config = RegionConfig(region_config_file)
        model_config = ModelConfig(self.model_config_file)
        dataset_config = YamlContent(dataset_config_file)
        region_model_repository = RegionModelRepository(
            region_config, model_config, model_t, epsg)
        interp_repos = InterpolationParameterRepository(model_config)
        netcdf_geo_ts_repos = []
        for source in dataset_config.sources:
            station_file = source["params"]["stations_met"]
            netcdf_geo_ts_repos.append(
                GeoTsRepository(source["params"], station_file, ""))
        geo_ts_repository = GeoTsRepositoryCollection(netcdf_geo_ts_repos)

        # Construct target discharge series
        simulator = DefaultSimulator(region_id, interpolation_id,
                                     region_model_repository,
                                     geo_ts_repository, interp_repos, None)
        n_cells = simulator.region_model.size()
        state_repos = DefaultStateRepository(model_t, n_cells)
        cid = 1
        simulator.region_model.set_state_collection(cid, True)
        simulator.run(time_axis, state_repos.get_state(0))

        percentile_list = [10, 25, 50, 75, 90]
        # From here, things could be calculated without copies (except for 't')
        # TODO: Graham optimize with numba :-)
        cells = simulator.region_model.get_cells()
        lwcs = [np.array(cell.sc.gs_lwc.v) for cell in cells]  # Contiguous
        t = np.array([
            cells[0].sc.gs_lwc.time(i)
            for i in range(cells[0].sc.gs_lwc.size())
        ])
        percentiles = np.percentile(np.array(lwcs), percentile_list, 0)
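
        # Hedged illustration (synthetic numbers, not simulation output) of the
        # np.percentile call above: axis 0 reduces across cells, independently
        # at each time step.
        demo = np.array([[0.0, 1.0, 2.0, 3.0],
                         [1.0, 2.0, 3.0, 4.0],
                         [2.0, 3.0, 4.0, 5.0]])  # 3 cells x 4 time steps
        demo_pct = np.percentile(demo, percentile_list, 0)
        assert demo_pct.shape == (len(percentile_list), demo.shape[1])  # (5, 4)
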
    def test_compute_lwc_percentiles(self):
        # Simulation time axis
        year, month, day, hour = 2010, 9, 1, 0
        dt = api.deltahours(24)
        n_steps = 400
        utc = api.Calendar()  # No offset gives Utc
        t0 = utc.time(year, month, day, hour)
        time_axis = api.TimeAxisFixedDeltaT(t0, dt, n_steps)

        # Some fake ids
        region_id = 0
        interpolation_id = 0

        # Simulation coordinate system
        epsg = "32633"

        # Model
        model_t = pt_gs_k.PTGSKModel

        # Configs and repositories
        dataset_config_file = path.join(path.dirname(__file__), "netcdf",
                                        "atnsjoen_datasets.yaml")
        region_config_file = path.join(path.dirname(__file__), "netcdf",
                                       "atnsjoen_calibration_region.yaml")
        region_config = RegionConfig(region_config_file)
        model_config = ModelConfig(self.model_config_file)
        dataset_config = YamlContent(dataset_config_file)
        region_model_repository = RegionModelRepository(
            region_config, model_config, model_t, epsg)
        interp_repos = InterpolationParameterRepository(model_config)
        netcdf_geo_ts_repos = []
        for source in dataset_config.sources:
            station_file = source["params"]["stations_met"]
            netcdf_geo_ts_repos.append(
                GeoTsRepository(source["params"], station_file, ""))
        geo_ts_repository = GeoTsRepositoryCollection(netcdf_geo_ts_repos)

        # Construct target discharge series
        simulator = DefaultSimulator(region_id, interpolation_id,
                                     region_model_repository,
                                     geo_ts_repository, interp_repos, None)
        n_cells = simulator.region_model.size()
        state_repos = DefaultStateRepository(model_t, n_cells)
        cid = 1
        simulator.region_model.set_state_collection(cid, True)
        simulator.run(time_axis, state_repos.get_state(0))
        self.assertAlmostEqual(
            simulator.region_model.cells[0].rc.pe_output.values[0],
            0.039768354, 5)  # just to verify pot.evap by regression, mm/h

        percentile_list = [10, 25, 50, 75, 90]
Example #6
def construct_geots_repo(datasets_config, epsg=None):
    """ iterates over the different sources that are provided
    and prepares the repository to read the data for each type"""
    geo_ts_repos = []
    src_types_to_extract = []
    for source in datasets_config['sources']:
        if epsg is not None:
            source['params'].update({'epsg': epsg})
        # note that here we are instantiating the different source repositories
        # to place in the geo_ts list
        geo_ts_repos.append(source['repository'](**source['params']))
        src_types_to_extract.append(source['types'])

    return GeoTsRepositoryCollection(geo_ts_repos, src_types_per_repo=src_types_to_extract)
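
# Hedged usage sketch, not from the original source: a minimal datasets_config
# of the shape construct_geots_repo expects. The repository class, types and
# params are illustrative assumptions; note that epsg is injected into each
# source's params, so the repository must accept an `epsg` keyword.
example_datasets_config = {
    'sources': [
        {
            'repository': CFDataRepository,  # as used in other examples on this page
            'types': ['temperature', 'precipitation'],
            'params': {'filename': 'netcdf/atnsjoen_met.nc', 'padding': 5000.0},
        },
    ]
}
# geo_ts_repo = construct_geots_repo(example_datasets_config, epsg=32633)
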
    def run_simulator(self, model_t):
        # Simulation time axis
        dt0 = api.YMDhms(2015, 8, 24, 6)
        n_hours = 30
        dt = api.deltahours(1)
        utc = api.Calendar()  # No offset gives Utc
        t0 = utc.time(dt0)
        time_axis = api.Timeaxis(t0, dt, n_hours)

        # Some dummy ids not needed for the netcdf based repositories
        region_id = 0
        interpolation_id = 0

        # Simulation coordinate system
        epsg = "32633"

        # Configs and repositories
        region_config = RegionConfig(self.region_config_file)
        model_config = ModelConfig(self.model_config_file)
        region_model_repository = RegionModelRepository(
            region_config, model_config, model_t, epsg)
        interp_repos = InterpolationParameterRepository(model_config)
        date_str = "{}{:02}{:02}_{:02}".format(dt0.year, dt0.month, dt0.day,
                                               dt0.hour)
        base_dir = path.join(shyftdata_dir, "repository",
                             "arome_data_repository")
        f1 = "arome_metcoop_red_default2_5km_{}.nc".format(date_str)
        f2 = "arome_metcoop_red_test2_5km_{}.nc".format(date_str)

        ar1 = AromeDataRepository(epsg,
                                  base_dir,
                                  filename=f1,
                                  allow_subset=True)
        ar2 = AromeDataRepository(epsg,
                                  base_dir,
                                  filename=f2,
                                  elevation_file=f1,
                                  allow_subset=True)

        geo_ts_repository = GeoTsRepositoryCollection([ar1, ar2])

        simulator = DefaultSimulator(region_id, interpolation_id,
                                     region_model_repository,
                                     geo_ts_repository, interp_repos, None)
        n_cells = simulator.region_model.size()
        state_repos = DefaultStateRepository(model_t, n_cells)
        simulator.run(time_axis, state_repos.get_state(0))
Example #9
    def test_get_timeseries_collection(self):
        tc = api.YMDhms(2015, 8, 24, 6)
        n_hours = 30
        dt = api.deltahours(1)
        utc = api.Calendar()  # No offset gives Utc
        t0 = utc.time(tc)
        period = api.UtcPeriod(t0, t0 + api.deltahours(n_hours))
        date_str = "{}{:02}{:02}_{:02}".format(tc.year, tc.month, tc.day,
                                               tc.hour)

        epsg, bbox = self.arome_epsg_bbox

        base_dir = path.join(shyftdata_dir, "repository",
                             "arome_data_repository")
        f1 = "arome_metcoop_red_default2_5km_{}.nc".format(date_str)
        f2 = "arome_metcoop_red_test2_5km_{}.nc".format(date_str)

        ar1 = AromeDataRepository(epsg,
                                  base_dir,
                                  filename=f1,
                                  allow_subset=True)
        ar2 = AromeDataRepository(epsg,
                                  base_dir,
                                  filename=f2,
                                  elevation_file=f1,
                                  allow_subset=True)

        geo_ts_repository = GeoTsRepositoryCollection([ar1, ar2])
        sources = geo_ts_repository.get_timeseries(
            ("temperature", "radiation"), period, geo_location_criteria=bbox)

        with self.assertRaises(GeoTsRepositoryCollectionError) as context:
            GeoTsRepositoryCollection([ar1, ar2], reduce_type="foo")

        geo_ts_repository = GeoTsRepositoryCollection([ar1, ar2],
                                                      reduce_type="add")
        with self.assertRaises(GeoTsRepositoryCollectionError) as context:
            sources = geo_ts_repository.get_timeseries(
                ("temperature", "radiation"),
                period,
                geo_location_criteria=bbox)
Example #10
    def test_compute_lwc_percentiles(self):
        # Simulation time axis
        year, month, day, hour = 2013, 9, 1, 0
        dt = api.deltahours(24)
        n_steps = 364
        utc = api.Calendar()  # No offset gives Utc
        t0 = utc.time(year, month, day, hour)
        time_axis = api.TimeAxisFixedDeltaT(t0, dt, n_steps)

        # Some fake ids
        region_id = 0
        interpolation_id = 0

        # Model
        region_model_repository = CFRegionModelRepository(
            self.region_config, self.model_config)
        interp_repos = InterpolationParameterRepository(
            self.interpolation_config)
        netcdf_geo_ts_repos = [
            CFDataRepository(32633,
                             source["params"]["filename"],
                             padding=source["params"]['padding'])
            for source in self.dataset_config.sources
        ]
        geo_ts_repository = GeoTsRepositoryCollection(netcdf_geo_ts_repos)

        # Construct target discharge series
        simulator = DefaultSimulator(region_id, interpolation_id,
                                     region_model_repository,
                                     geo_ts_repository, interp_repos, None)
        state_repos = DefaultStateRepository(simulator.region_model)
        cid = 1228
        simulator.region_model.set_state_collection(cid, True)
        simulator.run(time_axis=time_axis, state=state_repos.get_state(0))
        # TODO: Update the regression test below with correct result
        # self.assertAlmostEqual(simulator.region_model.cells[0].rc.pe_output.values[0], 0.039768354, 5)  # just to verify pot.evap by regression, mm/h

        percentile_list = [10, 25, 50, 75, 90]
Example #11
    def test_get_forecast_collection(self):
        n_hours = 30
        dt = api.deltahours(1)
        utc = api.Calendar()  # No offset gives Utc
        tc = api.YMDhms(2015, 8, 24, 6)
        t0 = utc.time(tc)
        period = api.UtcPeriod(t0, t0 + api.deltahours(n_hours))
        date_str = "{}{:02}{:02}_{:02}".format(tc.year, tc.month, tc.day,
                                               tc.hour)

        epsg, bbox, bpoly = self.arome_epsg_bbox

        base_dir = path.join(shyftdata_dir, "repository",
                             "arome_data_repository")
        f1_elev = "arome_metcoop_red_default2_5km_{}.nc".format(date_str)
        f1 = "arome_metcoop_red_default2_5km_(\d{4})(\d{2})(\d{2})[T_](\d{2})Z?.nc$"
        #f2 = "arome_metcoop_red_test2_5km_{}.nc".format(date_str)
        f2 = "arome_metcoop_red_test2_5km_(\d{4})(\d{2})(\d{2})[T_](\d{2})Z?.nc$"

        ar1 = MetNetcdfDataRepository(epsg,
                                      base_dir,
                                      filename=f1,
                                      allow_subset=True)
        ar2 = MetNetcdfDataRepository(epsg,
                                      base_dir,
                                      filename=f2,
                                      elevation_file=f1_elev,
                                      allow_subset=True)

        geo_ts_repository = GeoTsRepositoryCollection([ar1, ar2])
        source_names = ("temperature", "radiation")
        sources = geo_ts_repository.get_forecast(source_names,
                                                 period,
                                                 t0,
                                                 geo_location_criteria=bpoly)
        self.assertTrue(all([x in source_names for x in sources]))

        geo_ts_repository = GeoTsRepositoryCollection([ar1, ar2],
                                                      reduce_type="add")
        with self.assertRaises(GeoTsRepositoryCollectionError) as context:
            sources = geo_ts_repository.get_forecast(
                ("temperature", "radiation"),
                period,
                t0,
                geo_location_criteria=bpoly)
Example #12
    def test_snow_and_ground_water_response_calibration(self):
        """
        Test dual calibration strategy:
            * First fit the three Kirchner parameters for
              ground water response during July, August, and
              September.
            * Then fit two snow routine parameters (tx and max_water)
              from November to April.
        """
        # Simulation time axis
        year, month, day, hour = 2010, 9, 1, 0
        dt = api.deltahours(24)
        n_steps = 400
        utc = api.Calendar()  # No offset gives Utc
        t0 = utc.time(api.YMDhms(year, month, day, hour))
        time_axis = api.Timeaxis(t0, dt, n_steps)

        # Some fake ids
        region_id = 0
        interpolation_id = 0

        # Simulation coordinate system
        epsg = "32633"

        # Model
        model_t = pt_gs_k.PTGSKOptModel

        # Configs and repositories
        dataset_config_file = path.join(path.dirname(__file__), "netcdf",
                                        "atnsjoen_datasets.yaml")
        region_config_file = path.join(path.dirname(__file__), "netcdf",
                                       "atnsjoen_calibration_region.yaml")
        region_config = RegionConfig(region_config_file)
        model_config = ModelConfig(self.model_config_file)
        dataset_config = YamlContent(dataset_config_file)
        region_model_repository = RegionModelRepository(
            region_config, model_config, model_t, epsg)
        interp_repos = InterpolationParameterRepository(model_config)
        netcdf_geo_ts_repos = []
        for source in dataset_config.sources:
            station_file = source["params"]["stations_met"]
            netcdf_geo_ts_repos.append(
                GeoTsRepository(source["params"], station_file, ""))
        geo_ts_repository = GeoTsRepositoryCollection(netcdf_geo_ts_repos)

        # Construct target discharge series
        simulator = DefaultSimulator(region_id, interpolation_id,
                                     region_model_repository,
                                     geo_ts_repository, interp_repos, None)
        n_cells = simulator.region_model.size()
        state_repos = DefaultStateRepository(model_t, n_cells)
        simulator.run(time_axis, state_repos.get_state(0))
        cid = 1
        target_discharge = simulator.region_model.statistics.discharge([cid])

        # Construct kirchner parameters
        param = simulator.region_model.parameter_t(
            simulator.region_model.get_region_parameter())
        print_param("True solution", param)

        kirchner_param_min = simulator.region_model.parameter_t(param)
        kirchner_param_max = simulator.region_model.parameter_t(param)
        # Kirchner parameters are quite abstract (no physical meaning), so simply scale them
        kirchner_param_min.kirchner.c1 *= 0.8
        kirchner_param_min.kirchner.c2 *= 0.8
        kirchner_param_min.kirchner.c3 *= 0.8
        kirchner_param_max.kirchner.c1 *= 1.2
        kirchner_param_max.kirchner.c2 *= 1.2
        kirchner_param_max.kirchner.c3 *= 1.2
        # kirchner_t_start = utc.time(api.YMDhms(2011, 4, 1, 0))
        # kirchner_time_axis = api.Timeaxis(kirchner_t_start, dt, 150)
        kirchner_time_axis = time_axis

        # Construct gamma snow parameters (realistic tx and max_lwc)
        gamma_snow_param_min = simulator.region_model.parameter_t(param)
        gamma_snow_param_max = simulator.region_model.parameter_t(param)
        gamma_snow_param_min.gs.tx = -1.0  # Min snow/rain temperature threshold
        gamma_snow_param_min.gs.max_water = 0.05  # Lower bound for max water content in snow (coastal regions)
        gamma_snow_param_max.gs.tx = 1.0
        gamma_snow_param_max.gs.max_water = 0.25  # Upper bound for max water content, or we get too little melt
        gs_t_start = utc.time(api.YMDhms(2010, 11, 1, 0))
        gs_time_axis = api.Timeaxis(gs_t_start, dt, 250)
        # gs_time_axis = time_axis

        # Find parameters
        target_spec = api.TargetSpecificationPts(target_discharge,
                                                 api.IntVector([cid]), 1.0,
                                                 api.KLING_GUPTA)
        target_spec_vec = api.TargetSpecificationVector()  # TODO: we don't have a list initializer for vectors yet
        target_spec_vec.append(target_spec)
        # Construct a fake, perturbed starting point for calibration
        p_vec = [param.get(i) for i in range(param.size())]
        for i, name in enumerate(
            [param.get_name(i) for i in range(len(p_vec))]):
            if name not in ("c1" "c2", "c3", "TX", "max_water"):
                next
            if name in ("c1", "c2", "c3"):
                p_vec[i] = random.uniform(0.8 * p_vec[i], 1.2 * p_vec[i])
            elif name == "TX":
                p_vec[i] = random.uniform(gamma_snow_param_min.gs.tx,
                                          gamma_snow_param_max.gs.tx)
            elif name == "max_water":
                p_vec[i] = random.uniform(gamma_snow_param_min.gs.max_water,
                                          gamma_snow_param_max.gs.max_water)
        param.set(p_vec)
        print_param("Initial guess", param)
        # Two-pass optimization: first for the ground water response, then for the snow routine
        kirchner_p_opt = simulator.optimize(kirchner_time_axis,
                                            state_repos.get_state(0),
                                            target_spec_vec, param,
                                            kirchner_param_min,
                                            kirchner_param_max)
        gamma_snow_p_opt = simulator.optimize(gs_time_axis,
                                              state_repos.get_state(0),
                                              target_spec_vec, kirchner_p_opt,
                                              gamma_snow_param_min,
                                              gamma_snow_param_max)
        print_param("Half way result", kirchner_p_opt)
        print_param("Result", gamma_snow_p_opt)

        simulator.region_model.set_catchment_parameter(cid, gamma_snow_p_opt)
        simulator.run(time_axis, state_repos.get_state(0))
        found_discharge = simulator.region_model.statistics.discharge([cid])

        t_vs = np.array(target_discharge.v)
        t_ts = np.array(
            [target_discharge.time(i) for i in range(target_discharge.size())])
        f_vs = np.array(found_discharge.v)
        f_ts = np.array(
            [found_discharge.time(i) for i in range(found_discharge.size())])
Example #13
    def test_snow_and_ground_water_response_calibration(self):
        """
        Test dual calibration strategy:
            * First fit the three Kirchner parameters for
              ground water response during July, August, and
              September.
            * Then fit two snow routine parameters (tx and max_water)
              from November to April.
        """
        # Simulation time axis
        dt = api.deltahours(24)
        n_steps = 364
        utc = api.Calendar()  # No offset gives Utc
        t0 = utc.time(2013, 9, 1, 0)
        time_axis = api.TimeAxisFixedDeltaT(t0, dt, n_steps)

        # Some fake ids
        region_id = 0
        interpolation_id = 0
        opt_model_t = self.model_config.model_type().opt_model_t
        model_config = ModelConfig(self.model_config_file,
                                   overrides={'model_t': opt_model_t})
        region_model_repository = CFRegionModelRepository(
            self.region_config, model_config)
        interp_repos = InterpolationParameterRepository(
            self.interpolation_config)
        netcdf_geo_ts_repos = [
            CFDataRepository(32633,
                             source["params"]["filename"],
                             padding=source["params"]['padding'])
            for source in self.dataset_config.sources
        ]
        geo_ts_repository = GeoTsRepositoryCollection(netcdf_geo_ts_repos)

        # Construct target discharge series
        simulator = DefaultSimulator(region_id, interpolation_id,
                                     region_model_repository,
                                     geo_ts_repository, interp_repos, None)
        state_repos = DefaultStateRepository(simulator.region_model)
        simulator.run(time_axis, state_repos.get_state(0))
        cid = 1228
        target_discharge = api.TsTransform().to_average(
            t0, dt, n_steps,
            simulator.region_model.statistics.discharge([cid]))

        # Construct kirchner parameters
        param = simulator.region_model.parameter_t(
            simulator.region_model.get_region_parameter())
        print_param("True solution", param)

        kirchner_param_min = simulator.region_model.parameter_t(param)
        kirchner_param_max = simulator.region_model.parameter_t(param)
        # Kirchner parameters are quite abstract (no physical meaning), so simply scale them
        kirchner_param_min.kirchner.c1 *= 0.8
        kirchner_param_min.kirchner.c2 *= 0.8
        kirchner_param_min.kirchner.c3 *= 0.8
        kirchner_param_max.kirchner.c1 *= 1.2
        kirchner_param_max.kirchner.c2 *= 1.2
        kirchner_param_max.kirchner.c3 *= 1.2
        # kirchner_t_start = utc.time(2011, 4, 1, 0)
        # kirchner_time_axis = api.TimeAxisFixedDeltaT(kirchner_t_start, dt, 150)
        kirchner_time_axis = time_axis

        # Construct gamma snow parameters (realistic tx and max_lwc)
        gamma_snow_param_min = simulator.region_model.parameter_t(param)
        gamma_snow_param_max = simulator.region_model.parameter_t(param)
        gamma_snow_param_min.gs.tx = -1.0  # Min snow/rain temperature threshold
        gamma_snow_param_min.gs.max_water = 0.05  # Lower bound for max water content in snow (coastal regions)
        gamma_snow_param_max.gs.tx = 1.0
        gamma_snow_param_max.gs.max_water = 0.25  # Upper bound for max water content, or we get too little melt
        gs_t_start = utc.time(2013, 11, 1, 0)
        gs_time_axis = api.TimeAxisFixedDeltaT(gs_t_start, dt, 250)
        # gs_time_axis = time_axis

        # Find parameters
        target_spec = api.TargetSpecificationPts(target_discharge,
                                                 api.IntVector([cid]), 1.0,
                                                 api.KLING_GUPTA)
        target_spec_vec = api.TargetSpecificationVector()  # TODO: we don't have a list initializer for vectors yet
        target_spec_vec.append(target_spec)
        # Construct a fake, perturbed starting point for calibration
        p_vec = [param.get(i) for i in range(param.size())]
        for i, name in enumerate(
            [param.get_name(i) for i in range(len(p_vec))]):
            if name not in ("c1" "c2", "c3", "TX", "max_water"):
                next
            if name in ("c1", "c2", "c3"):
                p_vec[i] = random.uniform(0.8 * p_vec[i], 1.2 * p_vec[i])
            elif name == "TX":
                p_vec[i] = random.uniform(gamma_snow_param_min.gs.tx,
                                          gamma_snow_param_max.gs.tx)
            elif name == "max_water":
                p_vec[i] = random.uniform(gamma_snow_param_min.gs.max_water,
                                          gamma_snow_param_max.gs.max_water)
        param.set(p_vec)
        print_param("Initial guess", param)
        # Two-pass optimization: first for the ground water response, then for the snow routine
        kirchner_p_opt = simulator.optimize(kirchner_time_axis,
                                            state_repos.get_state(0),
                                            target_spec_vec, param,
                                            kirchner_param_min,
                                            kirchner_param_max)
        gamma_snow_p_opt = simulator.optimize(gs_time_axis,
                                              state_repos.get_state(0),
                                              target_spec_vec, kirchner_p_opt,
                                              gamma_snow_param_min,
                                              gamma_snow_param_max)
        print_param("Half way result", kirchner_p_opt)
        print_param("Result", gamma_snow_p_opt)

        simulator.region_model.set_catchment_parameter(cid, gamma_snow_p_opt)
        simulator.run(time_axis, state_repos.get_state(0))
        found_discharge = simulator.region_model.statistics.discharge([cid])