def test_set_observed_state(self):
    # Set up configuration
    config_dir = path.join(path.dirname(__file__), "netcdf")
    cfg = orchestration.YAMLConfig(
        "atnsjoen_simulation.yaml", "atnsjoen",
        config_dir=config_dir, data_dir=shyftdata_dir)

    # Get a simulator
    simulator = cfg.get_simulator()
    n_cells = simulator.region_model.size()
    state_repos = DefaultStateRepository(cfg.model_t, n_cells)
    state = state_repos.get_state(0)
    simulator.run(cfg.time_axis, state)
    simulator.region_model.get_states(state)

    obs_discharge = 0.0
    state = simulator.discharge_adjusted_state(obs_discharge, state)
    self.assertAlmostEqual(0.0, reduce(operator.add, (state[i].kirchner.q
                                                      for i in range(state.size()))))

    simulator.region_model.get_states(state)
    obs_discharge = 10.0  # m3/s
    state = simulator.discharge_adjusted_state(obs_discharge, state)

    # Convert from l/h to m3/s by dividing by 3.6e6
    adj_discharge = reduce(operator.add,
                           (state[i].kirchner.q*cell.geo.area()
                            for (i, cell) in enumerate(simulator.region_model.get_cells())))/3.6e6
    self.assertAlmostEqual(obs_discharge, adj_discharge)
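# A minimal, self-contained sketch of the unit conversion asserted above
# (plain Python, no shyft needed): kirchner.q is in mm/h, and 1 mm over 1 m^2
# is 1 litre, so q [mm/h] * area [m^2] gives l/h; dividing by 3.6e6
# (1000 l/m^3 * 3600 s/h) yields m^3/s. The function name and the numbers
# below are illustrative only.
def cell_discharge_m3s(q_mm_per_h, area_m2):
    return q_mm_per_h*area_m2/3.6e6

assert abs(cell_discharge_m3s(1.0, 3.6e6) - 1.0) < 1e-12  # 1 mm/h over 3.6 km^2 -> 1 m3/s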
def test_run_geo_ts_data_config_simulator(self):
    # These config files are versioned in the shyft git repository
    config_dir = path.join(path.dirname(__file__), "netcdf")
    config_file = path.join(config_dir, "neanidelva_simulation.yaml")
    config_section = "neanidelva"
    cfg = YAMLSimConfig(config_file, config_section, overrides={'config': {'number_of_steps': 168}})
    # The data files are versioned in the shyft-data git repository and are read from
    # ${SHYFTDATA}/netcdf/orchestration-testdata/
    # TODO: Put all config files needed to run this test under the same versioning system (shyft git)
    simulator = ConfigSimulator(cfg)
    n_cells = simulator.region_model.size()
    state_repos = DefaultStateRepository(simulator.region_model.__class__, n_cells)
    simulator.run(cfg.time_axis, state_repos.get_state(0))
    cids = IntVector()
    discharge = simulator.region_model.statistics.discharge(cids)

    # Regression tests on discharge values
    self.assertAlmostEqual(discharge.values[0], 0.1961, 3)
    self.assertAlmostEqual(discharge.values[3], 2.748813, 3)
    # Historical values, kept for reference:
    # self.assertAlmostEqual(discharge.values[6400], 58.8385, 3)  # was 58.9381, 3 before the glacier & fraction adjustments
    # self.assertAlmostEqual(discharge.values[3578], 5.5069, 3)  # glacier_melt, not much, but enough to test
    # self.assertAlmostEqual(simulator.region_model.gamma_snow_response.glacier_melt(cids).values.to_numpy().max(), 0.201625547258, 4)
    self.assertAlmostEqual(simulator.region_model.gamma_snow_response.glacier_melt(cids).values.to_numpy().max(),
                           0.11938204918828155, 4)

    # Regression tests on geo fractions
    self.assertAlmostEqual(simulator.region_model.cells[0].geo.land_type_fractions_info().unspecified(), 1.0, 3)
    self.assertAlmostEqual(simulator.region_model.cells[2].geo.land_type_fractions_info().unspecified(), 0.1433, 3)
    self.assertAlmostEqual(simulator.region_model.cells[2].geo.land_type_fractions_info().forest(), 0.0, 3)
    self.assertAlmostEqual(simulator.region_model.cells[2].geo.land_type_fractions_info().reservoir(), 0.8566, 3)
def test_run_geo_ts_data_config_simulator(self):
    # Set up configuration
    config_dir = path.join(path.dirname(__file__), "netcdf")
    config_file = path.join(config_dir, "neanidelva_simulation.yaml")
    config_section = "neanidelva"
    cfg = YAMLSimConfig(config_file, config_section)

    # Get a simulator
    simulator = ConfigSimulator(cfg)
    n_cells = simulator.region_model.size()
    state_repos = DefaultStateRepository(simulator.region_model.__class__, n_cells)
    simulator.run(cfg.time_axis, state_repos.get_state(0))
    cids = IntVector()
    discharge = simulator.region_model.statistics.discharge(cids)

    # Regression tests on discharge values
    self.assertAlmostEqual(discharge.values[0], 0.1961, 3)
    self.assertAlmostEqual(discharge.values[3], 2.7582, 3)
    self.assertAlmostEqual(discharge.values[6400], 58.9381, 3)
    self.assertAlmostEqual(discharge.values[3578], 5.5069, 3)

    # Regression tests on geo fractions
    self.assertAlmostEqual(simulator.region_model.cells[0].geo.land_type_fractions_info().unspecified(), 1.0, 3)
    self.assertAlmostEqual(simulator.region_model.cells[2].geo.land_type_fractions_info().unspecified(), 0.1433, 3)
    self.assertAlmostEqual(simulator.region_model.cells[2].geo.land_type_fractions_info().forest(), 0.0, 3)
    self.assertAlmostEqual(simulator.region_model.cells[2].geo.land_type_fractions_info().reservoir(), 0.8566, 3)
    self.assertAlmostEqual(simulator.region_model.cells[3383].geo.land_type_fractions_info().lake(), 0.7432, 3)
    self.assertAlmostEqual(simulator.region_model.cells[652].geo.land_type_fractions_info().glacier(), 0.1351, 3)
def run_calibration(self, model_t):
    # Set up configuration
    config_dir = path.join(path.dirname(__file__), "netcdf")
    cfg = orchestration.YAMLConfig(
        "atnsjoen_calibration.yaml", "atnsjoen",
        config_dir=config_dir, data_dir=shyftdata_dir, model_t=model_t)
    time_axis = cfg.time_axis

    # Get a simulator
    simulator = cfg.get_simulator()
    n_cells = simulator.region_model.size()
    state_repos = DefaultStateRepository(cfg.model_t, n_cells)
    simulator.run(time_axis, state_repos.get_state(0))
    cid = 1
    target_discharge_ts = simulator.region_model.statistics.discharge([cid])
    target_discharge = api.TsTransform().to_average(time_axis.time(0),
                                                    time_axis.time(1) - time_axis.time(0),
                                                    time_axis.size(), target_discharge_ts)

    # Perturb parameters
    param = simulator.region_model.get_region_parameter()
    p_vec_orig = [param.get(i) for i in range(param.size())]
    p_vec_min = p_vec_orig[:]
    p_vec_max = p_vec_orig[:]
    p_vec_guess = p_vec_orig[:]
    random.seed(0)
    p_names = []
    for i in range(4):
        p_names.append(param.get_name(i))
        p_vec_min[i] *= 0.5
        p_vec_max[i] *= 1.5
        p_vec_guess[i] = random.uniform(p_vec_min[i], p_vec_max[i])
        if p_vec_min[i] > p_vec_max[i]:  # negative parameters invert the bounds
            p_vec_min[i], p_vec_max[i] = p_vec_max[i], p_vec_min[i]
    p_min = simulator.region_model.parameter_t()
    p_max = simulator.region_model.parameter_t()
    p_guess = simulator.region_model.parameter_t()
    p_min.set(p_vec_min)
    p_max.set(p_vec_max)
    p_guess.set(p_vec_guess)

    # Find parameters
    target_spec = api.TargetSpecificationPts(target_discharge, api.IntVector([cid]), 1.0, api.KLING_GUPTA)
    target_spec_vec = api.TargetSpecificationVector()  # TargetSpecificationVector([target_spec]) does not yet work
    target_spec_vec.append(target_spec)
    p_opt = simulator.optimize(time_axis, state_repos.get_state(0), target_spec_vec, p_guess, p_min, p_max)

    simulator.region_model.set_catchment_parameter(cid, p_opt)
    simulator.run(time_axis, state_repos.get_state(0))
    found_discharge = simulator.region_model.statistics.discharge([cid])

    t_vs = np.array([target_discharge.value(i) for i in range(target_discharge.size())])
    t_ts = np.array([target_discharge.time(i) for i in range(target_discharge.size())])
    f_vs = np.array([found_discharge.value(i) for i in range(found_discharge.size())])
    f_ts = np.array([found_discharge.time(i) for i in range(found_discharge.size())])
    self.assertTrue(np.linalg.norm(t_ts - f_ts) < 1.0e-10)
    self.assertTrue(np.linalg.norm(t_vs - f_vs) < 1.0e-3)
def burn_in_state(simulator, t_start, t_stop, q_obs_m3s_ts):
    dt = deltahours(1)
    n = int(round((t_stop - t_start)/dt))
    time_axis = Timeaxis(t_start, dt, n)
    n_cells = simulator.region_model.size()
    state_repos = DefaultStateRepository(simulator.region_model.__class__, n_cells)
    simulator.run(time_axis, state_repos.get_state(0))
    # Go back in time (to t_start) and adjust q with the observed discharge at that time.
    # This gives a good initial state at t_start.
    return adjust_simulator_state(simulator, t_start, q_obs_m3s_ts)
def burn_in_state(shyft_model, time_axis, q_obs_m3s_at_start):
    n_cells = shyft_model.region_model.size()
    state_repos = DefaultStateRepository(shyft_model.region_model.__class__, n_cells)
    s0 = state_repos.get_state(0)  # get a state to start with
    for s in s0:
        s.kirchner.q = 0.5  # insert some more water than the default 0.001 mm
    shyft_model.run(time_axis, s0)
    # Go back in time (to t_start) and adjust q with the observed discharge at that time.
    # This gives a good initial state at t_start.
    return shyft_model.discharge_adjusted_state(q_obs_m3s_at_start)
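# A hedged usage sketch of the burn-in pattern above; `simulator`, `sim_time_axis`
# and `q_obs_m3s` are hypothetical stand-ins for a configured simulator (e.g. from
# ConfigSimulator(cfg)), its simulation time axis, and the observed discharge in
# m3/s at simulation start. Only calls already used in this file are assumed.
def spin_up_and_run(simulator, sim_time_axis, q_obs_m3s):
    # Burn in over the simulation axis, then restart from the discharge-adjusted
    # state so the state at t0 matches the observation
    s_adj = burn_in_state(simulator, sim_time_axis, q_obs_m3s)
    simulator.run(sim_time_axis, s_adj)
    return simulator.region_model.statistics.discharge(IntVector())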
def test_run_geo_ts_data_simulator(self):
    # Set up configuration
    config_dir = path.join(path.dirname(__file__), "netcdf")
    cfg = orchestration.YAMLConfig(
        "atnsjoen_simulation.yaml", "atnsjoen",
        config_dir=config_dir, data_dir=shyftdata_dir)

    # Get a simulator
    simulator = cfg.get_simulator()
    n_cells = simulator.region_model.size()
    state_repos = DefaultStateRepository(cfg.model_t, n_cells)
    simulator.run(cfg.time_axis, state_repos.get_state(0))
def test_run_geo_ts_data_config_simulator(self):
    # Set up configuration
    config_dir = path.join(path.dirname(__file__), "netcdf")
    config_file = path.join(config_dir, "neanidelva_simulation.yaml")
    config_section = "neanidelva"
    cfg = YAMLSimConfig(config_file, config_section)

    # Get a simulator
    simulator = ConfigSimulator(cfg)
    n_cells = simulator.region_model.size()
    state_repos = DefaultStateRepository(simulator.region_model.__class__, n_cells)
    simulator.run(cfg.time_axis, state_repos.get_state(0))
def test_compute_lwc_percentiles(self):
    # Simulation time axis
    year, month, day, hour = 2010, 9, 1, 0
    dt = api.deltahours(24)
    n_steps = 400
    utc = api.Calendar()  # no offset gives UTC
    t0 = utc.time(api.YMDhms(year, month, day, hour))
    time_axis = api.Timeaxis(t0, dt, n_steps)

    # Some fake ids
    region_id = 0
    interpolation_id = 0

    # Simulation coordinate system
    epsg = "32633"

    # Model
    model_t = pt_gs_k.PTGSKModel

    # Configs and repositories
    dataset_config_file = path.join(path.dirname(__file__), "netcdf", "atnsjoen_datasets.yaml")
    region_config_file = path.join(path.dirname(__file__), "netcdf", "atnsjoen_calibration_region.yaml")
    region_config = RegionConfig(region_config_file)
    model_config = ModelConfig(self.model_config_file)
    dataset_config = YamlContent(dataset_config_file)
    region_model_repository = RegionModelRepository(region_config, model_config, model_t, epsg)
    interp_repos = InterpolationParameterRepository(model_config)
    netcdf_geo_ts_repos = []
    for source in dataset_config.sources:
        station_file = source["params"]["stations_met"]
        netcdf_geo_ts_repos.append(GeoTsRepository(source["params"], station_file, ""))
    geo_ts_repository = GeoTsRepositoryCollection(netcdf_geo_ts_repos)

    # Construct target discharge series
    simulator = DefaultSimulator(region_id, interpolation_id, region_model_repository,
                                 geo_ts_repository, interp_repos, None)
    n_cells = simulator.region_model.size()
    state_repos = DefaultStateRepository(model_t, n_cells)
    cid = 1
    simulator.region_model.set_state_collection(cid, True)
    simulator.run(time_axis, state_repos.get_state(0))
    percentile_list = [10, 25, 50, 75, 90]
    # From here, things could be calculated without copies (except for 't')
    # TODO: Graham, optimize with numba :-)
    cells = simulator.region_model.get_cells()
    lwcs = [np.array(cell.sc.gs_lwc.v) for cell in cells]  # contiguous
    t = np.array([cells[0].sc.gs_lwc.time(i) for i in range(cells[0].sc.gs_lwc.size())])
    percentiles = np.percentile(np.array(lwcs), percentile_list, 0)
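# A numpy-only sketch of the percentile computation at the end of the test above:
# stack one liquid-water-content series per cell into an (n_cells, n_steps) array
# and take percentiles across the cell axis (axis 0). The data here is random,
# purely for illustration.
import numpy as np

n_cells, n_steps = 5, 400
lwcs = np.random.rand(n_cells, n_steps)  # stands in for [cell.sc.gs_lwc.v for cell in cells]
percentiles = np.percentile(lwcs, [10, 25, 50, 75, 90], axis=0)
assert percentiles.shape == (5, n_steps)  # one row per requested percentile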
def test_run_arome_ensemble(self):
    # Simulation time axis
    year, month, day, hour = 2015, 7, 26, 0
    n_hours = 30
    dt = api.deltahours(1)
    utc = api.Calendar()  # no offset gives UTC
    t0 = utc.time(api.YMDhms(year, month, day, hour))
    time_axis = api.Timeaxis(t0, dt, n_hours)

    # Some dummy ids, not needed for the netcdf based repositories
    region_id = 0
    interpolation_id = 0

    # Simulation coordinate system
    epsg = "32633"

    # Model
    model_t = pt_gs_k.PTGSKOptModel

    # Configs and repositories
    region_config = RegionConfig(self.region_config_file)
    model_config = ModelConfig(self.model_config_file)
    region_model_repository = RegionModelRepository(region_config, model_config, model_t, epsg)
    interp_repos = InterpolationParameterRepository(model_config)
    base_dir = path.join(shyftdata_dir, "netcdf", "arome")
    pattern = "fc*.nc"
    try:
        geo_ts_repository = AromeDataRepository(epsg, base_dir, filename=pattern, allow_subset=True)
    except Exception as e:
        print("**** test_run_arome_ensemble: Arome data missing or wrong, test inconclusive ****")
        print("****{}****".format(e))
        self.skipTest("**** test_run_arome_ensemble: Arome data missing or wrong, test "
                      "inconclusive ****\n\t exception:{}".format(e))
    simulator = DefaultSimulator(region_id, interpolation_id, region_model_repository,
                                 geo_ts_repository, interp_repos, None)
    n_cells = simulator.region_model.size()
    state_repos = DefaultStateRepository(model_t, n_cells)
    simulators = simulator.create_ensembles(time_axis, t0, state_repos.get_state(0))
    for s in simulators:
        s.simulate()
def test_compute_lwc_percentiles(self):
    # Simulation time axis
    year, month, day, hour = 2010, 9, 1, 0
    dt = api.deltahours(24)
    n_steps = 400
    utc = api.Calendar()  # no offset gives UTC
    t0 = utc.time(api.YMDhms(year, month, day, hour))
    time_axis = api.Timeaxis(t0, dt, n_steps)

    # Some fake ids
    region_id = 0
    interpolation_id = 0

    # Simulation coordinate system
    epsg = "32633"

    # Model
    model_t = pt_gs_k.PTGSKModel

    # Configs and repositories
    dataset_config_file = path.join(path.dirname(__file__), "netcdf", "atnsjoen_datasets.yaml")
    region_config_file = path.join(path.dirname(__file__), "netcdf", "atnsjoen_calibration_region.yaml")
    region_config = RegionConfig(region_config_file)
    model_config = ModelConfig(self.model_config_file)
    dataset_config = YamlContent(dataset_config_file)
    region_model_repository = RegionModelRepository(region_config, model_config, model_t, epsg)
    interp_repos = InterpolationParameterRepository(model_config)
    netcdf_geo_ts_repos = []
    for source in dataset_config.sources:
        station_file = source["params"]["stations_met"]
        netcdf_geo_ts_repos.append(GeoTsRepository(source["params"], station_file, ""))
    geo_ts_repository = GeoTsRepositoryCollection(netcdf_geo_ts_repos)

    # Construct target discharge series
    simulator = DefaultSimulator(region_id, interpolation_id, region_model_repository,
                                 geo_ts_repository, interp_repos, None)
    n_cells = simulator.region_model.size()
    state_repos = DefaultStateRepository(model_t, n_cells)
    cid = 1
    simulator.region_model.set_state_collection(cid, True)
    simulator.run(time_axis, state_repos.get_state(0))
    # Verify potential evaporation by regression, mm/h
    self.assertAlmostEqual(simulator.region_model.cells[0].rc.pe_output.values[0], 0.039768354, 5)
    percentile_list = [10, 25, 50, 75, 90]
def run_simulator(self, model_t):
    # Simulation time axis
    dt0 = api.YMDhms(2015, 8, 24, 6)
    n_hours = 30
    dt = api.deltahours(1)
    utc = api.Calendar()  # no offset gives UTC
    t0 = utc.time(dt0)
    time_axis = api.Timeaxis(t0, dt, n_hours)

    # Some dummy ids, not needed for the netcdf based repositories
    region_id = 0
    interpolation_id = 0

    # Simulation coordinate system
    epsg = "32633"

    # Configs and repositories
    region_config = RegionConfig(self.region_config_file)
    model_config = ModelConfig(self.model_config_file)
    region_model_repository = RegionModelRepository(region_config, model_config, model_t, epsg)
    interp_repos = InterpolationParameterRepository(model_config)
    date_str = "{}{:02}{:02}_{:02}".format(dt0.year, dt0.month, dt0.day, dt0.hour)
    base_dir = path.join(shyftdata_dir, "repository", "arome_data_repository")
    f1 = "arome_metcoop_red_default2_5km_{}.nc".format(date_str)
    f2 = "arome_metcoop_red_test2_5km_{}.nc".format(date_str)
    ar1 = AromeDataRepository(epsg, base_dir, filename=f1, allow_subset=True)
    ar2 = AromeDataRepository(epsg, base_dir, filename=f2, elevation_file=f1, allow_subset=True)
    geo_ts_repository = GeoTsRepositoryCollection([ar1, ar2])
    simulator = DefaultSimulator(region_id, interpolation_id, region_model_repository,
                                 geo_ts_repository, interp_repos, None)
    n_cells = simulator.region_model.size()
    state_repos = DefaultStateRepository(model_t, n_cells)
    simulator.run(time_axis, state_repos.get_state(0))
def test_snow_and_ground_water_response_calibration(self):
    """
    Test the dual calibration strategy:
      * First fit the three Kirchner parameters for the ground water
        response during July, August, and September.
      * Then fit two snow routine parameters (tx and max_water) from
        November to April.
    """
    # Simulation time axis
    year, month, day, hour = 2010, 9, 1, 0
    dt = api.deltahours(24)
    n_steps = 400
    utc = api.Calendar()  # no offset gives UTC
    t0 = utc.time(api.YMDhms(year, month, day, hour))
    time_axis = api.Timeaxis(t0, dt, n_steps)

    # Some fake ids
    region_id = 0
    interpolation_id = 0

    # Simulation coordinate system
    epsg = "32633"

    # Model
    model_t = pt_gs_k.PTGSKOptModel

    # Configs and repositories
    dataset_config_file = path.join(path.dirname(__file__), "netcdf", "atnsjoen_datasets.yaml")
    region_config_file = path.join(path.dirname(__file__), "netcdf", "atnsjoen_calibration_region.yaml")
    region_config = RegionConfig(region_config_file)
    model_config = ModelConfig(self.model_config_file)
    dataset_config = YamlContent(dataset_config_file)
    region_model_repository = RegionModelRepository(region_config, model_config, model_t, epsg)
    interp_repos = InterpolationParameterRepository(model_config)
    netcdf_geo_ts_repos = []
    for source in dataset_config.sources:
        station_file = source["params"]["stations_met"]
        netcdf_geo_ts_repos.append(GeoTsRepository(source["params"], station_file, ""))
    geo_ts_repository = GeoTsRepositoryCollection(netcdf_geo_ts_repos)

    # Construct the target discharge series
    simulator = DefaultSimulator(region_id, interpolation_id, region_model_repository,
                                 geo_ts_repository, interp_repos, None)
    n_cells = simulator.region_model.size()
    state_repos = DefaultStateRepository(model_t, n_cells)
    simulator.run(time_axis, state_repos.get_state(0))
    cid = 1
    target_discharge = simulator.region_model.statistics.discharge([cid])

    # Construct kirchner parameter bounds
    param = simulator.region_model.parameter_t(simulator.region_model.get_region_parameter())
    print_param("True solution", param)
    kirchner_param_min = simulator.region_model.parameter_t(param)
    kirchner_param_max = simulator.region_model.parameter_t(param)
    # Kirchner parameters are quite abstract (no physical meaning), so simply scale them
    kirchner_param_min.kirchner.c1 *= 0.8
    kirchner_param_min.kirchner.c2 *= 0.8
    kirchner_param_min.kirchner.c3 *= 0.8
    kirchner_param_max.kirchner.c1 *= 1.2
    kirchner_param_max.kirchner.c2 *= 1.2
    kirchner_param_max.kirchner.c3 *= 1.2
    # kirchner_t_start = utc.time(api.YMDhms(2011, 4, 1, 0))
    # kirchner_time_axis = api.Timeaxis(kirchner_t_start, dt, 150)
    kirchner_time_axis = time_axis

    # Construct gamma snow parameter bounds (realistic tx and max_water)
    gamma_snow_param_min = simulator.region_model.parameter_t(param)
    gamma_snow_param_max = simulator.region_model.parameter_t(param)
    gamma_snow_param_min.gs.tx = -1.0  # min snow/rain temperature threshold
    gamma_snow_param_min.gs.max_water = 0.05  # min 5% max water in snow in coastal regions
    gamma_snow_param_max.gs.tx = 1.0
    gamma_snow_param_max.gs.max_water = 0.25  # max 25% max water content, or we get too little melt
    gs_t_start = utc.time(api.YMDhms(2010, 11, 1, 0))
    gs_time_axis = api.Timeaxis(gs_t_start, dt, 250)
    # gs_time_axis = time_axis

    # Find parameters
    target_spec = api.TargetSpecificationPts(target_discharge, api.IntVector([cid]), 1.0, api.KLING_GUPTA)
    target_spec_vec = api.TargetSpecificationVector()  # TODO: we currently don't have a list initializer for vectors
    target_spec_vec.append(target_spec)

    # Construct a fake, perturbed starting point for the calibration
    p_vec = [param.get(i) for i in range(param.size())]
    for i, name in enumerate([param.get_name(i) for i in range(len(p_vec))]):
        if name not in ("c1", "c2", "c3", "TX", "max_water"):
            continue
        if name in ("c1", "c2", "c3"):
            p_vec[i] = random.uniform(0.8*p_vec[i], 1.2*p_vec[i])
        elif name == "TX":
            p_vec[i] = random.uniform(gamma_snow_param_min.gs.tx, gamma_snow_param_max.gs.tx)
        elif name == "max_water":
            p_vec[i] = random.uniform(gamma_snow_param_min.gs.max_water, gamma_snow_param_max.gs.max_water)
    param.set(p_vec)
    print_param("Initial guess", param)

    # Two-pass optimization: first the ground water response, then the snow routine
    kirchner_p_opt = simulator.optimize(kirchner_time_axis, state_repos.get_state(0),
                                        target_spec_vec, param,
                                        kirchner_param_min, kirchner_param_max)
    gamma_snow_p_opt = simulator.optimize(gs_time_axis, state_repos.get_state(0),
                                          target_spec_vec, kirchner_p_opt,
                                          gamma_snow_param_min, gamma_snow_param_max)
    print_param("Half way result", kirchner_p_opt)
    print_param("Result", gamma_snow_p_opt)

    simulator.region_model.set_catchment_parameter(cid, gamma_snow_p_opt)
    simulator.run(time_axis, state_repos.get_state(0))
    found_discharge = simulator.region_model.statistics.discharge([cid])

    t_vs = np.array(target_discharge.v)
    t_ts = np.array([target_discharge.time(i) for i in range(target_discharge.size())])
    f_vs = np.array(found_discharge.v)
    f_ts = np.array([found_discharge.time(i) for i in range(found_discharge.size())])
def test_run_observed_then_arome_and_store(self):
    """
    Start at Tistel 2015.09.01 with a dummy state (some kirchner water),
    use observations around Tistel (geo_ts_repository) and simulate forward
    to 2015.10.01 (store discharge and catchment level precip/temp),
    then use the arome forecast for 65 hours (needs arome data for this
    period in the arome directory), and finally store the arome results.
    """
    utc = Calendar()  # no offset gives UTC
    time_axis = Timeaxis(utc.time(YMDhms(2015, 9, 1, 0)), deltahours(1), 30*24)
    fc_time_axis = Timeaxis(utc.time(YMDhms(2015, 10, 1, 0)), deltahours(1), 65)
    interpolation_id = 0
    ptgsk = DefaultSimulator("Tistel-ptgsk", interpolation_id, self.region_model_repository,
                             self.geo_ts_repository, self.interpolation_repository, None)
    n_cells = ptgsk.region_model.size()
    ptgsk_state = DefaultStateRepository(ptgsk.region_model.__class__, n_cells)
    ptgsk.region_model.set_state_collection(-1, True)  # collect state so we can inspect it
    s0 = ptgsk_state.get_state(0)
    for i in range(s0.size()):  # add some juice to get started
        s0[i].kirchner.q = 0.5
    ptgsk.run(time_axis, s0)

    print("Done simulation, testing that we can extract data from the model")
    cids = api.IntVector()  # an empty vector pulls out all catchment ids
    model = ptgsk.region_model  # fetch out the model
    sum_discharge = model.statistics.discharge(cids)
    self.assertIsNotNone(sum_discharge)
    avg_temperature = model.statistics.temperature(cids)
    avg_precipitation = model.statistics.precipitation(cids)
    self.assertIsNotNone(avg_precipitation)
    self.assertIsNotNone(avg_temperature)
    for time_step in range(time_axis.size()):
        precip_raster = model.statistics.precipitation(cids, time_step)  # example raster output
        self.assertEqual(precip_raster.size(), n_cells)
    avg_gs_lwc = model.gamma_snow_state.lwc(cids)  # sca skaugen|gamma
    self.assertIsNotNone(avg_gs_lwc)
    # Other variables: lwc surface_heat alpha melt_mean melt iso_pot_energy temp_sw
    avg_gs_output = model.gamma_snow_response.outflow(cids)
    self.assertIsNotNone(avg_gs_output)

    print("done. now save to db")
    # SmGTsRepository(PROD, FC_PROD)
    save_list = [
        TsStoreItem(u'/test/x/shyft/tistel/discharge_m3s', lambda m: m.statistics.discharge(cids)),
        TsStoreItem(u'/test/x/shyft/tistel/temperature', lambda m: m.statistics.temperature(cids)),
        TsStoreItem(u'/test/x/shyft/tistel/precipitation', lambda m: m.statistics.precipitation(cids)),
    ]
    tss = TimeseriesStore(SmGTsRepository(PREPROD, FC_PREPROD), save_list)
    self.assertTrue(tss.store_ts(ptgsk.region_model))

    print("Run forecast arome")
    endstate = ptgsk.region_model.state_t.vector_t()
    ptgsk.region_model.get_states(endstate)  # get the state at the end of the observed period
    ptgsk.geo_ts_repository = self.arome_repository  # switch to arome here
    ptgsk.run_forecast(fc_time_axis, fc_time_axis.start, endstate)  # now forecast
    print("Done forecast")
    fc_save_list = [
        TsStoreItem(u'/test/x/shyft/tistel/fc_discharge_m3s', lambda m: m.statistics.discharge(cids)),
        TsStoreItem(u'/test/x/shyft/tistel/fc_temperature', lambda m: m.statistics.temperature(cids)),
        TsStoreItem(u'/test/x/shyft/tistel/fc_precipitation', lambda m: m.statistics.precipitation(cids)),
        TsStoreItem(u'/test/x/shyft/tistel/fc_radiation', lambda m: m.statistics.radiation(cids)),
        TsStoreItem(u'/test/x/shyft/tistel/fc_rel_hum', lambda m: m.statistics.rel_hum(cids)),
        TsStoreItem(u'/test/x/shyft/tistel/fc_wind_speed', lambda m: m.statistics.wind_speed(cids)),
    ]
    TimeseriesStore(SmGTsRepository(PREPROD, FC_PREPROD), fc_save_list).store_ts(ptgsk.region_model)
    print("Done save to db")