def test_run_geo_ts_data_config_simulator(self):
    """Run the neanidelva YAML-configured simulation and regression-check results.

    Builds a ConfigSimulator from the local netcdf YAML config (limited to
    168 steps via an override), runs it from a default state, and asserts
    discharge, glacier melt and land-type geo fractions against previously
    recorded values.
    """
    # These config files are versioned in shyft git
    config_dir = path.join(path.dirname(__file__), "netcdf")
    config_file = path.join(config_dir, "neanidelva_simulation.yaml")
    config_section = "neanidelva"
    # Limit to 168 steps to keep the regression test fast.
    cfg = YAMLSimConfig(config_file, config_section,
                        overrides={'config': {'number_of_steps': 168}})
    # These config files are versioned in shyft-data git. Read from ${SHYFTDATA}/netcdf/orchestration-testdata/
    # TODO: Put all config files needed to run this test under the same versioning system (shyft git)
    simulator = ConfigSimulator(cfg)
    n_cells = simulator.region_model.size()
    state_repos = DefaultStateRepository(simulator.region_model.__class__, n_cells)
    simulator.run(cfg.time_axis, state_repos.get_state(0))
    cids = IntVector()  # empty vector -> statistics over all catchments
    discharge = simulator.region_model.statistics.discharge(cids)
    # Regression tests on discharge values
    self.assertAlmostEqual(discharge.values[0], 0.0957723, 3)
    self.assertAlmostEqual(discharge.values[3], 3.9098, 3)
    # glacier_melt, not much, but enough to test
    # (value updated after glacier & fraction adjustments)
    self.assertAlmostEqual(
        simulator.region_model.gamma_snow_response.glacier_melt(cids).values.to_numpy().max(),
        0.12393672891230645, 4)
    # Regression tests on geo fractions
    self.assertAlmostEqual(
        simulator.region_model.cells[0].geo.land_type_fractions_info().unspecified(), 1.0, 3)
    self.assertAlmostEqual(
        simulator.region_model.cells[2].geo.land_type_fractions_info().unspecified(), 0.1433, 3)
    self.assertAlmostEqual(
        simulator.region_model.cells[2].geo.land_type_fractions_info().forest(), 0.0, 3)
    self.assertAlmostEqual(
        simulator.region_model.cells[2].geo.land_type_fractions_info().reservoir(), 0.8566, 3)
def continuous_calibration():
    """Run a continuously re-calibrated Tistel simulation and plot the result.

    Burns in an initial state, then steps day by day through the recalibration
    period: for each day, re-optimize model parameters over the previous
    ``num_opt_days`` days (Kling-Gupta target against observed discharge),
    adjust the simulator state to observations and advance 24 hours.
    Accumulated simulated discharge is plotted against observations.

    NOTE(review): the original had two consecutive assignments to
    ``recal_stop``; the first (2011-10-30) was dead code shadowed by the
    second (2012-05-30) and has been removed.
    """
    utc = Calendar()
    t_start = utc.time(YMDhms(2011, 9, 1))
    t_fc_start = utc.time(YMDhms(2015, 10, 1))
    dt = deltahours(1)
    n_obs = int(round((t_fc_start - t_start)/dt))
    obs_time_axis = TimeAxisFixedDeltaT(t_start, dt, n_obs + 1)
    q_obs_m3s_ts = observed_tistel_discharge(obs_time_axis.total_period())
    ptgsk = create_tistel_simulator(PTGSKOptModel,
                                    tistel.geo_ts_repository(tistel.grid_spec.epsg()))
    initial_state = burn_in_state(ptgsk, t_start,
                                  utc.time(YMDhms(2012, 9, 1)), q_obs_m3s_ts)
    num_opt_days = 30
    # Step forward num_opt_days days and store the state for each day:
    recal_start = t_start + deltahours(num_opt_days*24)
    t = t_start
    state = initial_state
    opt_states = {t: state}
    while t < recal_start:
        ptgsk.run(TimeAxisFixedDeltaT(t, dt, 24), state)
        t += deltahours(24)
        state = ptgsk.reg_model_state
        opt_states[t] = state
    recal_stop = utc.time(YMDhms(2012, 5, 30))
    curr_time = recal_start
    q_obs_avg = TsTransform().to_average(t_start, dt, n_obs + 1, q_obs_m3s_ts)
    target_spec = TargetSpecificationPts(q_obs_avg, IntVector([0]), 1.0, KLING_GUPTA)
    target_spec_vec = TargetSpecificationVector([target_spec])
    i = 0
    times = []
    values = []
    p, p_min, p_max = construct_calibration_parameters(ptgsk)
    while curr_time < recal_stop:
        print(i)
        i += 1
        # Re-optimize over the trailing num_opt_days window.
        opt_start = curr_time - deltahours(24*num_opt_days)
        opt_state = opt_states.pop(opt_start)
        # Start each optimization from the model's current parameters.
        p = ptgsk.region_model.get_region_parameter()
        p_opt = ptgsk.optimize(TimeAxisFixedDeltaT(opt_start, dt, 24*num_opt_days),
                               opt_state, target_spec_vec, p, p_min, p_max,
                               tr_stop=1.0e-5)
        ptgsk.region_model.set_region_parameter(p_opt)
        # Correct the state towards observed discharge before advancing one day.
        corr_state = adjust_simulator_state(ptgsk, curr_time, q_obs_m3s_ts)
        ptgsk.run(TimeAxisFixedDeltaT(curr_time, dt, 24), corr_state)
        curr_time += deltahours(24)
        opt_states[curr_time] = ptgsk.reg_model_state
        # Accumulate this day's simulated discharge for plotting.
        # NOTE(review): reconstructed as inside the loop — times/values are
        # initialized empty and extended, which only accumulates data if done
        # per iteration; confirm against the original formatting.
        discharge = ptgsk.region_model.statistics.discharge([0])
        times.extend(discharge.time(i) for i in range(discharge.size()))
        values.extend(list(np.array(discharge.v)))
    plt.plot(utc_to_greg(times), values)
    plot_results(None, q_obs=observed_tistel_discharge(UtcPeriod(recal_start, recal_stop)))
    set_calendar_formatter(Calendar())
    #plt.interactive(1)
    plt.title("Continuously recalibrated discharge vs observed")
    plt.xlabel("Time in UTC")
    plt.ylabel(r"Discharge in $\mathbf{m^3s^{-1}}$",
               verticalalignment="top", rotation="horizontal")
    plt.gca().yaxis.set_label_coords(0, 1.1)
def test_run_geo_ts_data_config_simulator(self):
    """Run the neanidelva simulation from shyft-data configs and regression-check.

    Reads the YAML config from ${SHYFTDATA}/neanidelv/yaml_config (limited to
    168 steps), verifies interpolation parameters, runs the simulation and
    asserts discharge values and land-type geo fractions against recorded
    results.
    """
    # These config files are versioned in shyft-data git
    config_dir = path.join(shyftdata_dir, "neanidelv", "yaml_config")
    config_file = path.join(config_dir, "neanidelva_simulation.yaml")
    config_section = "neanidelva"
    # Limit to 168 steps to keep the regression test fast.
    cfg = YAMLSimConfig(config_file, config_section,
                        overrides={'config': {'number_of_steps': 168}})
    # These config files are versioned in shyft-data git. Read from ${SHYFTDATA}/netcdf/orchestration-testdata/
    simulator = ConfigSimulator(cfg)
    # Regression tests on interpolation parameters
    self.assertAlmostEqual(
        simulator.region_model.interpolation_parameter.precipitation.scale_factor,
        1.01, 3)
    state_repos = DefaultStateRepository(simulator.region_model)
    simulator.run(cfg.time_axis, state_repos.get_state(0))
    cids = IntVector()  # empty vector -> statistics over all catchments
    discharge = simulator.region_model.statistics.discharge(cids)
    # Regression tests on discharge values
    self.assertAlmostEqual(discharge.values[0], 80.23843199, 3)
    self.assertAlmostEqual(discharge.values[3], 82.50344985, 3)
    # Regression tests on geo fractions
    self.assertAlmostEqual(
        simulator.region_model.cells[0].geo.land_type_fractions_info().unspecified(), 1.0, 3)
    self.assertAlmostEqual(
        simulator.region_model.cells[2].geo.land_type_fractions_info().unspecified(), 0.1433, 3)
    self.assertAlmostEqual(
        simulator.region_model.cells[2].geo.land_type_fractions_info().forest(), 0.0, 3)
    self.assertAlmostEqual(
        simulator.region_model.cells[2].geo.land_type_fractions_info().reservoir(), 0.8566, 3)
def test_run_geo_ts_data_config_simulator(self):
    """Run the neanidelva sample simulation and verify discharge and geo fractions."""
    # set up configuration
    cfg_dir = path.join(path.dirname(__file__), "netcdf")
    cfg_file = path.join(cfg_dir, "neanidelva_simulation.yaml")
    cfg = YAMLSimConfig(cfg_file, "neanidelva")
    # get a simulator
    sim = ConfigSimulator(cfg)
    cell_count = sim.region_model.size()
    states = DefaultStateRepository(sim.region_model.__class__, cell_count)
    sim.run(cfg.time_axis, states.get_state(0))
    catchment_ids = IntVector()
    q = sim.region_model.statistics.discharge(catchment_ids)

    # regression test on discharge values
    for idx, expected in ((0, 0.1961), (3, 2.7582), (6400, 58.9381), (3578, 5.5069)):
        self.assertAlmostEqual(q.values[idx], expected, 3)

    # regression test on geo fractions
    def ltf(cell_idx):
        return sim.region_model.cells[cell_idx].geo.land_type_fractions_info()

    self.assertAlmostEqual(ltf(0).unspecified(), 1.0, 3)
    self.assertAlmostEqual(ltf(2).unspecified(), 0.1433, 3)
    self.assertAlmostEqual(ltf(2).forest(), 0.0, 3)
    self.assertAlmostEqual(ltf(2).reservoir(), 0.8566, 3)
    self.assertAlmostEqual(ltf(3383).lake(), 0.7432, 3)
    self.assertAlmostEqual(ltf(652).glacier(), 0.1351, 3)
def test_run_geo_ts_data_config_simulator(self):
    """Run the neanidelva simulation (local netcdf configs) and regression-check.

    Builds a ConfigSimulator from the local netcdf YAML config (limited to
    168 steps via an override), runs it from a default state, and asserts
    discharge values and land-type geo fractions against recorded results.
    """
    # These config files are versioned in shyft git
    config_dir = path.join(path.dirname(__file__), "netcdf")
    config_file = path.join(config_dir, "neanidelva_simulation.yaml")
    config_section = "neanidelva"
    # Limit to 168 steps to keep the regression test fast.
    cfg = YAMLSimConfig(config_file, config_section,
                        overrides={'config': {'number_of_steps': 168}})
    # These config files are versioned in shyft-data git. Read from ${SHYFTDATA}/netcdf/orchestration-testdata/
    # TODO: Put all config files needed to run this test under the same versioning system (shyft git)
    simulator = ConfigSimulator(cfg)
    state_repos = DefaultStateRepository(simulator.region_model)
    simulator.run(cfg.time_axis, state_repos.get_state(0))
    cids = IntVector()  # empty vector -> statistics over all catchments
    discharge = simulator.region_model.statistics.discharge(cids)
    # Regression tests on discharge values
    self.assertAlmostEqual(discharge.values[0], 0.1001063, 3)
    self.assertAlmostEqual(discharge.values[3], 3.9141928, 3)
    # Regression tests on geo fractions
    self.assertAlmostEqual(
        simulator.region_model.cells[0].geo.land_type_fractions_info().unspecified(), 1.0, 3)
    self.assertAlmostEqual(
        simulator.region_model.cells[2].geo.land_type_fractions_info().unspecified(), 0.1433, 3)
    self.assertAlmostEqual(
        simulator.region_model.cells[2].geo.land_type_fractions_info().forest(), 0.0, 3)
    self.assertAlmostEqual(
        simulator.region_model.cells[2].geo.land_type_fractions_info().reservoir(), 0.8566, 3)
def test_functionality_hosting_localhost(self):
    """End-to-end round-trip test of the DTSS server/client on localhost.

    Starts a DtsServer with test-provided read/find/store callbacks, then
    exercises store_ts, evaluate, percentiles and find through a DtsClient,
    verifies callback-triggered exceptions propagate to the client, and
    finally checks results against locally computed equivalents.
    """
    # setup data to be calculated
    utc = Calendar()
    d = deltahours(1)
    d24 = deltahours(24)
    n = 240       # hourly points
    n24 = 10      # daily points
    t = utc.time(2016, 1, 1)
    ta = TimeAxis(t, d, n)
    ta24 = TimeAxis(t, d24, n24)
    n_ts = 100
    percentile_list = IntVector([0, 35, 50, 65, 100])
    tsv = TsVector()
    store_tsv = TsVector()  # something we store at server side
    for i in range(n_ts):
        pts = TimeSeries(ta, np.linspace(start=0, stop=1.0, num=ta.size()),
                         point_fx.POINT_AVERAGE_VALUE)
        tsv.append(float(1 + i / 10) * pts)  # scaled expression per series
        store_tsv.append(TimeSeries("cache://test/{0}".format(i), pts))
    # generate a bound pts to store
    dummy_ts = TimeSeries('dummy://a')
    tsv.append(dummy_ts.integral(ta))
    self.assertGreater(len(ts_stringify(tsv[0])), 10)  # just ensure ts_stringify work on expr.
    # then start the server
    dtss = DtsServer()
    port_no = find_free_port()
    host_port = 'localhost:{0}'.format(port_no)
    dtss.set_listening_port(port_no)
    dtss.cb = self.dtss_read_callback
    dtss.find_cb = self.dtss_find_callback
    dtss.store_ts_cb = self.dtss_store_callback
    dtss.start_async()
    dts = DtsClient(StringVector([host_port]), True, 1000)  # as number of hosts
    # then try something that should work
    dts.store_ts(store_tsv)
    r1 = dts.evaluate(tsv, ta.total_period())
    tsv1x = tsv.inside(-0.5, 0.5)
    tsv1x.append(tsv1x[-1].decode(start_bit=1, n_bits=1))  # just to verify serialization/bind
    tsv1x.append(store_tsv[1].derivative())
    tsv1x.append(store_tsv[1].pow(2.0))  # just for verify pow serialization(well, it's a bin-op..)
    r1x = dts.evaluate(tsv1x, ta.total_period())
    r2 = dts.percentiles(tsv, ta.total_period(), ta24, percentile_list)
    r3 = dts.find('netcdf://dummy\.nc/ts\d')
    # flag the read callback to raise -> the client call should fail
    self.rd_throws = True
    ex_count = 0
    try:
        rx = dts.evaluate(tsv, ta.total_period())
    except RuntimeError as e:
        ex_count = 1
        pass
    # same for the find callback
    self.rd_throws = True
    try:
        fx = dts.find('should throw')
    except RuntimeError as e:
        ex_count += 1
        pass
    dts.close()  # close connection (will use context manager later)
    dtss.clear()  # close server
    # both failing calls above must have raised
    self.assertEqual(ex_count, 2)
    self.assertEqual(len(r1), len(tsv))
    self.assertEqual(self.callback_count, 4)
    # server-evaluated expressions must match local ones
    for i in range(n_ts - 1):
        self.assertEqual(r1[i].time_axis, tsv[i].time_axis)
        assert_array_almost_equal(r1[i].values.to_numpy(),
                                  tsv[i].values.to_numpy(), decimal=4)
    self.assertEqual(len(r2), len(percentile_list))
    # bind the dummy ts locally so tsv.percentiles can be computed for comparison
    dummy_ts.bind(
        TimeSeries(ta, fill_value=1.0, point_fx=point_fx.POINT_AVERAGE_VALUE))
    p2 = tsv.percentiles(ta24, percentile_list)
    # r2 = tsv.percentiles(ta24,percentile_list)
    for i in range(len(p2)):
        self.assertEqual(r2[i].time_axis, p2[i].time_axis)
        assert_array_almost_equal(r2[i].values.to_numpy(),
                                  p2[i].values.to_numpy(), decimal=1)
    self.assertEqual(self.find_count, 2)
    self.assertEqual(len(r3), 10)  # 0..9
    for i in range(len(r3)):
        self.assertEqual(r3[i], self.ts_infos[i])
    self.assertIsNotNone(r1x)
    # the store callback should have been invoked exactly once with all series
    self.assertEqual(1, len(self.stored_tsv))
    self.assertEqual(len(store_tsv), len(self.stored_tsv[0]))
    for i in range(len(store_tsv)):
        self.assertEqual(self.stored_tsv[0][i].ts_id(), store_tsv[i].ts_id())