Example #1
 def test_forecast_average_slice(self):
     """
     Demo and test TsVector.average_slice(lead_time,dt,n)
     """
     utc = Calendar()
     t0 = utc.time(2017, 1, 1)
     dt = deltahours(1)
     n = 66  # typical arome
     fc_dt_n_hours = 6
     fc_dt = deltahours(fc_dt_n_hours)
     fc_n = 4 * 10  # 4 forecasts per day for 10 days
     fc_v = self._create_forecasts(t0, dt, n, fc_dt, fc_n)
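     # fc_v: fc_n forecast series, each n hourly steps long, with start times spaced fc_dt apart (per the _create_forecasts arguments)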
     for lead_time_hours in range(12):
         for slice_length_units in [1, 2, 3, 4, 6, 12]:
             for dt_hours in [1, 2, 3]:
                 slice_v = fc_v.average_slice(deltahours(lead_time_hours),
                                              deltahours(dt_hours),
                                              slice_length_units)
                 self.assertEqual(len(slice_v), len(fc_v))
                 # then loop over slice_v and prove each element equals
                 # the average of the same portion of the original
                 for s, f in zip(slice_v, fc_v):
                     ta = TimeAxis(
                         f.time_axis.time(0) + deltahours(lead_time_hours),
                         deltahours(dt_hours), slice_length_units)
                     ts_expected = f.average(ta)
                     self.assertTrue(s.time_axis == ts_expected.time_axis)
                     self.assertTrue(
                         np.allclose(s.values.to_numpy(),
                                     ts_expected.values.to_numpy()))
Example #2
    def test_raise_exception_when_no_data_in_request_period(self):
        utc_calendar = Calendar()
        netcdf_repository = self._construct_from_test_data()
        netcdf_repository.raise_if_no_data = True  # yes, for now, just imagine this could work.
        self.assertIsNotNone(netcdf_repository)
        utc_period = UtcPeriod(
            utc_calendar.time(YMDhms(2017, 1, 1, 0, 0, 0)),     # a period where there is no data in
            utc_calendar.time(YMDhms(2020, 12, 31, 0, 0, 0)))   # the file supplied
        type_source_map = dict()
        type_source_map['temperature'] = TemperatureSource

        #def test_function():
        #
        #    return netcdf_repository.get_timeseries(
        #                                        type_source_map,
        #                                        geo_location_criteria=None,
        #                                        utc_period=utc_period)

        #self.assertRaises(RuntimeError, test_function)
        self.assertRaises(
            RuntimeError, netcdf_repository.get_timeseries, type_source_map,
            **{
                'geo_location_criteria': None,
                'utc_period': utc_period
            })
Example #3
    def test_fixed_tsv_empty(self) -> None:
        """Test that an empty TsVector is generated by fixed_tsv when given an empty sequence of values."""
        cal = Calendar()
        period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))

        tsv = fixed_tsv(period, [])
        self.assertEqual(len(tsv), 0)
Example #4
 def test_glacier_melt_ts_m3s(self):
     utc = Calendar()
     t0 = utc.time(2016, 10, 1)
     dt = deltahours(1)
     n = 240
     ta = TimeAxis(t0, dt, n)
     area_m2 = 487 * 1000 * 1000  # Jostedalsbreen, largest in Europe
     temperature = TimeSeries(ta=ta,
                              fill_value=10.0,
                              point_fx=fx_policy.POINT_AVERAGE_VALUE)
     sca_values = dv.from_numpy(np.linspace(area_m2 * 1.0, 0.0, num=n))
     sca = TimeSeries(ta=ta,
                      values=sca_values,
                      point_fx=fx_policy.POINT_AVERAGE_VALUE)
     gf = 1.0 * area_m2
     dtf = 6.0
     melt_m3s = create_glacier_melt_ts_m3s(
         temperature, sca, gf,
         dtf)  # Here we get back a melt_ts, that we can do ts-stuff with
     self.assertIsNotNone(melt_m3s)
     full_melt_m3s = glacier_melt_step(dtf, 10.0, 0.0, gf)
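     # sca falls linearly from full cover to zero, so the melt is expected to ramp linearly from 0 up to the full-melt rate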
     expected_melt_m3s = np.linspace(0.0, full_melt_m3s, num=n)
     assert_array_almost_equal(expected_melt_m3s,
                               melt_m3s.values.to_numpy(), 4)
     # Just to check we can work with the result as a ts in all aspects
     mx2 = melt_m3s * 2.0
     emx2 = expected_melt_m3s * 2.0
     assert_array_almost_equal(emx2, mx2.values.to_numpy(), 4)
Example #5
    def test_windowed_percentiles_tsv_values(self) -> None:
        """Test that a TsVector is generated by windowed_percentiles_tsv with time-series
        fulfilling some properties of being percentiles of the data ts."""
        cal = Calendar()
        period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))

        data = np.linspace(-2, 2, 24 * 7)
        data_ts = TimeSeries(TimeAxis(0, Calendar.HOUR, len(data)), data,
                             POINT_INSTANT_VALUE)

        # compute
        percentiles = [0, 10, 50, 90, 100]
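        # percentiles are ascending; tsv[j] is the series for percentiles[j], which the ordering check below relies on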
        tsv = windowed_percentiles_tsv(data_ts, period, 3 * Calendar.HOUR,
                                       12 * Calendar.HOUR, percentiles,
                                       self.client, cal)
        self.assertEqual(len(tsv), 5)

        # assert that the time-series have the correct properties for being percentile series
        for i in range(len(tsv[0])):
            prev_v = tsv[0].values[i]
            for j in range(len(percentiles) - 1):
                v = tsv[j + 1].values[i]
                # both values will be NaN at the end - that is ok
                if math.isnan(prev_v) and math.isnan(v):
                    continue
                # check that values for lower percentiles never exceed those for higher percentiles
                self.assertLessEqual(prev_v, v)
                prev_v = v
Example #6
    def test_error_handling(self):
        utc = Calendar()
        t0 = utc.time(2018, 1, 1)
        dt = deltahours(1)
        dv = DoubleVector()
        dv[:] = [1.0, 2.0, 2.5, 1.9, 3.0, 3.1,
                 float('nan')]  # also verify nan-handling
        ts = TimeSeries(TimeAxis(t0, dt, len(dv)), dv, POINT_AVERAGE_VALUE)
        try:
            ts.decode(start_bit=0, n_bits=0)
            self.assertTrue(False, 'This should throw, n_bits must be > 0')
        except RuntimeError:
            pass

        try:
            ts.decode(start_bit=41, n_bits=12)
            self.assertTrue(False, 'This should throw, start_bit + n_bits must be <= 52')
        except RuntimeError:
            pass

        try:
            ts.decode(start_bit=-1, n_bits=12)
            self.assertTrue(False, 'This should throw, start_bit must be >= 0')
        except RuntimeError:
            pass
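
The three boundary checks above can also be written with unittest's assertRaises used as a context manager; a minimal sketch, assuming the same ts object as in the example:

        with self.assertRaises(RuntimeError):
            ts.decode(start_bit=0, n_bits=0)    # n_bits must be > 0
        with self.assertRaises(RuntimeError):
            ts.decode(start_bit=41, n_bits=12)  # start_bit + n_bits must be <= 52
        with self.assertRaises(RuntimeError):
            ts.decode(start_bit=-1, n_bits=12)  # start_bit must be >= 0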
Example #7
def ensemble_demo():
    utc = Calendar()
    t_start = utc.time(YMDhms(2011, 9, 1))
    t_fc_ens_start = utc.time(YMDhms(2015, 7, 26))
    disp_start = utc.time(YMDhms(2015, 7, 20))
    dt = deltahours(1)
    n_obs = int(round((t_fc_ens_start - t_start)/dt))
    n_fc_ens = 30
    n_disp = int(round((t_fc_ens_start - disp_start)/dt)) + n_fc_ens + 24*7

    obs_time_axis = Timeaxis(t_start, dt, n_obs + 1)
    fc_ens_time_axis = Timeaxis(t_fc_ens_start, dt, n_fc_ens)
    display_time_axis = Timeaxis(disp_start, dt, n_disp)

    q_obs_m3s_ts = observed_tistel_discharge(obs_time_axis.total_period())
    ptgsk = create_tistel_simulator(PTGSKOptModel, tistel.geo_ts_repository(tistel.grid_spec.epsg()))
    initial_state = burn_in_state(ptgsk, t_start, utc.time(YMDhms(2012, 9, 1)), q_obs_m3s_ts)

    ptgsk.run(obs_time_axis, initial_state)
    current_state = adjust_simulator_state(ptgsk, t_fc_ens_start, q_obs_m3s_ts)
    q_obs_m3s_ts = observed_tistel_discharge(display_time_axis.total_period())
    ens_repos = tistel.arome_ensemble_repository(tistel.grid_spec)
    ptgsk_fc_ens = create_tistel_simulator(PTGSKModel, ens_repos)
    sims = ptgsk_fc_ens.create_ensembles(fc_ens_time_axis, t_fc_ens_start, current_state)
    for sim in sims:
        sim.simulate()
    # plt.hold(1)  # hold() was removed from modern matplotlib; figures hold by default
    percentiles = [10, 25, 50, 75, 90]
    plot_percentiles(sims, percentiles, obs=q_obs_m3s_ts)
    #plt.interactive(1)
    plt.show()
Example #8
def plot_results(ptgsk, q_obs=None):
    h_obs = None
    if ptgsk is not None:
        plt.subplot(3, 1, 1)
        discharge = ptgsk.region_model.statistics.discharge([0])
        temp = ptgsk.region_model.statistics.temperature([0])
        precip = ptgsk.region_model.statistics.precipitation([0])
        # Results on same time axis, so we only need one
        times = utc_to_greg(
            [discharge.time(i) for i in range(discharge.size())])
        plt.plot(times, np.array(discharge.v))
        plt.gca().set_xlim(times[0], times[-1])
        plt.ylabel(r"Discharge in $\mathbf{m^3s^{-1}}$")
        set_calendar_formatter(Calendar())
    if q_obs is not None:
        obs_times = utc_to_greg([q_obs.time(i) for i in range(q_obs.size())])
        ovs = [q_obs.value(i) for i in range(q_obs.size())]
        h_obs, = plt.plot(obs_times, ovs, linewidth=2, color='k')
        ax = plt.gca()
        ax.set_xlim(obs_times[0], obs_times[-1])
    if ptgsk is not None:
        plt.subplot(3, 1, 2)
        plt.plot(times, np.array(temp.v))
        set_calendar_formatter(Calendar())
        plt.gca().set_xlim(times[0], times[-1])
        plt.ylabel(r"Temperature in C")
        plt.subplot(3, 1, 3)
        plt.plot(times, np.array(precip.v))
        set_calendar_formatter(Calendar())
        plt.gca().set_xlim(times[0], times[-1])
        plt.ylabel(r"Precipitation in mm")
    return h_obs
Example #9
 def test_inverted_values(self):
     utc = Calendar()
     t0 = utc.time(2018, 1, 1)
     dt = deltahours(1)
     dv = DoubleVector()
     dv[:] = [1.0, 2.0, 2.5, 1.9, 3.0, 3.1, float('nan')]  # also verify nan-handling
     i1_ex = [1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0]
     ts = TimeSeries(TimeAxis(t0, dt, len(dv)), dv, POINT_AVERAGE_VALUE)
     i2 = ts.inside(min_v=2.0, max_v=3.0, nan_v=1.0, inside_v=0.0, outside_v=1.0)
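     # inside_v/outside_v are swapped relative to the defaults, so the mask is inverted; NaN maps to nan_v=1.0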
     assert_array_almost_equal(i2.values.to_numpy(), np.array(i1_ex))
Example #10
 def test_simple_case(self):
     utc = Calendar()
     t0 = utc.time(2018, 1, 1)
     dt = deltahours(1)
     dv = DoubleVector()
     dv[:] = [1.0, 2.0, 2.5, 1.9, 3.0, 3.1, -1.0]
     i1_ex = [0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0]
     ts = TimeSeries(TimeAxis(t0, dt, len(dv)), dv, POINT_AVERAGE_VALUE)
     i1 = ts.inside(2.0, 3.0)
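     # inside(min, max) marks values in the half-open interval [2.0, 3.0) with 1.0; note that 3.0 itself falls outside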
     assert_array_almost_equal(i1.values.to_numpy(), np.array(i1_ex))
Example #11
 def test_construct_repository(self):
     utc_calendar = Calendar()
     netcdf_repository = self._construct_from_test_data()
     self.assertIsNotNone(netcdf_repository)
     utc_period = UtcPeriod(
         utc_calendar.time(YMDhms(2005, 1, 1, 0, 0, 0)),
         utc_calendar.time(YMDhms(2014, 12, 31, 0, 0, 0)))
     type_source_map = dict()
     type_source_map['temperature'] = TemperatureSource
     geo_ts_dict = netcdf_repository.get_timeseries(
         type_source_map, geo_location_criteria=None, utc_period=utc_period)
     self.assertIsNotNone(geo_ts_dict)
Example #12
    def test_fixed_tsv_values(self) -> None:
        """Test that a TsVector with fixed constant values is generated by fixed_tsv when given
        a sequence of values."""
        cal = Calendar()
        period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))

        values = [12, 15.5]
        tsv = fixed_tsv(period, values)
        self.assertEqual(len(tsv), 2)
        for v, ts in zip(values, tsv):
            for ts_v in ts.values:
                self.assertEqual(ts_v, v)
Example #13
    def test_dtss_remove_series(self):
        with tempfile.TemporaryDirectory() as c_dir:

            # start the server
            dtss = DtsServer()
            port_no = find_free_port()
            host_port = 'localhost:{0}'.format(port_no)
            dtss.set_listening_port(port_no)
            dtss.set_container("test", c_dir)  # notice we set container 'test' to point to c_dir directory
            dtss.start_async()  # the internal shyft time-series will be stored to that container

            # setup some data
            utc = Calendar()
            d = deltahours(1)
            n = 365 * 24 // 3
            t = utc.time(2016, 1, 1)
            ta = TimeAxis(t, d, n)
            tsv = TsVector()
            pts = TimeSeries(ta, np.linspace(start=0, stop=1.0, num=ta.size()),
                             point_fx.POINT_AVERAGE_VALUE)
            tsv.append(TimeSeries("cache://test/foo", pts))

            # get a client
            client = DtsClient(host_port)
            client.store_ts(tsv)

            # start with no removing
            dtss.set_can_remove(False)

            # we should be disallowed to remove now
            try:
                client.remove("shyft://test/foo")
            except Exception as err:
                self.assertEqual(
                    str(err), "dtss::server: server does not support removing")

            # then try with allowing remove
            dtss.set_can_remove(True)

            # we only support removing shyft-url style data
            try:
                client.remove("protocol://test/foo")
            except Exception as err:
                self.assertEqual(
                    str(err),
                    "dtss::server: server does not allow removing for non shyft-url type data"
                )

            # now it should work
            client.remove("shyft://test/foo")
Example #14
    def test_crudf_cycle(self):
        """
        Verify we can create, store, read, find and delete state in the state repository
        
        """

        # arrange, by creating one State
        cal = Calendar()
        utc_timestamp = cal.time(2001, 1, 1)
        region_model_id = "neanidelv-ptgsk"
        n_cells = 10
        tags = ["initial", "unverified"]
        state_vector = self._create_state_vector(n_cells)
        self.assertIsNotNone(
            state_vector,
            "we should have a valid state vector object at this spot")
        # now start state_repository test
        state_repository = YamlStateRepository(
            directory_path=self._test_state_directory,
            state_serializer=StateSerializer(PTGSKStateWithIdVector))
        # put in two states, record the unique state_id..
        state_id_1 = state_repository.put_state(region_model_id, utc_timestamp,
                                                state_vector, tags)
        state_id_2 = state_repository.put_state(region_model_id, utc_timestamp,
                                                state_vector, tags)
        # assert that we got two unique state_id
        self.assertIsNotNone(state_id_1, "We expect back a unique id")
        self.assertIsNotNone(state_id_2, "We expect back a unique id")
        self.assertNotEqual(
            state_id_1, state_id_2,
            "storing two states, same model, same time: each state should be stored with a unique id"
        )
        # now we should have two states in the repository
        state_infos = state_repository.find_state()
        self.assertEqual(2, len(state_infos),
                         "We just stored two, expect two back..")
        # extra test, verify that we really stored the state (using kirchner q)
        state_1 = state_repository.get_state(state_id_1)
        self.assertEqual(n_cells, len(state_1),
                         "expect to get back state with same number of cells")
        for i in range(n_cells):
            self.assertAlmostEqual(
                state_1[i].state.kirchner.q, state_vector[i].state.kirchner.q,
                3, "state repository should preserve state...")
        # now remove state
        state_repository.delete_state(state_id_1)
        # check that we got just one left, and that it is the correct one..
        state_list = state_repository.find_state()
        self.assertEqual(1, len(state_list))
        self.assertEqual(state_list[0].region_model_id, region_model_id)
        self.assertEqual(state_list[0].utc_timestamp, utc_timestamp)
        self.assertEqual(state_list[0].state_id, state_id_2)
Example #15
 def test_returns_empty_ts_when_no_data_in_request_period(self):
     utc_calendar = Calendar()
     netcdf_repository = self._construct_from_test_data()
     self.assertIsNotNone(netcdf_repository)
     utc_period = UtcPeriod(
         utc_calendar.time(2017, 1, 1, 0, 0, 0),     # a period where there is no data in
         utc_calendar.time(2020, 12, 31, 0, 0, 0))   # the file supplied
     type_source_map = dict()
     type_source_map['temperature'] = TemperatureSource
     geo_ts_dict = netcdf_repository.get_timeseries(
         type_source_map, geo_location_criteria=None, utc_period=utc_period)
     self.assertIsNotNone(geo_ts_dict)
Example #16
 def test_returns_empty_ts_when_no_data_in_request_period(self):
     utc_calendar = Calendar()
     netcdf_repository = self._construct_from_test_data()
     self.assertIsNotNone(netcdf_repository)
     utc_period = UtcPeriod(utc_calendar.time(YMDhms(2017, 1, 1, 0, 0, 0)),    # a period where there is no data in
                            utc_calendar.time(YMDhms(2020, 12, 31, 0, 0, 0)))  # the file supplied
     type_source_map = dict()
     type_source_map['temperature'] = TemperatureSource
     geo_ts_dict = netcdf_repository.get_timeseries(
                                             type_source_map,
                                             geo_location_criteria=None,
                                             utc_period=utc_period)
     self.assertIsNotNone(geo_ts_dict)
Example #17
 def test_construct_repository(self):
     utc_calendar = Calendar()
     netcdf_repository = self._construct_from_test_data()
     self.assertIsNotNone(netcdf_repository)
     utc_period = UtcPeriod(utc_calendar.time(YMDhms(2005, 1, 1, 0, 0, 0)),
                            utc_calendar.time(YMDhms(2014, 12, 31, 0, 0, 0)))
     type_source_map = dict()
     type_source_map['temperature'] = TemperatureSource
     geo_ts_dict = netcdf_repository.get_timeseries(
                                             type_source_map,
                                             geo_location_criteria=None,
                                             utc_period=utc_period)
     self.assertIsNotNone(geo_ts_dict)
Example #18
 def test_convolve_policy(self):
     utc = Calendar()
     ts = TimeSeries(ta=TimeAxisFixedDeltaT(utc.time(2001, 1, 1),
                                            deltahours(1), 24),
                     fill_value=10.0,
                     point_fx=point_fx.POINT_AVERAGE_VALUE)
     w = DoubleVector.from_numpy([0.05, 0.15, 0.6, 0.15, 0.05])
     cts = ts.convolve_w(w, convolve_policy.USE_FIRST)  # ensure mass-balance between source and cts
     self.assertIsNotNone(cts)
     self.assertEqual(len(cts), len(ts))
     self.assertEqual(cts.values.to_numpy().sum(), ts.values.to_numpy().sum())
Example #19
 def test_hbv_snow_step(self):
     utc = Calendar()
     s = HbvSnowState()
     p = HbvSnowParameter()
     s.distribute(p)
     r = HbvSnowResponse()
     calc = HbvSnowCalculator(p)
     t0 = utc.time(2016, 10, 1)
     t1 = utc.time(2016, 10, 2)
     dt = deltahours(1)
     temp = 0.4
     prec_mm_h = 0.3
     # Just check that we don't get an error when stepping
     calc.step(s, r, t0, t1, prec_mm_h, temp)
Example #20
    def test_windowed_percentiles_tsv_empty(self) -> None:
        """Test that an empty TsVector is generated by windowed_percentiles_tsv
        when given an empty sequence of percentiles."""
        cal = Calendar()
        period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))

        data = np.linspace(-2, 2, 24 * 7)
        data_ts = TimeSeries(TimeAxis(0, Calendar.HOUR, len(data)), data,
                             POINT_INSTANT_VALUE)

        # compute
        tsv = windowed_percentiles_tsv(data_ts, period, Calendar.HOUR,
                                       Calendar.HOUR, [], self.client, cal)
        self.assertEqual(len(tsv), 0)
Example #21
    def test_failures(self):
        """
        Verify that dtss client server connections are auto-magically
        restored and fixed
        """
        with tempfile.TemporaryDirectory() as c_dir:

            # start the server
            dtss = DtsServer()
            port_no = find_free_port()
            host_port = 'localhost:{0}'.format(port_no)
            dtss.set_listening_port(port_no)
            dtss.set_container("test", c_dir)  # notice we set container 'test' to point to c_dir directory
            dtss.start_async()  # the internal shyft time-series will be stored to that container

            # setup some data
            utc = Calendar()
            d = deltahours(1)
            n = 365 * 24 // 3
            t = utc.time(2016, 1, 1)
            ta = TimeAxis(t, d, n)
            tsv = TsVector()
            pts = TimeSeries(ta, np.linspace(start=0, stop=1.0, num=ta.size()),
                             point_fx.POINT_AVERAGE_VALUE)
            tsv.append(TimeSeries("cache://test/foo", pts))

            # get a client
            client = DtsClient(host_port, auto_connect=False)
            client.store_ts(tsv)
            client.close()
            client.store_ts(tsv)  # should just work, it re-open automagically
            dtss.clear()  # the server is now shut down, so the next call cannot succeed
            try:
                client.store_ts(tsv)
                self.assertTrue(
                    False,
                    'This should throw, because there is no dtss server to help you'
                )
            except Exception:
                pass  # expected: no server is listening

            dtss.set_listening_port(port_no)
            dtss.start_async()
            client.store_ts(
                tsv)  # this should just work, automagically reconnect
Example #22
def continuous_calibration():
    utc = Calendar()
    t_start = utc.time(YMDhms(2011, 9, 1))
    t_fc_start = utc.time(YMDhms(2015, 10, 1))
    dt = deltahours(1)
    n_obs = int(round((t_fc_start - t_start)/dt))
    obs_time_axis = TimeAxisFixedDeltaT(t_start, dt, n_obs + 1)
    q_obs_m3s_ts = observed_tistel_discharge(obs_time_axis.total_period())

    ptgsk = create_tistel_simulator(PTGSKOptModel, tistel.geo_ts_repository(tistel.grid_spec.epsg()))
    initial_state = burn_in_state(ptgsk, t_start, utc.time(YMDhms(2012, 9, 1)), q_obs_m3s_ts)

    num_opt_days = 30
    # Step forward num_opt_days days and store the state for each day:
    recal_start = t_start + deltahours(num_opt_days*24)
    t = t_start
    state = initial_state
    opt_states = {t: state}
    while t < recal_start:
        ptgsk.run(TimeAxisFixedDeltaT(t, dt, 24), state)
        t += deltahours(24)
        state = ptgsk.reg_model_state
        opt_states[t] = state

    recal_stop = utc.time(YMDhms(2012, 5, 30))
    curr_time = recal_start
    q_obs_avg = TsTransform().to_average(t_start, dt, n_obs + 1, q_obs_m3s_ts)
    target_spec = TargetSpecificationPts(q_obs_avg, IntVector([0]), 1.0, KLING_GUPTA)
    target_spec_vec = TargetSpecificationVector([target_spec])
    i = 0
    times = []
    values = []
    p, p_min, p_max = construct_calibration_parameters(ptgsk)
    while curr_time < recal_stop:
        print(i)
        i += 1
        opt_start = curr_time - deltahours(24*num_opt_days)
        opt_state = opt_states.pop(opt_start)
        p = ptgsk.region_model.get_region_parameter()
        p_opt = ptgsk.optimize(TimeAxisFixedDeltaT(opt_start, dt, 24*num_opt_days), opt_state, target_spec_vec,
                               p, p_min, p_max, tr_stop=1.0e-5)
        ptgsk.region_model.set_region_parameter(p_opt)
        corr_state = adjust_simulator_state(ptgsk, curr_time, q_obs_m3s_ts)
        ptgsk.run(TimeAxisFixedDeltaT(curr_time, dt, 24), corr_state)
        curr_time += deltahours(24)
        opt_states[curr_time] = ptgsk.reg_model_state
        discharge = ptgsk.region_model.statistics.discharge([0])
        times.extend(discharge.time(i) for i in range(discharge.size()))
        values.extend(list(np.array(discharge.v)))
    plt.plot(utc_to_greg(times), values)
    plot_results(None, q_obs=observed_tistel_discharge(UtcPeriod(recal_start, recal_stop)))
    set_calendar_formatter(Calendar())
    #plt.interactive(1)
    plt.title("Continuously recalibrated discharge vs observed")
    plt.xlabel("Time in UTC")
    plt.ylabel(r"Discharge in $\mathbf{m^3s^{-1}}$", verticalalignment="top", rotation="horizontal")
    plt.gca().yaxis.set_label_coords(0, 1.1)
Example #23
def plot_percentiles(sim, percentiles, obs=None):
    discharges = [s.region_model.statistics.discharge([0]) for s in sim]
    times = utc_to_greg(np.array([discharges[0].time(i) for i in range(discharges[0].size())], dtype='d'))
    all_discharges = np.array([d.v for d in discharges])
    perc_arrs = [a for a in np.percentile(all_discharges, percentiles, 0)]
    h, fill_handles = plot_np_percentiles(times, perc_arrs, base_color=(51/256, 102/256, 193/256))
    percentile_texts = ["{} - {}".format(percentiles[i], percentiles[-(i + 1)]) for i in range(len(percentiles)//2)]
    ax = plt.gca()
    maj_loc = AutoDateLocator(tz=pytz.UTC, interval_multiples=True)
    ax.xaxis.set_major_locator(maj_loc)
    set_calendar_formatter(Calendar())
    if len(percentiles) % 2:
        fill_handles.append(h[0])
        percentile_texts.append("{}".format(percentiles[len(percentiles)//2]))
    if obs is not None:
        h_obs = plot_results(None, obs)
        fill_handles.append(h_obs)
        percentile_texts.append("Observed")

    ax.legend(fill_handles, percentile_texts)
    ax.grid(b=True, color=(51/256, 102/256, 193/256), linewidth=0.1, linestyle='-', axis='y')
    plt.xlabel("Time in UTC")
    plt.ylabel(r"Discharge in $\mathbf{m^3s^{-1}}$", verticalalignment="top", rotation="horizontal")
    ax.yaxis.set_label_coords(0, 1.1)
    return h, ax
Example #24
 def test_hbv_physical_snow_step(self):
     utc = Calendar()
     s = HbvPhysicalSnowState()
     p = HbvPhysicalSnowParameter()
     r = HbvPhysicalSnowResponse()
     s.distribute(p)
     calc = HbvPhysicalSnowCalculator(p)
     t = utc.time(2016, 10, 1)
     dt = deltahours(1)
     temp = 0.4
     rad = 12.0
     prec_mm_h = 0.3
     wind_speed = 1.3
     rel_hum = 0.4
     # Just check that we don't get an error when stepping
     calc.step(s, r, t, dt, temp, rad, prec_mm_h, wind_speed, rel_hum)
Example #25
 def test_find_with_region_model_filter(self):
     cal = Calendar()
     utc_timestamp = cal.time(YMDhms(2001, 1, 1))
     region_model_id = "neanidelv-ptgsk"
     n_cells = 10
     tags = ["initial", "unverified"]
     state_vector = self._create_state_vector(n_cells)
     # now start state_repository test
     state_repository = YamlStateRepository(self._test_state_directory)
     # put in two states, record the unique state_id..
     state_repository.put_state(region_model_id, utc_timestamp, state_vector, tags)
     state_repository.put_state("tokke-ptgsk", utc_timestamp, state_vector, tags)
     all_states = state_repository.find_state()
     neanidelv_states = state_repository.find_state(region_model_id)
     self.assertEqual(2, len(all_states))
     self.assertEqual(1, len(neanidelv_states))
     self.assertEqual(neanidelv_states[0].region_model_id, region_model_id)
Example #26
 def test_construct_repository(self):
     utc_calendar = Calendar()
     netcdf_repository = self._construct_from_test_data()
     self.assertIsNotNone(netcdf_repository)
     utc_period = UtcPeriod(utc_calendar.time(2005, 1, 1, 0, 20, 0), # make it a challenge! ensure we get the first hour
                            utc_calendar.time(2014, 12, 30, 0, 20, 0)) # and also, we should have the last hour!
     type_source_map = dict()
     type_source_map['temperature'] = TemperatureSource
     geo_ts_dict = netcdf_repository.get_timeseries(
                                             type_source_map,
                                             geo_location_criteria=None,
                                             utc_period=utc_period)
     self.assertIsNotNone(geo_ts_dict)
     temperature_source = geo_ts_dict['temperature']
     self.assertIsNotNone(temperature_source)
     self.assertLessEqual(temperature_source[0].ts.time_axis.time(0),utc_period.start,'expect returned time-axis to cover the requested period')
     self.assertGreaterEqual(temperature_source[0].ts.time_axis.total_period().end,utc_period.end,'expected returned time-axis to cover requested period')
Example #27
    def test_inside_of_derivative(self):
        """Created in response to https://github.com/statkraft/shyft/issues/352"""
        values = [1, 1, 1, 1]
        utc = Calendar()
        data = np.array(values, dtype='float64')
        data_ta = TimeAxis(utc.time(2015, 1, 1), 3600, len(data))

        orig = TimeSeries(data_ta, data, POINT_AVERAGE_VALUE)
        orig_derivative_inside_inf = orig.derivative(derivative_method.BACKWARD).inside(-float('inf'), float('inf'), 0,
                                                                                        1, 0)
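        # inside(-inf, inf, nan_v=0, inside_v=1, outside_v=0) maps every finite value to 1.0 and any NaN to 0.0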

        def check_nan(ts):
            """Method that returns 1 for all timesteps that contain nan."""
            return ts.inside(-float('inf'), float('inf'), 1, 0, 0).values.to_numpy()

        np.testing.assert_equal(check_nan(orig + orig_derivative_inside_inf), [0., 0., 0., 0.],
                                'TimeSeries.inside() should not find any NaN-values in this TimeSeries')
Example #28
 def test_gamma_snow_step(self):
     utc = Calendar()
     s = GammaSnowState()
     p = GammaSnowParameter()
     r = GammaSnowResponse()
     calc = GammaSnowCalculator()
     t = utc.time(2016,10,1)
     dt = deltahours(1)
     temp = 0.4
     rad = 12.0
     prec_mm_h = 0.3
     wind_speed=1.3
     rel_hum=0.4
     forest_fraction = 0.2
     altitude = 100.0
     # Just check that we don't get an error when stepping
     calc.step(s, r, t, dt, p, temp, rad, prec_mm_h, wind_speed, rel_hum, forest_fraction, altitude)
Example #29
    def test_get_ts_info(self):
        """
        Verify we can get specific TsInfo objects for time-series from the server backend.
        """
        with tempfile.TemporaryDirectory() as c_dir:

            # start the server
            dtss = DtsServer()
            port_no = find_free_port()
            host_adr = 'localhost:{0}'.format(port_no)
            dtss.set_listening_port(port_no)
            dtss.set_container("testing", c_dir)  # notice we set container 'testing' to point to c_dir directory
            dtss.start_async()  # the internal shyft time-series will be stored to that container

            # get a client
            client = DtsClient(host_adr)

            try:
                client.get_ts_info(r'shyft://testing/data')
            except Exception:
                pass  # expected: the time-series does not exist yet
            else:
                # only end up here if no exception was raised
                self.fail('Could fetch ts info for a non-existing time-series')

            # setup some data
            utc = Calendar()
            d = deltahours(1)
            n = 365 * 24 // 3
            t = utc.time(2016, 1, 1)
            ta = TimeAxis(t, d, n)
            tsv = TsVector()
            pts = TimeSeries(ta, np.linspace(start=0, stop=1.0, num=ta.size()),
                             point_fx.POINT_AVERAGE_VALUE)
            tsv.append(TimeSeries(r'shyft://testing/data', pts))
            client.store_ts(tsv)

            info: TsInfo = client.get_ts_info(r'shyft://testing/data')

            self.assertEqual(info.name, r'data')
            self.assertEqual(info.point_fx, point_fx.POINT_AVERAGE_VALUE)
            self.assertEqual(info.data_period, ta.total_period())
Example #30
def simple_run_demo():
    """Simple demo using HBV time-series and similar model-values

    """
    # 1. Setup the time-axis for our simulation
    normal_calendar = Calendar(3600)  # we need UTC+1, since day-boundaries in day series in SmG are at UTC+1
    t_start = normal_calendar.time(2010, 9, 1)  # simulation starts Sept 1, 2010
    t_end   = normal_calendar.add(t_start,Calendar.YEAR,5) # 5 years period for simulation
    time_axis = Timeaxis(t_start, model_dt, normal_calendar.diff_units(t_start,t_end,model_dt))

    # 2. Create the shyft model from the HBV model-repository
    shyft_model = create_kjela_model(PTHSKModel, kjela.geo_ts_repository)

    # 3. establish the initial state
    # using the *pattern* of distribution after one year (so hbv-zone 1..10 get approximate distribution of discharge)
    #      *and* the observed discharge at the start time t_start
    #
    t_burnin = normal_calendar.add(t_start,Calendar.YEAR,1) # use one year to get distribution between hbvzones
    burnin_time_axis = Timeaxis(t_start, model_dt, normal_calendar.diff_units(t_start, t_burnin, model_dt))
    q_obs_m3s_ts = observed_kjela_discharge(time_axis.total_period()) # get out the observation ts
    q_obs_m3s_at_t_start= q_obs_m3s_ts(t_start) # get the m3/s at t_start
    initial_state = burn_in_state(shyft_model,burnin_time_axis, q_obs_m3s_at_t_start)

    # 4. now run the model with the established state
    #    that will start out with the burn in state
    shyft_model.run(time_axis, initial_state)

    # 5. display results etc. goes here
    plot_results(shyft_model, q_obs_m3s_ts)
    plt.show()
Example #31
 def test_find_with_region_model_and_time_filter(self):
     cal = Calendar()
     region_model_id = "neanidelv-ptgsk"
     n_cells = 10
     tags = ["initial", "unverified"]
     state_vector = self._create_state_vector(n_cells)
     # now start state_repository test
     state_repository = YamlStateRepository(self._test_state_directory)
     # put in two states, record the unique state_id..
     state_id_1 = state_repository.put_state(region_model_id, cal.time(YMDhms(2001, 1, 1, 0, 0, 0)), state_vector,
                                             tags)
     state_id_2 = state_repository.put_state(region_model_id, cal.time(YMDhms(2001, 1, 2, 0, 0, 0)), state_vector,
                                             tags)
     all_states = state_repository.find_state()
     neanidelv_states = state_repository.find_state(region_model_id)
     self.assertEqual(2, len(all_states))
     self.assertEqual(2, len(neanidelv_states))
     most_recent_state_before_time = state_repository.find_state(region_model_id,
                                                                 cal.time(YMDhms(2001, 1, 1, 0, 0, 0)))
     self.assertEqual(1, len(most_recent_state_before_time))
     self.assertEqual(state_id_1, most_recent_state_before_time[0].state_id)
     self.assertEqual(0,
                      len(state_repository.find_state(region_model_id, cal.time(YMDhms(2000, 12, 31, 23, 59, 59)))))
     self.assertEqual(state_id_2,
                      state_repository.find_state(region_model_id, cal.time(YMDhms(2002, 1, 1, 0, 0, 0)))[
                          0].state_id)
Example #32
 def test_easy_rating_curve_construct(self):
     utc = Calendar()
     rating_curve = RatingCurveParameters(RatingCurveTimeFunctions([
         RatingCurveTimeFunction(
             utc.time(1950, 3, 27), RatingCurveFunction(RatingCurveSegments([
                 RatingCurveSegment(lower=0.474, a=5.97489, b=-0.4745, c=2.36997)
             ]))),
         RatingCurveTimeFunction(
             utc.time(1968, 7, 29), RatingCurveFunction(RatingCurveSegments([
                 RatingCurveSegment(lower=0.25, a=2.9822, b=-0.45, c=1.5078),
                 RatingCurveSegment(lower=0.79, a=3.9513, b=-0.45, c=2.8087),
                 RatingCurveSegment(lower=1.38, a=5.7071, b=-0.45, c=2.3503),
                 RatingCurveSegment(lower=2.55, a=8.2672, b=-0.45, c=2.052)
             ])))
     ]))
     self.assertIsNotNone(rating_curve)
     flow = rating_curve.flow(utc.time(2018, 1, 1), 3.2)
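     # at 2018 the 1968 curve applies, and h=3.2 falls in its last segment (lower=2.55);
     # the asserted value matches flow = a*(h - b)**c = 8.2672*(3.2 - (-0.45))**2.052 ≈ 117.81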
     self.assertAlmostEqual(flow, 117.8103380205204)
Example #33
     def test_get_ensemble_forecast_using_known_service_and_db_content(self):
         utc = Calendar() # always use Calendar() stuff
         met_stations=[ # this is the list of MetStations, the gis_id tells the position, the remaining tells us what properties we observe/forecast/calculate at the metstation (smg-ts)
             MetStationConfig(gis_id=598,temperature=u'/LTM5-Nea...........-T0017A3P_EC00_ENS',precipitation=u'/LTM5-Nea...........-T0000A5P_EC00_ENS')
         ]
         
         #note: the MetStationConfig can be constructed from yaml-config
         gis_location_repository=GisLocationService() # this provides the gis locations for my stations
          smg_ts_repository = SmGTsRepository(PROD,FC_PROD) # this provides the read function for my time-series
         n_ensembles=51
         ens_station_list=[
             EnsembleStation(598,n_ensembles,
                 temperature_ens=lambda i:u'/LTM5-Nea...........-T0017A3P_EC00_E{0:02}'.format(i),
                 precipitation_ens=lambda i:u'/LTM5-Nea...........-T0000A5P_EC00_E{0:02}'.format(i),
                 wind_speed_ens=None,
                 radiation_ens=None,
                 relative_humidity_ens=None
             ),
             EnsembleStation(574,n_ensembles,
                 temperature_ens=lambda i:u'/LTM5-Tya...........-T0017A3P_EC00_E{0:02}'.format(i),
                 precipitation_ens=lambda i:u'/LTM5-Tya...........-T0000A5P_EC00_E{0:02}'.format(i),
                 wind_speed_ens=None,
                 radiation_ens=None,
                 relative_humidity_ens=None
             )
         ]
         ens_config=EnsembleConfig(n_ensembles,ens_station_list)
         geo_ts_repository = GeoTsRepository(
             epsg_id=32633,
             geo_location_repository=gis_location_repository,
             ts_repository=smg_ts_repository,
             met_station_list=met_stations,
             ens_config=ens_config) #pass service info and met_stations
 
         self.assertIsNotNone(geo_ts_repository)
         utc_period = UtcPeriod(utc.time(YMDhms(2015, 10, 1, 0, 0, 0)),utc.time(YMDhms(2015, 10, 10, 0, 0, 0)))
         ts_types= ['temperature','precipitation']
         ens_geo_ts_dict = geo_ts_repository.get_forecast_ensemble(ts_types,utc_period=utc_period,t_c=None,geo_location_criteria=None)
         self.assertIsNotNone(ens_geo_ts_dict)
         self.assertEqual(ens_config.n_ensembles,len(ens_geo_ts_dict))
         for i in range(ens_config.n_ensembles):
             for ts_type in ts_types:
                  self.assertTrue(ts_type in ens_geo_ts_dict[i].keys(), "we expect to find an entry for each requested type (it could be an empty list, though)")
                  self.assertTrue(len(ens_geo_ts_dict[i][ts_type]) > 0, "we expect to find the series that we pass in, given they have not changed the name in SmG PROD")
Example #34
        def test_get_timeseries_using_known_service_and_db_content(self):
            utc = Calendar()  # always use Calendar() stuff
            met_stations = [  # this is the list of MetStations, the gis_id tells the position, the remaining tells us what properties we observe/forecast/calculate at the metstation (smg-ts)
                MetStationConfig(
                    gis_id=598,
                    temperature=u'/NeNi-Sylsjøen......-T0017V3KI0114',
                    precipitation=u'/NeNi-Sylsjøen-2....-T0000D9BI0124'),
                MetStationConfig(
                    gis_id=574,
                    temperature=u'/NeNi-Stuggusjøen...-T0017V3KI0114',
                    precipitation=u'/NeNi-Stuggusjøen...-T0000D9BI0124',
                    radiation=
                    u'/ENKI/STS/Radiation/Sim.-Stuggusjøen...-T0006A0B-0119')
            ]
            #note: the MetStationConfig can be constructed from yaml-config
            gis_location_repository = GisLocationService()  # this provides the gis locations for my stations
            smg_ts_repository = SmGTsRepository(PROD, FC_PROD)  # this provides the read function for my time-series

            geo_ts_repository = GeoTsRepository(
                epsg_id=32633,
                geo_location_repository=gis_location_repository,
                ts_repository=smg_ts_repository,
                met_station_list=met_stations,
                ens_config=None)  #pass service info and met_stations

            self.assertIsNotNone(geo_ts_repository)
            utc_period = UtcPeriod(utc.time(YMDhms(2010, 1, 1, 0, 0, 0)),
                                   utc.time(YMDhms(2010, 1, 2, 0, 0, 0)))
            ts_types = ['temperature', 'precipitation', 'radiation']
            geo_ts_dict = geo_ts_repository.get_timeseries(
                ts_types, utc_period=utc_period, geo_location_criteria=None)
            self.assertIsNotNone(geo_ts_dict)
            for ts_type in ts_types:
                self.assertTrue(
                    ts_type in geo_ts_dict.keys(),
                    "we expect to find an entry for each requested type (it could be an empty list, though)"
                )
                self.assertTrue(
                    len(geo_ts_dict[ts_type]) > 0,
                    "we expect to find the series that we pass in, given they have not changed the name in SmG PROD"
                )
Example #35
    def test_raise_exception_when_no_data_in_request_period(self):
        utc_calendar = Calendar()
        netcdf_repository = self._construct_from_test_data()
        netcdf_repository.raise_if_no_data = True  # yes, for now, just imagine this could work.
        self.assertIsNotNone(netcdf_repository)
        utc_period = UtcPeriod(utc_calendar.time(YMDhms(2017, 1, 1, 0, 0, 0)),    # a period where there is no data in
                               utc_calendar.time(YMDhms(2020, 12, 31, 0, 0, 0)))  # the file supplied
        type_source_map = dict()
        type_source_map['temperature'] = TemperatureSource

        #def test_function():
        #
        #    return netcdf_repository.get_timeseries(
        #                                        type_source_map,
        #                                        geo_location_criteria=None,
        #                                        utc_period=utc_period)

        #self.assertRaises(RuntimeError, test_function)
        self.assertRaises(RuntimeError, netcdf_repository.get_timeseries, type_source_map, **{'geo_location_criteria':None, 'utc_period':utc_period})
Example #36
 def test_simple_case(self):
     utc = Calendar()
     t0 = utc.time(2018, 1, 1)
     dt = deltahours(1)
     dv = DoubleVector()
     dv[:] = [1.2, 0.0, 2.0, 5.0, 15.0,
              float('nan'),
              -1.0]  # these are bit-encoded values, note 1.2 -> 1.0
     i0_1_e = [1.0, 0.0, 0.0, 1.0, 1.0,
               float('nan'),
               float('nan')]  # expected values for start_bit=0, n_bits=1
     i1_3_e = [0.0, 0.0, 1.0, 2.0, 7.0,
               float('nan'),
               float('nan')]  # expected values for start_bit=1, n_bits=3
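     # decode reads each value as an unsigned integer bit-field: e.g. 5.0 -> 0b101 gives bit 0 = 1 and bits 1..3 = 0b010 = 2; NaN and negative values decode to NaN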
     ts = TimeSeries(TimeAxis(t0, dt, len(dv)), dv, POINT_AVERAGE_VALUE)
     i0_1 = ts.decode(start_bit=0, n_bits=1)
     i1_3 = ts.decode(start_bit=1, n_bits=3)
     assert_array_almost_equal(i0_1.values.to_numpy(), np.array(i0_1_e))
     assert_array_almost_equal(i1_3.values.to_numpy(), np.array(i1_3_e))
Example #37
    def test_crudf_cycle(self):
        """
        Verify we can create, store, read, find and delete state in the state repository
        
        """

        # arrange, by creating one State
        cal = Calendar()
        utc_timestamp = cal.time(YMDhms(2001, 1, 1))
        region_model_id = "neanidelv-ptgsk"
        n_cells = 10
        tags = ["initial", "unverified"]
        state_vector = self._create_state_vector(n_cells)
        self.assertIsNotNone(state_vector, "we should have a valid state vector object at this spot")
        # now start state_repository test
        state_repository = YamlStateRepository(self._test_state_directory)
        # put in two states, record the unique state_id..
        state_id_1 = state_repository.put_state(region_model_id, utc_timestamp, state_vector, tags)
        state_id_2 = state_repository.put_state(region_model_id, utc_timestamp, state_vector, tags)
        # assert that we got two unique state_id
        self.assertIsNotNone(state_id_1, "We expect back a unique id")
        self.assertIsNotNone(state_id_2, "We expect back a unique id")
        self.assertNotEqual(state_id_1, state_id_2,
                            "storing two states, same model, same time: each state should be stored with a unique id")
        # now we should have two states in the repository
        state_infos = state_repository.find_state()
        self.assertEqual(2, len(state_infos), "We just stored two, expect two back..")
        # extra test, verify that we really stored the state (using kirchner q)
        state_1 = state_repository.get_state(state_id_1)
        self.assertEqual(n_cells, state_1.size(), "expect to get back state with same number of cells")
        for i in range(n_cells):
            self.assertAlmostEqual(state_1[i].kirchner.q, state_vector[i].kirchner.q, 3,
                                   "state repository should preserve state...")
        # now remove state
        state_repository.delete_state(state_id_1)
        # check that we got just one left, and that it is the correct one..
        state_list = state_repository.find_state()
        self.assertEqual(1, len(state_list))
        self.assertEqual(state_list[0].region_model_id, region_model_id)
        self.assertEqual(state_list[0].utc_timestamp, utc_timestamp)
        self.assertEqual(state_list[0].state_id, state_id_2)
Example #38
 def test_glacier_melt_ts_m3s(self):
     utc = Calendar()
     t0 = utc.time(2016,10,1)
     dt = deltahours(1)
     n = 240
     ta = Timeaxis(t0, dt, n)
     area_m2 = 487*1000*1000  # Jostedalsbreen, largest in Europe
     temperature = Timeseries(ta=ta, fill_value=10.0, point_fx=fx_policy.POINT_AVERAGE_VALUE)
     sca_values = dv.from_numpy(np.linspace(area_m2*1.0,0.0,num=n))
     sca = Timeseries(ta=ta, values=sca_values, point_fx=fx_policy.POINT_AVERAGE_VALUE)
     gf = 1.0 *area_m2
     dtf = 6.0
     melt_m3s = create_glacier_melt_ts_m3s(temperature, sca, gf, dtf) # Here we get back a melt_ts, that we can do ts-stuff with
     self.assertIsNotNone(melt_m3s)
     full_melt_m3s = glacier_melt_step(dtf, 10.0, 0.0, gf)
     expected_melt_m3s = np.linspace(0.0,full_melt_m3s,num=n)
     assert_array_almost_equal(expected_melt_m3s,melt_m3s.values.to_numpy(),4)
     # Just to check we can work with the result as a ts in all aspects
     mx2 = melt_m3s*2.0
     emx2 = expected_melt_m3s * 2.0
     assert_array_almost_equal(emx2, mx2.values.to_numpy(), 4)
Example #39
     def test_get_ensemble_forecast_using_known_service_and_db_content(self):
         utc = Calendar()
         met_stations = [MetStationConfig(gis_id=598, temperature=u'/LTM5-Nea...........-T0017A3P_EC00_ENS', precipitation=u'/LTM5-Nea...........-T0000A5P_EC00_ENS')]
         gis_location_repository = GisLocationService()
         smg_ts_repository = SmGTsRepository(PREPROD, FC_PREPROD)
         n_ensembles = 51
         ens_station_list = [
             EnsembleStation(598, n_ensembles,
                 temperature_ens = lambda i:u'/LTM5-Nea...........-T0017A3P_EC00_E{0:02}'.format(i),
                 precipitation_ens = lambda i:u'/LTM5-Nea...........-T0000A5P_EC00_E{0:02}'.format(i),
                 wind_speed_ens = None,
                 radiation_ens = None,
                 relative_humidity_ens = None
             ),
             EnsembleStation(574, n_ensembles,
                 temperature_ens = lambda i:u'/LTM5-Tya...........-T0017A3P_EC00_E{0:02}'.format(i),
                 precipitation_ens = lambda i:u'/LTM5-Tya...........-T0000A5P_EC00_E{0:02}'.format(i),
                 wind_speed_ens = None,
                 radiation_ens = None,
                 relative_humidity_ens = None
             )
         ]
         ens_config = EnsembleConfig(n_ensembles, ens_station_list)
         geo_ts_repository = GeoTsRepository(
             epsg_id = 32633,
             geo_location_repository = gis_location_repository,
             ts_repository = smg_ts_repository,
             met_station_list = met_stations,
             ens_config = ens_config)
 
         self.assertIsNotNone(geo_ts_repository)
         utc_period = UtcPeriod(utc.time(YMDhms(2015, 10, 1, 0, 0, 0)),utc.time(YMDhms(2015, 10, 10, 0, 0, 0)))
         ts_types= ['temperature', 'precipitation']
         ens_geo_ts_dict = geo_ts_repository.get_forecast_ensemble(ts_types, utc_period=utc_period, t_c=None, geo_location_criteria=None)
         self.assertIsNotNone(ens_geo_ts_dict)
         self.assertEqual(ens_config.n_ensembles, len(ens_geo_ts_dict))
         for i in range(ens_config.n_ensembles):
             for ts_type in ts_types:
                 self.assertTrue(ts_type in ens_geo_ts_dict[i].keys(), "we expect to find an entry for each requested type (it could be empty list though)")
                 self.assertTrue(len(ens_geo_ts_dict[i][ts_type])>0, "we expect to find the series that we pass in by name in SmG PREPROD")
Example #40
     def test_get_forecast_using_known_service_and_db_content(self):
         utc = Calendar()
         met_stations=[ # this is the list of MetStations, the gis_id tells the position, the remaining tells us what properties we observe/forecast/calculate at the metstation (smg-ts)
             MetStationConfig(gis_id=598, temperature=u'/LTM5-Nea...........-T0017A3P_EC00_ENS', precipitation=u'/LTM5-Nea...........-T0000A5P_EC00_ENS')
         ]
         gis_location_repository = GisLocationService()
         smg_ts_repository = SmGTsRepository(PREPROD, FC_PREPROD)
         geo_ts_repository = GeoTsRepository(
             epsg_id = 32633,
             geo_location_repository = gis_location_repository,
             ts_repository = smg_ts_repository,
             met_station_list = met_stations,
             ens_config = None)
 
         self.assertIsNotNone(geo_ts_repository)
         utc_period = UtcPeriod(utc.time(YMDhms(2015, 10, 1, 0, 0, 0)), utc.time(YMDhms(2015, 10, 10, 0, 0, 0)))
         ts_types= ['temperature', 'precipitation']
         geo_ts_dict = geo_ts_repository.get_forecast(ts_types, utc_period=utc_period, t_c=None, geo_location_criteria=None)
         self.assertIsNotNone(geo_ts_dict)
         for ts_type in ts_types:
             self.assertTrue(ts_type in geo_ts_dict.keys(), "we expect to find an entry for each requested type (it could be empty list though)")
             self.assertTrue(len(geo_ts_dict[ts_type])>0, "we expect to find the series that we pass in by name in SmG PREPROD")
Example #41
def continuous_calibration():
    utc = Calendar()
    t_start = utc.time(YMDhms(2011, 9, 1))
    t_fc_start = utc.time(YMDhms(2015, 10, 1))
    dt = deltahours(1)
    n_obs = int(round((t_fc_start - t_start)/dt))
    obs_time_axis = Timeaxis(t_start, dt, n_obs + 1)
    q_obs_m3s_ts = observed_tistel_discharge(obs_time_axis.total_period())

    ptgsk = create_tistel_simulator(PTGSKOptModel, tistel.geo_ts_repository(tistel.grid_spec.epsg()))
    initial_state = burn_in_state(ptgsk, t_start, utc.time(YMDhms(2012, 9, 1)), q_obs_m3s_ts)

    num_opt_days = 30
    # Step forward num_opt_days days and store the state for each day:
    recal_start = t_start + deltahours(num_opt_days*24)
    t = t_start
    state = initial_state
    opt_states = {t: state}
    while t < recal_start:
        ptgsk.run(Timeaxis(t, dt, 24), state)
        t += deltahours(24)
        state = ptgsk.reg_model_state
        opt_states[t] = state

    recal_stop = utc.time(YMDhms(2012, 5, 30))
    curr_time = recal_start
    q_obs_avg = TsTransform().to_average(t_start, dt, n_obs + 1, q_obs_m3s_ts)
    target_spec = TargetSpecificationPts(q_obs_avg, IntVector([0]), 1.0, KLING_GUPTA)
    target_spec_vec = TargetSpecificationVector([target_spec])
    i = 0
    times = []
    values = []
    p, p_min, p_max = construct_calibration_parameters(ptgsk)
    while curr_time < recal_stop:
        print(i)
        i += 1
        opt_start = curr_time - deltahours(24*num_opt_days)
        opt_state = opt_states.pop(opt_start)
        p = ptgsk.region_model.get_region_parameter()
        p_opt = ptgsk.optimize(Timeaxis(opt_start, dt, 24*num_opt_days), opt_state, target_spec_vec,
                               p, p_min, p_max, tr_stop=1.0e-5)
        ptgsk.region_model.set_region_parameter(p_opt)
        corr_state = adjust_simulator_state(ptgsk, curr_time, q_obs_m3s_ts)
        ptgsk.run(Timeaxis(curr_time, dt, 24), corr_state)
        curr_time += deltahours(24)
        opt_states[curr_time] = ptgsk.reg_model_state
        discharge = ptgsk.region_model.statistics.discharge([0])
        times.extend(discharge.time(i) for i in range(discharge.size()))
        values.extend(list(np.array(discharge.v)))
    plt.plot(utc_to_greg(times), values)
    plot_results(None, q_obs=observed_tistel_discharge(UtcPeriod(recal_start, recal_stop)))
    set_calendar_formatter(Calendar())
    #plt.interactive(1)
    plt.title("Continuously recalibrated discharge vs observed")
    plt.xlabel("Time in UTC")
    plt.ylabel(r"Discharge in $\mathbf{m^3s^{-1}}$", verticalalignment="top", rotation="horizontal")
    plt.gca().yaxis.set_label_coords(0, 1.1)
Example #42
def forecast_demo():
    """Simple forecast demo using arome data from met.no. Initial state
    is bootstrapped by simulating one hydrological year (starting
    Sept 1. 2011), and then calculating the state August 31. 2012. This
    state is then used as initial state for simulating Sept 1, 2011,
    after scaling with observed discharge. The validity of this approach
    is limited by the temporal variation of the spatial distribution of
    the discharge state, q, in the Kirchner method. The model is then
    stepped forward until Oct 1, 2015, and then used to compute the
    discharge for 65 hours using Arome data. At last, the results
    are plotted as simple timeseries.

    """
    utc = Calendar()
    t_start = utc.time(YMDhms(2011, 9, 1))
    t_fc_start = utc.time(YMDhms(2015, 10, 1))
    dt = deltahours(1)
    n_obs = int(round((t_fc_start - t_start)/dt))
    n_fc = 65
    obs_time_axis = Timeaxis(t_start, dt, n_obs)
    fc_time_axis = Timeaxis(t_fc_start, dt, n_fc)
    total_time_axis = Timeaxis(t_start, dt, n_obs + n_fc)
    q_obs_m3s_ts = observed_tistel_discharge(total_time_axis.total_period())
    ptgsk = create_tistel_simulator(PTGSKOptModel, tistel.geo_ts_repository(tistel.grid_spec.epsg()))
    initial_state = burn_in_state(ptgsk, t_start, utc.time(YMDhms(2012, 9, 1)), q_obs_m3s_ts)
    ptgsk.run(obs_time_axis, initial_state)
    plot_results(ptgsk, q_obs_m3s_ts)

    current_state = adjust_simulator_state(ptgsk, t_fc_start, q_obs_m3s_ts)

    ptgsk_fc = create_tistel_simulator(PTGSKModel, tistel.arome_repository(tistel.grid_spec, t_fc_start))
    ptgsk_fc.run(fc_time_axis, current_state)
    plt.figure()
    q_obs_m3s_ts = observed_tistel_discharge(fc_time_axis.total_period())
    plot_results(ptgsk_fc, q_obs_m3s_ts)
    #plt.interactive(1)
    plt.show()
Example #43
     def test_get_timeseries_using_known_service_and_db_content(self):
         utc = Calendar() # always use Calendar() stuff
         met_stations=[ # this is the list of MetStations, the gis_id tells the position, the remaining tells us what properties we observe/forecast/calculate at the metstation (smg-ts)
             MetStationConfig(gis_id=598,temperature=u'/NeNi-Sylsjøen......-T0017V3KI0114',precipitation=u'/NeNi-Sylsjøen-2....-T0000D9BI0124'),
             MetStationConfig(gis_id=574,temperature=u'/NeNi-Stuggusjøen...-T0017V3KI0114',precipitation=u'/NeNi-Stuggusjøen...-T0000D9BI0124',radiation=u'/ENKI/STS/Radiation/Sim.-Stuggusjøen...-T0006A0B-0119')
         ]
         #note: the MetStationConfig can be constructed from yaml-config
         gis_location_repository=GisLocationService() # this provides the gis locations for my stations
          smg_ts_repository = SmGTsRepository(PROD,FC_PROD) # this provides the read function for my time-series
 
         geo_ts_repository = GeoTsRepository(epsg_id=32633,
             geo_location_repository=gis_location_repository,
             ts_repository=smg_ts_repository,
             met_station_list=met_stations,
             ens_config=None) #pass service info and met_stations
 
         self.assertIsNotNone(geo_ts_repository)
         utc_period = UtcPeriod(utc.time(YMDhms(2010, 1, 1, 0, 0, 0)),utc.time(YMDhms(2010, 1, 2, 0, 0, 0)))
         ts_types= ['temperature','precipitation','radiation']
         geo_ts_dict = geo_ts_repository.get_timeseries(ts_types,utc_period=utc_period,geo_location_criteria=None)
         self.assertIsNotNone(geo_ts_dict)
         for ts_type in ts_types:
              self.assertTrue(ts_type in geo_ts_dict.keys(), "we expect to find an entry for each requested type (it could be an empty list, though)")
              self.assertTrue(len(geo_ts_dict[ts_type]) > 0, "we expect to find the series that we pass in, given they have not changed the name in SmG PROD")
Example #44
        def test_run_observed_then_arome_and_store(self):
            """
              Start Tistel 2015.09.01, dummy state with some kirchner water
               use observations around Tistel (geo_ts_repository)
               and simulate forwared to 2015.10.01 (store discharge and catchment level precip/temp)
               then use arome forecast for 65 hours (needs arome for this period in arome-directory)
               finally store the arome results.

            """
            utc = Calendar()  # No offset gives Utc
            time_axis = Timeaxis(utc.time(YMDhms(2015, 9, 1, 0)), deltahours(1), 30 * 24)
            fc_time_axis = Timeaxis(utc.time(YMDhms(2015, 10, 1, 0)), deltahours(1), 65)

            interpolation_id = 0
            ptgsk = DefaultSimulator("Tistel-ptgsk",
                                     interpolation_id,
                                     self.region_model_repository,
                                     self.geo_ts_repository,
                                     self.interpolation_repository, None)
            n_cells = ptgsk.region_model.size()
            ptgsk_state = DefaultStateRepository(ptgsk.region_model.__class__, n_cells)

            ptgsk.region_model.set_state_collection(-1, True)  # collect state so we can inspect it
            s0 = ptgsk_state.get_state(0)
            for i in range(s0.size()):  # add some juice to get started
                s0[i].kirchner.q = 0.5

            ptgsk.run(time_axis, s0)

            print("Done simulation, testing that we can extract data from model")

            cids = api.IntVector()  # an empty vector means: pull out data for all catchment-ids
            model = ptgsk.region_model  # fetch out the model
            sum_discharge = model.statistics.discharge(cids)
            self.assertIsNotNone(sum_discharge)
            avg_temperature = model.statistics.temperature(cids)
            avg_precipitation = model.statistics.precipitation(cids)
            self.assertIsNotNone(avg_precipitation)
            self.assertIsNotNone(avg_temperature)
            for time_step in range(time_axis.size()):
                precip_raster = model.statistics.precipitation(cids, time_step)  # example raster output
                self.assertEqual(precip_raster.size(), n_cells)
            avg_gs_lwc = model.gamma_snow_state.lwc(cids)  # sca skaugen|gamma
            self.assertIsNotNone(avg_gs_lwc)
            # lwc surface_heat alpha melt_mean melt iso_pot_energy temp_sw
            avg_gs_output = model.gamma_snow_response.outflow(cids)
            self.assertIsNotNone(avg_gs_output)
            print("done. now save to db")
            # SmGTsRepository(PROD,FC_PROD)
            save_list = [
                TsStoreItem(u'/test/x/shyft/tistel/discharge_m3s', lambda m: m.statistics.discharge(cids)),
                TsStoreItem(u'/test/x/shyft/tistel/temperature', lambda m: m.statistics.temperature(cids)),
                TsStoreItem(u'/test/x/shyft/tistel/precipitation', lambda m: m.statistics.precipitation(cids)),
            ]

            tss = TimeseriesStore(SmGTsRepository(PREPROD, FC_PREPROD), save_list)

            self.assertTrue(tss.store_ts(ptgsk.region_model))

            print("Run forecast arome")
            endstate = ptgsk.region_model.state_t.vector_t()
            ptgsk.region_model.get_states(endstate)  # get the state at end of obs
            ptgsk.geo_ts_repository = self.arome_repository  # switch to arome here
            ptgsk.run_forecast(fc_time_axis, fc_time_axis.start, endstate)  # now forecast
            print("Done forecast")
            fc_save_list = [
                TsStoreItem(u'/test/x/shyft/tistel/fc_discharge_m3s', lambda m: m.statistics.discharge(cids)),
                TsStoreItem(u'/test/x/shyft/tistel/fc_temperature', lambda m: m.statistics.temperature(cids)),
                TsStoreItem(u'/test/x/shyft/tistel/fc_precipitation', lambda m: m.statistics.precipitation(cids)),
                TsStoreItem(u'/test/x/shyft/tistel/fc_radiation', lambda m: m.statistics.radiation(cids)),
                TsStoreItem(u'/test/x/shyft/tistel/fc_rel_hum', lambda m: m.statistics.rel_hum(cids)),
                TsStoreItem(u'/test/x/shyft/tistel/fc_wind_speed', lambda m: m.statistics.wind_speed(cids)),

            ]
            TimeseriesStore(SmGTsRepository(PREPROD, FC_PREPROD), fc_save_list).store_ts(ptgsk.region_model)
            print("Done save to db")
Example #45
 def test_utctime_from_datetime(self):
     utc = Calendar()
     dt1 = dt.datetime(2015, 6, 1, 2, 3, 4)
     t1 = utctime_from_datetime(dt1)
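     # utctime_from_datetime interprets the naive datetime as UTC, so it equals Calendar().time(...)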
     self.assertEqual(t1, utc.time(2015, 6, 1, 2, 3, 4))