Example #1
def plot_results(ptgsk, q_obs=None):
    h_obs = None
    if ptgsk is not None:
        plt.subplot(3, 1, 1)
        discharge = ptgsk.region_model.statistics.discharge([0])
        temp = ptgsk.region_model.statistics.temperature([0])
        precip = ptgsk.region_model.statistics.precipitation([0])
        # Results on same time axis, so we only need one
        times = utc_to_greg(
            [discharge.time(i) for i in range(discharge.size())])
        plt.plot(times, np.array(discharge.v))
        plt.gca().set_xlim(times[0], times[-1])
        plt.ylabel(r"Discharge in $\mathbf{m^3s^{-1}}$")
        set_calendar_formatter(Calendar())
    if q_obs is not None:
        obs_times = utc_to_greg([q_obs.time(i) for i in range(q_obs.size())])
        ovs = [q_obs.value(i) for i in range(q_obs.size())]
        h_obs, = plt.plot(obs_times, ovs, linewidth=2, color='k')
        ax = plt.gca()
        ax.set_xlim(obs_times[0], obs_times[-1])
    if ptgsk is not None:
        plt.subplot(3, 1, 2)
        plt.plot(times, np.array(temp.v))
        set_calendar_formatter(Calendar())
        plt.gca().set_xlim(times[0], times[-1])
        plt.ylabel(r"Temperature in C")
        plt.subplot(3, 1, 3)
        plt.plot(times, np.array(precip.v))
        set_calendar_formatter(Calendar())
        plt.gca().set_xlim(times[0], times[-1])
        plt.ylabel(r"Precipitation in mm")
    return h_obs
Example #2
def continuous_calibration():
    utc = Calendar()
    t_start = utc.time(YMDhms(2011, 9, 1))
    t_fc_start = utc.time(YMDhms(2015, 10, 1))
    dt = deltahours(1)
    n_obs = int(round((t_fc_start - t_start)/dt))
    obs_time_axis = TimeAxisFixedDeltaT(t_start, dt, n_obs + 1)
    q_obs_m3s_ts = observed_tistel_discharge(obs_time_axis.total_period())

    ptgsk = create_tistel_simulator(PTGSKOptModel, tistel.geo_ts_repository(tistel.grid_spec.epsg()))
    initial_state = burn_in_state(ptgsk, t_start, utc.time(YMDhms(2012, 9, 1)), q_obs_m3s_ts)

    num_opt_days = 30
    # Step forward num_opt_days days and store the state for each day:
    recal_start = t_start + deltahours(num_opt_days*24)
    t = t_start
    state = initial_state
    opt_states = {t: state}
    while t < recal_start:
        ptgsk.run(TimeAxisFixedDeltaT(t, dt, 24), state)
        t += deltahours(24)
        state = ptgsk.reg_model_state
        opt_states[t] = state

    recal_stop = utc.time(YMDhms(2012, 5, 30))
    curr_time = recal_start
    q_obs_avg = TsTransform().to_average(t_start, dt, n_obs + 1, q_obs_m3s_ts)
    target_spec = TargetSpecificationPts(q_obs_avg, IntVector([0]), 1.0, KLING_GUPTA)
    target_spec_vec = TargetSpecificationVector([target_spec])
    i = 0
    times = []
    values = []
    p, p_min, p_max = construct_calibration_parameters(ptgsk)
    while curr_time < recal_stop:
        print(i)
        i += 1
        opt_start = curr_time - deltahours(24*num_opt_days)
        opt_state = opt_states.pop(opt_start)
        p = ptgsk.region_model.get_region_parameter()  # re-start each calibration from the current region parameters
        p_opt = ptgsk.optimize(TimeAxisFixedDeltaT(opt_start, dt, 24*num_opt_days), opt_state, target_spec_vec,
                               p, p_min, p_max, tr_stop=1.0e-5)
        ptgsk.region_model.set_region_parameter(p_opt)
        corr_state = adjust_simulator_state(ptgsk, curr_time, q_obs_m3s_ts)
        ptgsk.run(TimeAxisFixedDeltaT(curr_time, dt, 24), corr_state)
        curr_time += deltahours(24)
        opt_states[curr_time] = ptgsk.reg_model_state
        discharge = ptgsk.region_model.statistics.discharge([0])
        times.extend(discharge.time(i) for i in range(discharge.size()))
        values.extend(list(np.array(discharge.v)))
    plt.plot(utc_to_greg(times), values)
    plot_results(None, q_obs=observed_tistel_discharge(UtcPeriod(recal_start, recal_stop)))
    set_calendar_formatter(Calendar())
    #plt.interactive(1)
    plt.title("Continuously recalibrated discharge vs observed")
    plt.xlabel("Time in UTC")
    plt.ylabel(r"Discharge in $\mathbf{m^3s^{-1}}$", verticalalignment="top", rotation="horizontal")
    plt.gca().yaxis.set_label_coords(0, 1.1)
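The bookkeeping in the loop above is the subtle part: each day the simulator state stored num_opt_days earlier is popped and used as the starting point for re-optimization, while the state produced by today's one-day run is stored for use num_opt_days from now. A stripped-down sketch of just that rolling-window pattern, with plain-Python stand-ins for the shyft calls (all names here are illustrative):

from datetime import datetime, timedelta

def rolling_recalibration(t_start, recal_stop, num_opt_days=30):
    day = timedelta(days=1)
    opt_states = {t_start: "initial-state"}    # state snapshots keyed by time

    # warm-up: step forward num_opt_days days, storing the state for each day
    t = t_start
    while t < t_start + num_opt_days * day:
        t += day
        opt_states[t] = f"state@{t:%Y-%m-%d}"  # stand-in for ptgsk.run(...) + reg_model_state

    # main loop: re-optimize over the trailing window, then advance one day
    curr_time = t_start + num_opt_days * day
    while curr_time < recal_stop:
        opt_start = curr_time - num_opt_days * day
        opt_state = opt_states.pop(opt_start)  # starting state for the optimization window
        # ... optimize over [opt_start, curr_time), run one day from the adjusted state ...
        curr_time += day
        opt_states[curr_time] = f"state@{curr_time:%Y-%m-%d}"
    return opt_states

states = rolling_recalibration(datetime(2011, 9, 1), datetime(2012, 5, 30))
print(len(states))  # the window keeps num_opt_days + 1 snapshots alive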
Example #3
    def test_fixed_tsv_empty(self) -> None:
        """Test that an empty TsVector is generated by fixed_tsv when given an empty sequence of values."""
        cal = Calendar()
        period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))

        tsv = fixed_tsv(period, [])
        self.assertEqual(len(tsv), 0)
Example #4
def ensemble_demo():
    utc = Calendar()
    t_start = utc.time(YMDhms(2011, 9, 1))
    t_fc_ens_start = utc.time(YMDhms(2015, 7, 26))
    disp_start = utc.time(YMDhms(2015, 7, 20))
    dt = deltahours(1)
    n_obs = int(round((t_fc_ens_start - t_start) / dt))
    n_fc_ens = 30
    n_disp = int(round((t_fc_ens_start - disp_start) / dt)) + n_fc_ens + 24 * 7

    obs_time_axis = Timeaxis(t_start, dt, n_obs + 1)
    fc_ens_time_axis = Timeaxis(t_fc_ens_start, dt, n_fc_ens)
    display_time_axis = Timeaxis(disp_start, dt, n_disp)

    q_obs_m3s_ts = observed_tistel_discharge(obs_time_axis.total_period())
    ptgsk = create_tistel_simulator(
        PTGSKOptModel, tistel.geo_ts_repository(tistel.grid_spec.epsg()))
    initial_state = burn_in_state(ptgsk, t_start, utc.time(YMDhms(2012, 9, 1)),
                                  q_obs_m3s_ts)

    ptgsk.run(obs_time_axis, initial_state)
    current_state = adjust_simulator_state(ptgsk, t_fc_ens_start, q_obs_m3s_ts)
    q_obs_m3s_ts = observed_tistel_discharge(display_time_axis.total_period())
    ens_repos = tistel.arome_ensemble_repository(tistel.grid_spec)
    ptgsk_fc_ens = create_tistel_simulator(PTGSKModel, ens_repos)
    sims = ptgsk_fc_ens.create_ensembles(fc_ens_time_axis, t_fc_ens_start,
                                         current_state)
    for sim in sims:
        sim.simulate()
    # plt.hold(1) is gone in modern matplotlib; hold-on behaviour is now the default
    percentiles = [10, 25, 50, 75, 90]
    plot_percentiles(sims, percentiles, obs=q_obs_m3s_ts)
    plt.interactive(1)
    plt.show()
Example #5
 def test_find_with_region_model_and_time_filter(self):
     cal = Calendar()
     region_model_id = "neanidelv-ptgsk"
     n_cells = 10
     tags = ["initial", "unverified"]
     state_vector = self._create_state_vector(n_cells)
     # now start state_repository test
     state_repository = YamlStateRepository(self._test_state_directory)
     # put in two states, record the unique state_id..
     state_id_1 = state_repository.put_state(region_model_id, cal.time(YMDhms(2001, 1, 1, 0, 0, 0)), state_vector,
                                             tags)
     state_id_2 = state_repository.put_state(region_model_id, cal.time(YMDhms(2001, 1, 2, 0, 0, 0)), state_vector,
                                             tags)
     all_states = state_repository.find_state()
     neanidelv_states = state_repository.find_state(region_model_id)
     self.assertEqual(2, len(all_states))
     self.assertEqual(2, len(neanidelv_states))
     most_recent_state_before_time = state_repository.find_state(region_model_id,
                                                                 cal.time(YMDhms(2001, 1, 1, 0, 0, 0)))
     self.assertEqual(1, len(most_recent_state_before_time))
     self.assertEqual(state_id_1, most_recent_state_before_time[0].state_id)
     self.assertEqual(0, len(state_repository.find_state(
         region_model_id, cal.time(YMDhms(2000, 12, 31, 23, 59, 59)))))
     self.assertEqual(state_id_2, state_repository.find_state(
         region_model_id, cal.time(YMDhms(2002, 1, 1, 0, 0, 0)))[0].state_id)
Example #6
def plot_percentiles(sim, percentiles, obs=None):
    discharges = [s.region_model.statistics.discharge([0]) for s in sim]
    times = utc_to_greg(np.array([discharges[0].time(i) for i in range(discharges[0].size())], dtype='d'))
    all_discharges = np.array([d.v for d in discharges])
    perc_arrs = [a for a in np.percentile(all_discharges, percentiles, 0)]
    h, fill_handles = plot_np_percentiles(times, perc_arrs, base_color=(51/256, 102/256, 193/256))
    percentile_texts = ["{} - {}".format(percentiles[i], percentiles[-(i + 1)]) for i in range(len(percentiles)//2)]
    ax = plt.gca()
    maj_loc = AutoDateLocator(tz=pytz.UTC, interval_multiples=True)
    ax.xaxis.set_major_locator(maj_loc)
    set_calendar_formatter(Calendar())
    if len(percentiles) % 2:
        fill_handles.append(h[0])
        percentile_texts.append("{}".format(percentiles[len(percentiles)//2]))
    if obs is not None:
        h_obs = plot_results(None, obs)
        fill_handles.append(h_obs)
        percentile_texts.append("Observed")

    ax.legend(fill_handles, percentile_texts)
    ax.grid(b=True, color=(51/256, 102/256, 193/256), linewidth=0.1, linestyle='-', axis='y')
    plt.xlabel("Time in UTC")
    plt.ylabel(r"Discharge in $\mathbf{m^3s^{-1}}$", verticalalignment="top", rotation="horizontal")
    ax.yaxis.set_label_coords(0, 1.1)
    return h, ax
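plot_np_percentiles is not shown in this listing; below is a minimal matplotlib sketch of the band-filling it presumably performs — pairing outermost percentiles into shaded bands and drawing the middle one as a line. The pairing logic and names are assumptions, not the shyft helper itself:

import numpy as np
import matplotlib.pyplot as plt

x = np.arange(100)
sims = np.random.default_rng(0).normal(size=(30, 100)).cumsum(axis=1)
percs = np.percentile(sims, [10, 25, 50, 75, 90], axis=0)

fig, ax = plt.subplots()
base_color = (51/256, 102/256, 193/256)
fills = [ax.fill_between(x, percs[i], percs[-(i + 1)], color=base_color, alpha=0.25 + 0.25*i)
         for i in range(len(percs)//2)]            # 10-90 band, then 25-75 band
median, = ax.plot(x, percs[len(percs)//2], color=base_color)  # middle percentile as a line
ax.legend(fills + [median], ["10 - 90", "25 - 75", "50"])
plt.show()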
Example #7
 def test_glacier_melt_ts_m3s(self):
     utc = Calendar()
     t0 = utc.time(2016, 10, 1)
     dt = deltahours(1)
     n = 240
     ta = TimeAxis(t0, dt, n)
     area_m2 = 487 * 1000 * 1000  # Jostedalsbreen, the largest glacier in mainland Europe
     temperature = TimeSeries(ta=ta,
                              fill_value=10.0,
                              point_fx=fx_policy.POINT_AVERAGE_VALUE)
     sca_values = dv.from_numpy(np.linspace(area_m2 * 1.0, 0.0, num=n))
     sca = TimeSeries(ta=ta,
                      values=sca_values,
                      point_fx=fx_policy.POINT_AVERAGE_VALUE)
     gf = 1.0 * area_m2
     dtf = 6.0
     melt_m3s = create_glacier_melt_ts_m3s(
         temperature, sca, gf,
         dtf)  # Here we get back a melt_ts, that we can do ts-stuff with
     self.assertIsNotNone(melt_m3s)
     full_melt_m3s = glacier_melt_step(dtf, 10.0, 0.0, gf)
     expected_melt_m3s = np.linspace(0.0, full_melt_m3s, num=n)
     assert_array_almost_equal(expected_melt_m3s,
                               melt_m3s.values.to_numpy(), 4)
     # Just to check we can work with the result as a ts in all aspects
     mx2 = melt_m3s * 2.0
     emx2 = expected_melt_m3s * 2.0
     assert_array_almost_equal(emx2, mx2.values.to_numpy(), 4)
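A back-of-envelope magnitude check for full_melt_m3s, assuming dtf is a degree-day factor in mm/°C/day applied over the snow-free glacier area; the unit reasoning is our assumption, not the documented shyft formula:

dtf = 6.0                                  # assumed: mm of melt per degC per day
t_c = 10.0                                 # air temperature in degC
area_m2 = 487e6                            # the glacier area used in the test
melt_m3s = dtf * t_c * 1e-3 * area_m2 / 86400.0
print(round(melt_m3s, 1))                  # ~338.2 m3/s with the whole glacier snow-free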
Example #8
    def test_windowed_percentiles_tsv_values(self) -> None:
        """Test that a TsVector is generated by windowed_percentiles_tsv with time-series
        fulfilling some properties of being percentiles of the data ts."""
        cal = Calendar()
        period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))

        data = np.linspace(-2, 2, 24 * 7)
        data_ts = TimeSeries(TimeAxis(0, Calendar.HOUR, len(data)), data,
                             POINT_INSTANT_VALUE)

        # compute
        percentiles = [0, 10, 50, 90, 100]
        tsv = windowed_percentiles_tsv(data_ts, period, 3 * Calendar.HOUR,
                                       12 * Calendar.HOUR, percentiles,
                                       self.client, cal)
        self.assertEqual(len(tsv), 5)

        # assert that the time-series have the correct properties for being percentile series
        for i in range(len(tsv[0])):
            prev_v = tsv[0].values[i]
            for j in range(len(percentiles) - 1):
                v = tsv[j + 1].values[i]
                # both values will be NaN at the end - that is ok
                if math.isnan(prev_v) and math.isnan(v):
                    continue
                # check that a higher percentile never has a smaller value than a lower one
                self.assertLessEqual(prev_v, v)
                prev_v = v
Example #9
    def test_error_handling(self):
        utc = Calendar()
        t0 = utc.time(2018, 1, 1)
        dt = deltahours(1)
        dv = DoubleVector()
        dv[:] = [1.0, 2.0, 2.5, 1.9, 3.0, 3.1,
                 float('nan')]  # also verify nan-handling
        ts = TimeSeries(TimeAxis(t0, dt, len(dv)), dv, POINT_AVERAGE_VALUE)
        # n_bits must be > 0
        with self.assertRaises(RuntimeError):
            ts.decode(start_bit=0, n_bits=0)

        # start_bit + n_bits must be <= 52 (the mantissa width of a double)
        with self.assertRaises(RuntimeError):
            ts.decode(start_bit=41, n_bits=12)

        # start_bit must be >= 0
        with self.assertRaises(RuntimeError):
            ts.decode(start_bit=-1, n_bits=12)
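For contrast, a minimal sketch of a decode call that satisfies all three constraints. The semantics assumed here — decode extracts the unsigned integer stored in bits [start_bit, start_bit + n_bits) of each sample, and NaN stays NaN — are inferred from the constraints above, not from documentation:

utc = Calendar()
ta = TimeAxis(utc.time(2018, 1, 1), deltahours(1), 3)
flags = TimeSeries(ta, DoubleVector.from_numpy(np.array([6.0, 1.0, float('nan')])),
                   POINT_AVERAGE_VALUE)
low_bits = flags.decode(start_bit=1, n_bits=2)  # valid: start_bit >= 0, n_bits > 0, 1 + 2 <= 52
# under the assumed semantics: 6 == 0b110 -> 0b11 == 3, 1 == 0b001 -> 0, nan -> nan
print(low_bits.values.to_numpy())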
Example #10
def simple_run_demo():
    """Simple demo using HBV time-series and similar model-values

    """
    # 1. Setup the time-axis for our simulation
    normal_calendar = Calendar(3600)  # we need UTC+1, since day-boundaries in day series in SmG are at UTC+1
    t_start = normal_calendar.time(2010, 9, 1)  # simulation start
    t_end = normal_calendar.add(t_start, Calendar.YEAR, 5)  # 5-year simulation period
    time_axis = Timeaxis(t_start, model_dt, normal_calendar.diff_units(t_start, t_end, model_dt))

    # 2. Create the shyft model from the HBV model-repository
    shyft_model = create_kjela_model(PTHSKModel, kjela.geo_ts_repository)

    # 3. establish the initial state
    # using the *pattern* of distribution after one year (so hbv-zone 1..10 get approximate distribution of discharge)
    #      *and* the observed discharge at the start time t_start
    #
    t_burnin = normal_calendar.add(t_start, Calendar.YEAR, 1)  # use one year to get distribution between hbv-zones
    burnin_time_axis = Timeaxis(t_start, model_dt, normal_calendar.diff_units(t_start, t_burnin, model_dt))
    q_obs_m3s_ts = observed_kjela_discharge(time_axis.total_period())  # get out the observation ts
    q_obs_m3s_at_t_start = q_obs_m3s_ts(t_start)  # get the m3/s at t_start
    initial_state = burn_in_state(shyft_model, burnin_time_axis, q_obs_m3s_at_t_start)

    # 4. now run the model with the established state
    #    that will start out with the burn in state
    shyft_model.run(time_axis, initial_state)

    # 5. display results etc. goes here
    plot_results(shyft_model, q_obs_m3s_ts)
    plt.show()
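Note that simple_run_demo references a module-level model_dt that this snippet does not show. A plausible definition, assuming hourly simulation steps (an assumption, not the original value):

model_dt = deltahours(1)  # assumed module-level simulation time step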
Example #11
 def test_forecast_average_slice(self):
     """
     Demo and test TsVector.average_slice(lead_time,dt,n)
     """
     utc = Calendar()
     t0 = utc.time(2017, 1, 1)
     dt = deltahours(1)
     n = 66  # typical arome
     fc_dt_n_hours = 6
     fc_dt = deltahours(fc_dt_n_hours)
     fc_n = 4 * 10  # 4 each day 10 days
     fc_v = self._create_forecasts(t0, dt, n, fc_dt, fc_n)
     for lead_time_hours in range(12):
         for slice_length_units in [1, 2, 3, 4, 6, 12]:
             for dt_hours in [1, 2, 3]:
                 slice_v = fc_v.average_slice(deltahours(lead_time_hours),
                                              deltahours(dt_hours),
                                              slice_length_units)
                 self.assertEqual(len(slice_v), len(fc_v))
                 # then loop over the slice_v and prove it's equal
                  # to the average of the same portion of the original
                 for s, f in zip(slice_v, fc_v):
                     ta = TimeAxis(
                         f.time_axis.time(0) + deltahours(lead_time_hours),
                         deltahours(dt_hours), slice_length_units)
                     ts_expected = f.average(ta)
                     self.assertTrue(s.time_axis == ts_expected.time_axis)
                     self.assertTrue(
                         np.allclose(s.values.to_numpy(),
                                     ts_expected.values.to_numpy()))
Example #12
    def test_raise_exception_when_no_data_in_request_period(self):
        utc_calendar = Calendar()
        netcdf_repository = self._construct_from_test_data()
        netcdf_repository.raise_if_no_data = True  # strict behaviour: raise when the request period has no data
        self.assertIsNotNone(netcdf_repository)
        utc_period = UtcPeriod(
            utc_calendar.time(YMDhms(2017, 1, 1, 0, 0, 0)),    # a period where there is no data
            utc_calendar.time(YMDhms(2020, 12, 31, 0, 0, 0)))  # in the file supplied
        type_source_map = dict()
        type_source_map['temperature'] = TemperatureSource

        self.assertRaises(RuntimeError,
                          netcdf_repository.get_timeseries,
                          type_source_map,
                          geo_location_criteria=None,
                          utc_period=utc_period)
Example #13
    def test_dtss_partition_by_average(self):
        """
        This test illustrates use of partition_by client and server-side.
        The main point here is to ensure that the evaluate period covers
        both the historical and the evaluation period.
        """
        with tempfile.TemporaryDirectory() as c_dir:
            # setup data to be calculated
            utc = Calendar()
            d = deltahours(1)
            t = utc.time(2000, 1, 1)
            n = utc.diff_units(t, utc.add(t, Calendar.YEAR, 10), d)
            ta = TimeAxis(t, d, n)
            td = TimeAxis(t, d * 24, n // 24)
            n_ts = 1
            store_tsv = TsVector()  # something we store at server side
            for i in range(n_ts):
                pts = TimeSeries(ta,
                                 np.sin(np.linspace(start=0, stop=1.0*(i + 1), num=ta.size())),
                                 point_fx.POINT_AVERAGE_VALUE)
                ts_id = shyft_store_url(f"{i}")
                store_tsv.append(TimeSeries(ts_id, pts))  # generate a bound pts to store

            # start dtss server
            dtss = DtsServer()
            cache_on_write = True
            port_no = find_free_port()
            host_port = 'localhost:{0}'.format(port_no)
            dtss.set_auto_cache(True)
            dtss.set_listening_port(port_no)
            dtss.set_container("test", c_dir)  # container 'test' maps to the c_dir directory
            dtss.start_async()  # the internal shyft time-series will be stored to that container

            # create dts client
            c = DtsClient(host_port, auto_connect=False)  # demonstrate object life-time connection
            c.store_ts(store_tsv,
                       overwrite_on_write=True,
                       cache_on_write=cache_on_write)

            t_0 = utc.time(2018, 1, 1)
            tax = TimeAxis(t_0, Calendar.DAY, 365)
            ts_h1 = TimeSeries(shyft_store_url(f'{0}'))
            ts_h2 = store_tsv[0]
            ts_p1 = ts_h1.partition_by(utc, t, Calendar.YEAR, 10,
                                       t_0).average(tax)
            ts_p2 = ts_h2.partition_by(utc, t, Calendar.YEAR, 10,
                                       t_0).average(tax)
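As we read it, ts_h1.partition_by(utc, t, Calendar.YEAR, 10, t_0) splits the 10-year history into 10 one-year slices, each time-shifted onto a common axis starting at t_0, which is what makes the subsequent .average(tax) comparable across years. A pure-NumPy sketch of that bookkeeping (the exact shyft semantics, e.g. calendar-aware year lengths, are glossed over here):

import numpy as np

hours_per_year = 365 * 24                        # simplified: ignore leap years
history = np.sin(np.linspace(0.0, 1.0, hours_per_year * 10))  # 10 years of hourly values

partitions = history.reshape(10, hours_per_year)  # one row per year, on a common axis
climatology = partitions.mean(axis=0)             # e.g. average across the shifted years
print(partitions.shape, climatology.shape)        # (10, 8760) (8760,)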
Example #14
 def test_simple_case(self):
     utc = Calendar()
     t0 = utc.time(2018, 1, 1)
     dt = deltahours(1)
     dv = DoubleVector()
     dv[:] = [1.0, 2.0, 2.5, 1.9, 3.0, 3.1, -1.0]
     i1_ex = [0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0]
     ts = TimeSeries(TimeAxis(t0, dt, len(dv)), dv, POINT_AVERAGE_VALUE)
     i1 = ts.inside(2.0, 3.0)
     assert_array_almost_equal(i1.values.to_numpy(), np.array(i1_ex))
Example #15
 def test_inverted_values(self):
     utc = Calendar()
     t0 = utc.time(2018, 1, 1)
     dt = deltahours(1)
     dv = DoubleVector()
     dv[:] = [1.0, 2.0, 2.5, 1.9, 3.0, 3.1, float('nan')]  # also verify nan-handling
     i1_ex = [1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0]
     ts = TimeSeries(TimeAxis(t0, dt, len(dv)), dv, POINT_AVERAGE_VALUE)
     i2 = ts.inside(min_v=2.0, max_v=3.0, nan_v=1.0, inside_v=0.0, outside_v=1.0)
     assert_array_almost_equal(i2.values.to_numpy(), np.array(i1_ex))
Example #16
    def test_fixed_tsv_values(self) -> None:
        """Test that a TsVector with fixed constant values is generated by fixed_tsv when given
        a sequence of values."""
        cal = Calendar()
        period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))

        values = [12, 15.5]
        tsv = fixed_tsv(period, values)
        self.assertEqual(len(tsv), 2)
        for v, ts in zip(values, tsv):
            for ts_v in ts.values:
                self.assertEqual(ts_v, v)
Example #17
    def test_dtss_remove_series(self):
        with tempfile.TemporaryDirectory() as c_dir:

            # start the server
            dtss = DtsServer()
            port_no = find_free_port()
            host_port = 'localhost:{0}'.format(port_no)
            dtss.set_listening_port(port_no)
            dtss.set_container("test", c_dir)  # container 'test' maps to the c_dir directory
            dtss.start_async()  # the internal shyft time-series will be stored to that container

            # setup some data
            utc = Calendar()
            d = deltahours(1)
            n = 365 * 24 // 3
            t = utc.time(2016, 1, 1)
            ta = TimeAxis(t, d, n)
            tsv = TsVector()
            pts = TimeSeries(ta, np.linspace(start=0, stop=1.0, num=ta.size()),
                             point_fx.POINT_AVERAGE_VALUE)
            tsv.append(TimeSeries("cache://test/foo", pts))

            # get a client
            client = DtsClient(host_port)
            client.store_ts(tsv)

            # start with no removing
            dtss.set_can_remove(False)

            # we should be disallowed to remove now
            with self.assertRaises(Exception) as ctx:
                client.remove("shyft://test/foo")
            self.assertEqual(str(ctx.exception),
                             "dtss::server: server does not support removing")

            # then try with allowing remove
            dtss.set_can_remove(True)

            # we only support removing shyft-url style data
            with self.assertRaises(Exception) as ctx:
                client.remove("protocol://test/foo")
            self.assertEqual(
                str(ctx.exception),
                "dtss::server: server does not allow removing for non shyft-url type data")

            # now it should work
            client.remove("shyft://test/foo")
Example #18
 def test_construct_repository(self):
     utc_calendar = Calendar()
     netcdf_repository = self._construct_from_test_data()
     self.assertIsNotNone(netcdf_repository)
     utc_period = UtcPeriod(
         utc_calendar.time(YMDhms(2005, 1, 1, 0, 0, 0)),
         utc_calendar.time(YMDhms(2014, 12, 31, 0, 0, 0)))
     type_source_map = dict()
     type_source_map['temperature'] = TemperatureSource
     geo_ts_dict = netcdf_repository.get_timeseries(
         type_source_map, geo_location_criteria=None, utc_period=utc_period)
     self.assertIsNotNone(geo_ts_dict)
Example #19
    def test_crudf_cycle(self):
        """
        Verify we can create, store, read, find and delete state in the state repository
        
        """

        # arrange, by creating one State
        cal = Calendar()
        utc_timestamp = cal.time(2001, 1, 1)
        region_model_id = "neanidelv-ptgsk"
        n_cells = 10
        tags = ["initial", "unverified"]
        state_vector = self._create_state_vector(n_cells)
        self.assertIsNotNone(
            state_vector,
            "we should have a valid state vector object at this spot")
        # now start state_repository test
        state_repository = YamlStateRepository(
            directory_path=self._test_state_directory,
            state_serializer=StateSerializer(PTGSKStateWithIdVector))
        # put in two states, record the unique state_id..
        state_id_1 = state_repository.put_state(region_model_id, utc_timestamp,
                                                state_vector, tags)
        state_id_2 = state_repository.put_state(region_model_id, utc_timestamp,
                                                state_vector, tags)
        # assert that we got two unique state_id
        self.assertIsNotNone(state_id_1, "We expect back a unique id")
        self.assertIsNotNone(state_id_2, "We expect back a unique id")
        self.assertNotEqual(
            state_id_1, state_id_2,
            "storing two state, same model, same time, each state should be stored with a unique id"
        )
        # now we should have two states in the repository
        state_infos = state_repository.find_state()
        self.assertEqual(2, len(state_infos),
                         "We just stored two, expect two back..")
        # extra test, verify that we really stored the state (using kirchner q)
        state_1 = state_repository.get_state(state_id_1)
        self.assertEqual(n_cells, len(state_1),
                         "expect to get back state with same number of cells")
        for i in range(n_cells):
            self.assertAlmostEqual(
                state_1[i].state.kirchner.q, state_vector[i].state.kirchner.q,
                3, "state repository should preserve state...")
        # now remove state
        state_repository.delete_state(state_id_1)
        # check that we got just one left, and that it is the correct one..
        state_list = state_repository.find_state()
        self.assertEqual(1, len(state_list))
        self.assertEqual(state_list[0].region_model_id, region_model_id)
        self.assertEqual(state_list[0].utc_timestamp, utc_timestamp)
        self.assertEqual(state_list[0].state_id, state_id_2)
Example #20
 def test_convolve_policy(self):
     utc = Calendar()
     ts = TimeSeries(ta=TimeAxisFixedDeltaT(utc.time(2001, 1, 1),
                                            deltahours(1), 24),
                     fill_value=10.0,
                     point_fx=point_fx.POINT_AVERAGE_VALUE)
     w = DoubleVector.from_numpy([0.05, 0.15, 0.6, 0.15, 0.05])
     cts = ts.convolve_w(w, convolve_policy.USE_FIRST)  # ensure mass-balance between source and cts
     self.assertIsNotNone(cts)
     self.assertEqual(len(cts), len(ts))
     self.assertEqual(cts.values.to_numpy().sum(),
                      ts.values.to_numpy().sum())
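Why the mass-balance assertion holds: the weights sum to 1.0 and the source is constant, so every convolved sample is again 10.0. A pure-NumPy sanity check of that reasoning, emulating USE_FIRST by repeating the first sample at the edge (our reading of the policy):

import numpy as np

w = np.array([0.05, 0.15, 0.6, 0.15, 0.05])  # unit-sum kernel: w.sum() == 1.0
src = np.full(24, 10.0)                      # constant source series
padded = np.concatenate([np.full(len(w) - 1, src[0]), src])
cts = np.convolve(padded, w, mode='valid')   # same length as src
assert np.isclose(cts.sum(), src.sum())      # mass balance holds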
Example #21
 def test_returns_empty_ts_when_no_data_in_request_period(self):
     utc_calendar = Calendar()
     netcdf_repository = self._construct_from_test_data()
     self.assertIsNotNone(netcdf_repository)
     utc_period = UtcPeriod(
         utc_calendar.time(2017, 1, 1, 0, 0, 0),    # a period where there is no data
         utc_calendar.time(2020, 12, 31, 0, 0, 0))  # in the file supplied
     type_source_map = dict()
     type_source_map['temperature'] = TemperatureSource
     geo_ts_dict = netcdf_repository.get_timeseries(
         type_source_map, geo_location_criteria=None, utc_period=utc_period)
     self.assertIsNotNone(geo_ts_dict)
Example #22
    def test_windowed_percentiles_tsv_empty(self) -> None:
        """Test that an empty TsVector is generated by windowed_percentiles_tsv
        when given an empty sequence of percentiles."""
        cal = Calendar()
        period = UtcPeriod(cal.time(2017, 1, 1), cal.time(2018, 1, 1))

        data = np.linspace(-2, 2, 24 * 7)
        data_ts = TimeSeries(TimeAxis(0, Calendar.HOUR, len(data)), data,
                             POINT_INSTANT_VALUE)

        # compute
        tsv = windowed_percentiles_tsv(data_ts, period, Calendar.HOUR,
                                       Calendar.HOUR, [], self.client, cal)
        self.assertEqual(len(tsv), 0)
Example #23
 def test_hbv_snow_step(self):
     utc = Calendar()
     s = HbvSnowState()
     p = HbvSnowParameter()
     s.distribute(p)
     r = HbvSnowResponse()
     calc = HbvSnowCalculator(p)
     t0 = utc.time(2016, 10, 1)
     t1 = utc.time(2016, 10, 2)
     temp = 0.4
     prec_mm_h = 0.3
     # Just check that we don't get an error when stepping
     calc.step(s, r, t0, t1, prec_mm_h, temp)
Example #24
    def test_failures(self):
        """
        Verify that dtss client server connections are auto-magically
        restored and fixed
        """
        with tempfile.TemporaryDirectory() as c_dir:

            # start the server
            dtss = DtsServer()
            port_no = find_free_port()
            host_port = 'localhost:{0}'.format(port_no)
            dtss.set_listening_port(port_no)
            dtss.set_container("test", c_dir)  # container 'test' maps to the c_dir directory
            dtss.start_async()  # the internal shyft time-series will be stored to that container

            # setup some data
            utc = Calendar()
            d = deltahours(1)
            n = 365 * 24 // 3
            t = utc.time(2016, 1, 1)
            ta = TimeAxis(t, d, n)
            tsv = TsVector()
            pts = TimeSeries(ta, np.linspace(start=0, stop=1.0, num=ta.size()),
                             point_fx.POINT_AVERAGE_VALUE)
            tsv.append(TimeSeries("cache://test/foo", pts))

            # get a client
            client = DtsClient(host_port, auto_connect=False)
            client.store_ts(tsv)
            client.close()
            client.store_ts(tsv)  # should just work, it re-opens automagically
            dtss.clear()  # the server is now gone, so the next call cannot succeed
            try:
                client.store_ts(tsv)
                self.fail('This should throw, because there is no dtss server to help you')
            except Exception:
                pass  # expected: no server to connect to

            dtss.set_listening_port(port_no)
            dtss.start_async()
            client.store_ts(tsv)  # this should just work, automagically reconnecting
Example #25
 def test_hbv_physical_snow_step(self):
     utc = Calendar()
     s = HbvPhysicalSnowState()
     p = HbvPhysicalSnowParameter()
     r = HbvPhysicalSnowResponse()
     s.distribute(p)
     calc = HbvPhysicalSnowCalculator(p)
     t = utc.time(2016, 10, 1)
     dt = deltahours(1)
     temp = 0.4
     rad = 12.0
     prec_mm_h = 0.3
     wind_speed = 1.3
     rel_hum = 0.4
     # Just check that we don't get an error when stepping
     calc.step(s, r, t, dt, temp, rad, prec_mm_h, wind_speed, rel_hum)
Example #26
 def test_find_with_region_model_filter(self):
     cal = Calendar()
     utc_timestamp = cal.time(YMDhms(2001, 1, 1))
     region_model_id = "neanidelv-ptgsk"
     n_cells = 10
     tags = ["initial", "unverified"]
     state_vector = self._create_state_vector(n_cells)
     # now start state_repository test
     state_repository = YamlStateRepository(self._test_state_directory)
     # put in two states, record the unique state_id..
     state_repository.put_state(region_model_id, utc_timestamp, state_vector, tags)
     state_repository.put_state("tokke-ptgsk", utc_timestamp, state_vector, tags)
     all_states = state_repository.find_state()
     neanidelv_states = state_repository.find_state(region_model_id)
     self.assertEqual(2, len(all_states))
     self.assertEqual(1, len(neanidelv_states))
     self.assertEqual(neanidelv_states[0].region_model_id, region_model_id)
Example #27
    def test_inside_of_derivative(self):
        """Created in response to https://github.com/statkraft/shyft/issues/352"""
        values = [1, 1, 1, 1]
        utc = Calendar()
        data = np.array(values, dtype='float64')
        data_ta = TimeAxis(utc.time(2015, 1, 1), 3600, len(data))

        orig = TimeSeries(data_ta, data, POINT_AVERAGE_VALUE)
        orig_derivative_inside_inf = orig.derivative(derivative_method.BACKWARD).inside(
            -float('inf'), float('inf'), 0, 1, 0)

        def check_nan(ts):
            """Method that returns 1 for all timesteps that contain nan."""
            return ts.inside(-float('inf'), float('inf'), 1, 0, 0).values.to_numpy()

        np.testing.assert_equal(check_nan(orig + orig_derivative_inside_inf), [0., 0., 0., 0.],
                                'TimeSeries.inside() should not find any NaN-values in this TimeSeries')
Example #28
 def test_gamma_snow_step(self):
     utc = Calendar()
     s = GammaSnowState()
     p = GammaSnowParameter()
     r = GammaSnowResponse()
     calc = GammaSnowCalculator()
     t = utc.time(2016, 10, 1)
     dt = deltahours(1)
     temp = 0.4
     rad = 12.0
     prec_mm_h = 0.3
     wind_speed = 1.3
     rel_hum = 0.4
     forest_fraction = 0.2
     altitude = 100.0
     # Just check that we don't get an error when stepping
     calc.step(s, r, t, dt, p, temp, rad, prec_mm_h, wind_speed, rel_hum, forest_fraction, altitude)
Example #29
    def test_get_ts_info(self):
        """
        Verify we can get specific TsInfo objects for time-series from the server backend.
        """
        with tempfile.TemporaryDirectory() as c_dir:

            # start the server
            dtss = DtsServer()
            port_no = find_free_port()
            host_adr = 'localhost:{0}'.format(port_no)
            dtss.set_listening_port(port_no)
            dtss.set_container("testing", c_dir)  # container 'testing' maps to the c_dir directory
            dtss.start_async()  # the internal shyft time-series will be stored to that container

            # get a client
            client = DtsClient(host_adr)

            # fetching a TsInfo for a not-yet-stored ts must fail
            with self.assertRaises(Exception):
                client.get_ts_info(r'shyft://testing/data')

            # setup some data
            utc = Calendar()
            d = deltahours(1)
            n = 365 * 24 // 3
            t = utc.time(2016, 1, 1)
            ta = TimeAxis(t, d, n)
            tsv = TsVector()
            pts = TimeSeries(ta, np.linspace(start=0, stop=1.0, num=ta.size()),
                             point_fx.POINT_AVERAGE_VALUE)
            tsv.append(TimeSeries(r'shyft://testing/data', pts))
            client.store_ts(tsv)

            info: TsInfo = client.get_ts_info(r'shyft://testing/data')

            self.assertEqual(info.name, r'data')
            self.assertEqual(info.point_fx, point_fx.POINT_AVERAGE_VALUE)
            self.assertEqual(info.data_period, ta.total_period())
Example #30
 def test_easy_rating_curve_construct(self):
     utc = Calendar()
     rating_curve = RatingCurveParameters(RatingCurveTimeFunctions([
         RatingCurveTimeFunction(
             utc.time(1950, 3, 27), RatingCurveFunction(RatingCurveSegments([
                 RatingCurveSegment(lower=0.474, a=5.97489, b=-0.4745, c=2.36997)
             ]))),
         RatingCurveTimeFunction(
             utc.time(1968, 7, 29), RatingCurveFunction(RatingCurveSegments([
                 RatingCurveSegment(lower=0.25, a=2.9822, b=-0.45, c=1.5078),
                 RatingCurveSegment(lower=0.79, a=3.9513, b=-0.45, c=2.8087),
                 RatingCurveSegment(lower=1.38, a=5.7071, b=-0.45, c=2.3503),
                 RatingCurveSegment(lower=2.55, a=8.2672, b=-0.45, c=2.052)
             ])))
     ]))
     self.assertIsNotNone(rating_curve)
     flow = rating_curve.flow(utc.time(2018, 1, 1), 3.2)
     self.assertAlmostEqual(flow, 117.8103380205204)
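The asserted value can be reproduced by hand if one assumes the usual power-law segment form flow = a * (h - b)**c, selecting (for 2018) the 1968 curve and the segment whose lower bound is the largest not exceeding the level 3.2. That selection rule and the formula are assumptions inferred from the numbers in this test, not from shyft documentation:

# hedged check of the assumed segment formula flow = a * (h - b)**c
a, b, c, h = 8.2672, -0.45, 2.052, 3.2     # segment with lower=2.55 covers h=3.2
print(a * (h - b) ** c)                    # ~117.81034, matching the assertion above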