# Example #1
def simple_run_demo():
    """Demonstrate a plain simulation run using HBV time-series and comparable model values."""
    # 1. Build the simulation time-axis.
    #    Calendar(3600) gives UTC+1, since day-boundaries of the day series in SmG are at UTC+1.
    cal = Calendar(3600)
    sim_start = cal.time(2010, 9, 1)
    sim_end = cal.add(sim_start, Calendar.YEAR, 5)  # five-year simulation window
    sim_ta = Timeaxis(sim_start, model_dt, cal.diff_units(sim_start, sim_end, model_dt))

    # 2. Construct the shyft model from the HBV model-repository.
    model = create_kjela_model(PTHSKModel, kjela.geo_ts_repository)

    # 3. Establish the initial state:
    #    use the *pattern* of distribution after one year (so hbv-zones 1..10 get an
    #    approximate distribution of discharge) combined with the observed discharge
    #    at the simulation start.
    burnin_end = cal.add(sim_start, Calendar.YEAR, 1)  # one year to settle the hbv-zone split
    burnin_ta = Timeaxis(sim_start, model_dt, cal.diff_units(sim_start, burnin_end, model_dt))
    observed_m3s = observed_kjela_discharge(sim_ta.total_period())  # observation ts for the run
    observed_at_start = observed_m3s(sim_start)  # m3/s at the simulation start time
    state0 = burn_in_state(model, burnin_ta, observed_at_start)

    # 4. Run the model, starting out from the burned-in state.
    model.run(sim_ta, state0)

    # 5. Present the results.
    plot_results(model, observed_m3s)
    plt.show()
# Example #2
    def test_dtss_partition_by_average(self):
        """
        This test illustrates use of partition_by client and server-side.
        The main point here is to ensure that the evaluate period covers
        both the historical and evaluation periods.
        """
        with tempfile.TemporaryDirectory() as c_dir:
            # setup data to be calculated
            utc = Calendar()
            d = deltahours(1)  # one-hour time-step
            t = utc.time(2000, 1, 1)
            # number of hourly steps spanning ten calendar years from t
            n = utc.diff_units(t, utc.add(t, Calendar.YEAR, 10), d)
            ta = TimeAxis(t, d, n)  # hourly axis over the 10-year span
            td = TimeAxis(t, d * 24, n // 24)  # matching daily axis
            n_ts = 1  # number of synthetic series to generate and store
            store_tsv = TsVector()  # something we store at server side
            for i in range(n_ts):
                # sine-shaped hourly series; the stop value scales with i so
                # each generated series differs
                pts = TimeSeries(
                    ta,
                    np.sin(
                        np.linspace(start=0, stop=1.0 * (i + 1),
                                    num=ta.size())),
                    point_fx.POINT_AVERAGE_VALUE)
                # presumably a shyft:// storage url keyed by the series index
                # — TODO confirm against shyft_store_url's definition
                ts_id = shyft_store_url(f"{i}")
                store_tsv.append(TimeSeries(
                    ts_id, pts))  # generate a bound pts to store

            # start dtss server
            dtss = DtsServer()
            cache_on_write = True
            port_no = find_free_port()
            host_port = 'localhost:{0}'.format(port_no)
            dtss.set_auto_cache(True)
            dtss.set_listening_port(port_no)
            dtss.set_container(
                "test", c_dir
            )  # notice we set container 'test' to point to c_dir directory
            dtss.start_async(
            )  # the internal shyft time-series will be stored to that container

            # create dts client
            c = DtsClient(
                host_port,
                auto_connect=False)  # demonstrate object life-time connection
            c.store_ts(store_tsv,
                       overwrite_on_write=True,
                       cache_on_write=cache_on_write)

            # evaluation period: one year of daily steps starting well after
            # the stored history begins
            t_0 = utc.time(2018, 1, 1)
            tax = TimeAxis(t_0, Calendar.DAY, 365)
            # ts_h1: unbound reference to the stored series (by url);
            # ts_h2: the already-bound series kept client-side — both should
            # yield equivalent partitions below
            ts_h1 = TimeSeries(shyft_store_url(f'{0}'))
            ts_h2 = store_tsv[0]
            # split the 10-year history into 10 one-year partitions anchored
            # at t_0, then average each partition onto the daily axis
            ts_p1 = ts_h1.partition_by(utc, t, Calendar.YEAR, 10,
                                       t_0).average(tax)
            ts_p2 = ts_h2.partition_by(utc, t, Calendar.YEAR, 10,
                                       t_0).average(tax)