Example #1
    def _get_timeseries(self):
        """
        Simple regression test of OpenDAP data repository.
        """
        epsg, bbox, bpoly = self.epsg_bbox
        dem_file = path.join(shyftdata_dir, "netcdf", "etopo180.nc")
        n_hours = 30
        t0 = self.start_date + api.deltahours(7)
        period = api.UtcPeriod(t0, t0 + api.deltahours(n_hours))

        repos = GFSDataRepository(
            epsg=epsg, dem_file=dem_file, padding=5000.0,
            utc=t0)  # signature: epsg, dem_file, padding=5000., utc=None
        data_names = ("temperature", "wind_speed", "precipitation",
                      "relative_humidity", "radiation")
        sources = repos.get_timeseries(data_names,
                                       period,
                                       geo_location_criteria=bpoly)
        self.assertEqual(set(data_names), set(sources.keys()))
        self.assertEqual(len(sources["temperature"]),
                         2)  # TODO: this was 6 before common changes.
        data1 = sources["temperature"][0]
        data2 = sources["temperature"][1]
        self.assertNotEqual(data1.mid_point().x, data2.mid_point().x)
        self.assertNotEqual(data1.mid_point().y, data2.mid_point().y)
        self.assertNotEqual(data1.mid_point().z, data2.mid_point().z)
        self.assertLessEqual(
            data1.ts.time(0), period.start,
            'expect returned fc ts to cover requested period')
        self.assertGreaterEqual(
            data1.ts.total_period().end, period.end,
            'expect returned fc ts to cover requested period')
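Stripped of the assertions, the repository call this test exercises reduces to a few lines. A minimal sketch, reusing the fixtures defined above (epsg, dem_file, t0, period, bpoly); a hypothetical standalone usage, not part of the test:

# Sketch: basic GFS timeseries fetch (reuses the test fixtures above).
repo = GFSDataRepository(epsg=epsg, dem_file=dem_file, padding=5000.0, utc=t0)
sources = repo.get_timeseries(("temperature",), period, geo_location_criteria=bpoly)
for src in sources["temperature"]:
    print(src.mid_point().x, src.ts.total_period())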
Example #2
 def test_create_basic(self):
     a = np.array([[1.1, 1.2, 1.3], [2.1, 2.2, 2.3]], dtype=np.float64)
     ta = TimeAxis(deltahours(0), deltahours(1), 3)
     tsv = create_ts_vector_from_np_array(ta, a,
                                          ts_point_fx.POINT_AVERAGE_VALUE)
     self.assertIsNotNone(tsv)
     for i in range(2):
         self.assertTrue(np.allclose(tsv[i].values.to_numpy(), a[i]))
         self.assertTrue(ta == tsv[i].time_axis)
         self.assertEqual(tsv[i].point_interpretation(),
                          ts_point_fx.POINT_AVERAGE_VALUE)
     # creating with a mismatched time-axis throws
     b = np.array([[], []], dtype=np.float64)
     try:
         create_ts_vector_from_np_array(ta, b,
                                        ts_point_fx.POINT_AVERAGE_VALUE)
         self.assertTrue(False, "Should throw for mismatched time-axis")
     except RuntimeError:
         pass
     # create empty ts works
     tb = TimeAxis(0, 0, 0)
     r = create_ts_vector_from_np_array(tb, b,
                                        ts_point_fx.POINT_AVERAGE_VALUE)
     self.assertEqual(len(r), 2)
     for ts in r:
         self.assertFalse(ts)
     # create empty returns empty
     c = np.empty(shape=(0, 0), dtype=np.float64)
     z = create_ts_vector_from_np_array(tb, c,
                                        ts_point_fx.POINT_AVERAGE_VALUE)
     self.assertEqual(len(z), 0)
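The failure case above implies a shape contract: the value array is (n_series, n_points) and the TimeAxis must supply exactly n_points steps. A small sketch under that reading (our inference from the test, not library documentation):

# Sketch of the implied shape contract (inferred from the test above).
ta5 = TimeAxis(deltahours(0), deltahours(1), 3)   # 3 steps
vals = np.ones((5, 3), dtype=np.float64)          # 5 series x 3 points each
tsv5 = create_ts_vector_from_np_array(ta5, vals, ts_point_fx.POINT_AVERAGE_VALUE)
assert len(tsv5) == 5 and all(len(ts) == 3 for ts in tsv5)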
Example #3
    def _get_forecast(self):
        """
        Simple forecast regression test of OpenDAP data repository.
        """
        epsg, bbox, bpoly = self.epsg_bbox

        dem_file = path.join(shyftdata_dir, "netcdf", "etopo180.nc")
        n_hours = 30
        t0 = self.start_date + api.deltahours(9)
        period = api.UtcPeriod(t0, t0 + api.deltahours(n_hours))
        t_c = self.start_date + api.deltahours(
            7)  # the beginning of the forecast criteria

        repos = GFSDataRepository(epsg, dem_file)
        data_names = (
            "temperature",
        )  # the full set: "wind_speed", "precipitation", "relative_humidity", "radiation")
        sources = repos.get_forecast(data_names,
                                     period,
                                     t_c,
                                     geo_location_criteria=bpoly)
        self.assertEqual(set(data_names), set(sources.keys()))
        self.assertEqual(len(sources["temperature"]), 2)
        data1 = sources["temperature"][0]
        data2 = sources["temperature"][1]
        self.assertNotEqual(data1.mid_point().x, data2.mid_point().x)
        self.assertNotEqual(data1.mid_point().y, data2.mid_point().y)
        self.assertNotEqual(data1.mid_point().z, data2.mid_point().z)
        self.assertLessEqual(
            data1.ts.time(0), period.start,
            'expect returned fc ts to cover requested period')
        self.assertGreaterEqual(
            data1.ts.total_period().end, period.end,
            'expect returned fc ts to cover requested period')
Example #4
 def test_create_xx_source_vector(self):
     # arrange the setup
     a = np.array([[1.1, 1.2, 1.3], [2.1, 2.2, 2.3]], dtype=np.float64)
     ta = TimeAxis(deltahours(0), deltahours(1), 3)
     gpv = GeoPointVector()
     gpv[:] = [GeoPoint(1, 2, 3), GeoPoint(4, 5, 6)]
     cfs = [(create_precipitation_source_vector_from_np_array,
             PrecipitationSourceVector),
            (create_temperature_source_vector_from_np_array,
             TemperatureSourceVector),
            (create_radiation_source_vector_from_np_array,
             RadiationSourceVector),
            (create_rel_hum_source_vector_from_np_array,
             RelHumSourceVector),
            (create_radiation_source_vector_from_np_array,
             RadiationSourceVector)]
     # test all creation types:
     for cf in cfs:
         r = cf[0](ta, gpv, a, ts_point_fx.POINT_AVERAGE_VALUE)  # act here
         self.assertTrue(isinstance(r, cf[1]))  # then the asserts
         self.assertEqual(len(r), len(gpv))
         for i in range(len(gpv)):
             self.assertEqual(r[i].mid_point(), gpv[i])
             self.assertTrue(np.allclose(r[i].ts.values.to_numpy(), a[i]))
             self.assertEqual(r[i].ts.point_interpretation(),
                              ts_point_fx.POINT_AVERAGE_VALUE)
Example #5
    def test_get_ensemble(self):
        EPSG = 32633
        upper_left_x = 436100.0
        upper_left_y = 7417800.0
        nx = 74
        ny = 94
        dx = 1000.0
        dy = 1000.0
        # Period start
        year = 2015
        month = 7
        day = 26
        hour = 0
        n_hours = 30
        utc = api.Calendar()  # No offset gives Utc
        t0 = api.YMDhms(year, month, day, hour)
        period = api.UtcPeriod(utc.time(t0), utc.time(t0) + api.deltahours(n_hours))
        t_c = utc.time(t0) + api.deltahours(1)

        base_dir = path.join(shyftdata_dir, "netcdf", "arome")
        pattern = "fc*.nc"
        bbox = ([upper_left_x, upper_left_x + nx*dx,
                 upper_left_x + nx*dx, upper_left_x],
                [upper_left_y, upper_left_y,
                 upper_left_y - ny*dy, upper_left_y - ny*dy])
        try:
            repos = AromeDataRepository(EPSG, base_dir, filename=pattern, bounding_box=bbox)
            data_names = ("temperature", "wind_speed", "relative_humidity")
            ensemble = repos.get_forecast_ensemble(data_names, period, t_c, None)
            self.assertTrue(isinstance(ensemble, list))
            self.assertEqual(len(ensemble), 10)
        except AromeDataRepositoryError as adre:
            self.skipTest("(test inconclusive - missing arome data: {0})".format(adre))
Example #6
    def test_get_ensemble(self):
        """
        Simple ensemble regression test of OpenDAP data repository.
        """
        epsg, bbox = self.epsg_bbox

        dem_file = path.join(shyftdata_dir, "netcdf", "etopo180.nc")

        # Period start
        (year, month, day), hour = self.start_date, 9
        n_hours = 30
        utc = api.Calendar()  # No offset gives Utc
        t0 = api.YMDhms(year, month, day, hour)
        period = api.UtcPeriod(utc.time(t0), utc.time(t0) + api.deltahours(n_hours))
        t_c = utc.time(t0) + api.deltahours(7)

        repos = GFSDataRepository(epsg, dem_file, bounding_box=bbox)
        data_names = ("temperature", "wind_speed", "precipitation",
                      "relative_humidity", "radiation")
        ensembles = repos.get_forecast_ensemble(data_names, period, t_c, None)
        for sources in ensembles:
            self.assertEqual(set(data_names), set(sources.keys()))
            self.assertEqual(len(sources["temperature"]), 6)
            data1 = sources["temperature"][0]
            data2 = sources["temperature"][1]
            self.assertNotEqual(data1.mid_point().x, data2.mid_point().x)
            self.assertNotEqual(data1.mid_point().y, data2.mid_point().y)
            self.assertNotEqual(data1.mid_point().z, data2.mid_point().z)
            h_dt = (data1.ts.time(1) - data1.ts.time(0))/3600
            self.assertEqual(data1.ts.size(), 30//h_dt)
Example #7
    def test_ts_get_krls_predictor(self):
        t0=api.utctime_now()
        ta=api.TimeAxis(t0, api.deltahours(1), 30*24)
        data=np.sin(np.linspace(0, 2*np.pi, ta.size()))
        ts_data=api.TimeSeries(ta, data, api.POINT_INSTANT_VALUE)

        ts=api.TimeSeries("a")

        try:
            ts.get_krls_predictor()
            self.fail("should not be able to get predictor for unbound")
        except:
            pass

        fbi=ts.find_ts_bind_info()
        fbi[0].ts.bind(ts_data)
        ts.bind_done()

        pred=ts.get_krls_predictor(api.deltahours(3))

        ts_krls=pred.predict(ta)
        self.assertEqual(len(ts_krls), len(ts_data))
        ts_mse=pred.mse_ts(ts_data)
        self.assertEqual(len(ts_mse), len(ts_data))
        for i in range(len(ts_krls)):
            self.assertAlmostEqual(ts_krls.values[i], ts_data.values[i], places=1)
            self.assertAlmostEqual(ts_mse.values[i], 0, places=2)
        self.assertAlmostEqual(pred.predictor_mse(ts_data), 0, places=2)
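Most of the ceremony above deals with binding the symbolic series; on a series that is already concrete the predictor path is shorter. A sketch, assuming a plain api.TimeSeries exposes get_krls_predictor the same way the bound expression does:

# Sketch (assumption: a concrete TimeSeries offers the same predictor API).
ts_plain = api.TimeSeries(ta, data, api.POINT_INSTANT_VALUE)
pred2 = ts_plain.get_krls_predictor(api.deltahours(3))
ts_fit = pred2.predict(ta)              # KRLS approximation on the same axis
mse = pred2.predictor_mse(ts_plain)     # scalar mean-squared-error estimate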
Example #8
    def test_get_forecast(self):
        # Period start
        year = 2015
        month = 8
        day = 24
        hour = 6
        n_hours = 65
        utc = api.Calendar()  # No offset gives Utc
        t0 = api.YMDhms(year, month, day, hour)
        period = api.UtcPeriod(utc.time(t0), utc.time(t0) + api.deltahours(n_hours))
        t_c1 = utc.time(t0) + api.deltahours(1)
        t_c2 = utc.time(t0) + api.deltahours(7)

        base_dir = path.join(shyftdata_dir, "repository", "arome_data_repository")
        pattern = "arome_metcoop*default2_5km_*.nc"
        EPSG, bbox = self.arome_epsg_bbox

        repos = AromeDataRepository(EPSG, base_dir, filename=pattern, bounding_box=bbox)
        data_names = ("temperature", "wind_speed", "precipitation", "relative_humidity")
        tc1_sources = repos.get_forecast(data_names, period, t_c1, None)
        tc2_sources = repos.get_forecast(data_names, period, t_c2, None)

        self.assertTrue(len(tc1_sources) == len(tc2_sources))
        self.assertTrue(set(tc1_sources) == set(data_names))
        self.assertTrue(tc1_sources["temperature"][0].ts.size() == n_hours + 1)

        tc1_precip = tc1_sources["precipitation"][0].ts
        tc2_precip = tc2_sources["precipitation"][0].ts

        self.assertEqual(tc1_precip.size(), n_hours)
        self.assertTrue(tc1_precip.time(0) != tc2_precip.time(0))
Example #9
    def test_get_ensemble(self):
        EPSG = 32633
        upper_left_x = 436100.0
        upper_left_y = 7417800.0
        nx = 74
        ny = 94
        dx = 1000.0
        dy = 1000.0
        # Period start
        n_hours = 30
        utc = api.Calendar()  # No offset gives Utc
        t0 = utc.time(2015, 7, 26)
        period = api.UtcPeriod(t0, t0 + api.deltahours(n_hours))
        t_c = t0 + api.deltahours(1)

        base_dir = path.join(shyftdata_dir, "netcdf", "arome")
        # pattern = "fc*.nc"
        pattern = "fc_(\d{4})(\d{2})(\d{2})[T_](\d{2})Z?.nc$"
        bpoly = box(upper_left_x, upper_left_y - ny * dy,
                    upper_left_x + nx * dx, upper_left_y)
        try:
            repos = MetNetcdfDataRepository(EPSG, base_dir, filename=pattern)
            data_names = ("temperature", "wind_speed", "relative_humidity")
            ensemble = repos.get_forecast_ensemble(data_names,
                                                   period,
                                                   t_c,
                                                   geo_location_criteria=bpoly)
            self.assertTrue(isinstance(ensemble, list))
            self.assertEqual(len(ensemble), 10)
        except MetNetcdfDataRepositoryError as adre:
            self.skipTest(
                "(test inconclusive - missing arome data: {0})".format(adre))
Example #10
    def test_get_ensemble(self):
        """
        Simple ensemble regression test of OpenDAP data repository.
        """
        epsg, bbox = self.epsg_bbox
        dem_file = path.join(shyftdata_dir, "netcdf", "etopo180.nc")
        n_hours = 30
        t0 = self.start_date + api.deltahours(
            9)  # api.YMDhms(year, month, day, hour)
        period = api.UtcPeriod(t0, t0 + api.deltahours(n_hours))
        t_c = t0

        repos = GFSDataRepository(epsg, dem_file, bounding_box=bbox)
        data_names = (
            "temperature",
        )  # this is the full set: "wind_speed", "precipitation", "relative_humidity", "radiation")
        ensembles = repos.get_forecast_ensemble(data_names, period, t_c, None)
        for sources in ensembles:
            self.assertEqual(set(data_names), set(sources.keys()))
            self.assertEqual(len(sources["temperature"]), 6)
            data1 = sources["temperature"][0]
            data2 = sources["temperature"][1]
            self.assertNotEqual(data1.mid_point().x, data2.mid_point().x)
            self.assertNotEqual(data1.mid_point().y, data2.mid_point().y)
            self.assertNotEqual(data1.mid_point().z, data2.mid_point().z)
            self.assertLessEqual(
                data1.ts.time(0), period.start,
                'expect returned fc ts to cover requested period')
            self.assertGreaterEqual(
                data1.ts.total_period().end, period.end,
                'expect returned fc ts to cover requested period')
Example #11
    def test_get_ensemble(self):
        """
        Simple ensemble regression test of OpenDAP data repository.
        """
        epsg, bbox = self.epsg_bbox

        dem_file = path.join(shyftdata_dir, "netcdf", "etopo180.nc")

        # Period start
        (year, month, day), hour = self.start_date, 9
        n_hours = 30
        utc = api.Calendar()  # No offset gives Utc
        t0 = api.YMDhms(year, month, day, hour)
        period = api.UtcPeriod(utc.time(t0),
                               utc.time(t0) + api.deltahours(n_hours))
        t_c = utc.time(t0) + api.deltahours(7)

        repos = GFSDataRepository(epsg, dem_file, bounding_box=bbox)
        data_names = ("temperature", "wind_speed", "precipitation",
                      "relative_humidity", "radiation")
        ensembles = repos.get_forecast_ensemble(data_names, period, t_c, None)
        for sources in ensembles:
            self.assertEqual(set(data_names), set(sources.keys()))
            self.assertEqual(len(sources["temperature"]), 6)
            data1 = sources["temperature"][0]
            data2 = sources["temperature"][1]
            self.assertNotEqual(data1.mid_point().x, data2.mid_point().x)
            self.assertNotEqual(data1.mid_point().y, data2.mid_point().y)
            self.assertNotEqual(data1.mid_point().z, data2.mid_point().z)
            h_dt = (data1.ts.time(1) - data1.ts.time(0)) / 3600
            self.assertEqual(data1.ts.size(), 30 // h_dt)
Example #12
    def test_get_ensemble_forecast_collection(self):
        EPSG = 32633
        upper_left_x = 436100.0
        upper_left_y = 7417800.0
        nx = 74
        ny = 94
        dx = 1000.0
        dy = 1000.0
        t0 = api.YMDhms(2015, 7, 26, 0)
        n_hours = 30
        utc = api.Calendar()  # No offset gives Utc
        period = api.UtcPeriod(utc.time(t0), utc.time(t0) + api.deltahours(n_hours))
        t_c = utc.time(t0) + api.deltahours(1)

        base_dir = path.join(shyftdata_dir, "netcdf", "arome")
        pattern = "fc2015072600.nc"
        bbox = ([upper_left_x, upper_left_x + nx * dx,
                 upper_left_x + nx * dx, upper_left_x],
                [upper_left_y, upper_left_y,
                 upper_left_y - ny * dy, upper_left_y - ny * dy])
        try:
            ar1 = AromeDataRepository(EPSG, base_dir, filename=pattern, bounding_box=bbox)
            ar2 = AromeDataRepository(EPSG, base_dir, filename=pattern, bounding_box=bbox)
            repos = GeoTsRepositoryCollection([ar1, ar2])
            data_names = ("temperature", "wind_speed", "relative_humidity")
            ensemble = repos.get_forecast_ensemble(data_names, period, t_c, None)
            self.assertTrue(isinstance(ensemble, list))
            self.assertEqual(len(ensemble), 10)
            with self.assertRaises(GeoTsRepositoryCollectionError) as context:
                repos = GeoTsRepositoryCollection([ar1, ar2], reduce_type="add")
                repos.get_forecast_ensemble(data_names, period, t_c, None)
            self.assertEqual("Only replace is supported yet", context.exception.args[0])
        except AromeDataRepositoryError as adre:
            self.skipTest("(test inconclusive - missing arome data: {0})".format(adre))
Example #13
    def test_get_forecast_collection(self):
        n_hours = 30
        dt = api.deltahours(1)
        utc = api.Calendar()  # No offset gives Utc
        tc = api.YMDhms(2015, 8, 24, 6)
        t0 = utc.time(tc)
        period = api.UtcPeriod(t0, t0 + api.deltahours(n_hours))
        date_str = "{}{:02}{:02}_{:02}".format(tc.year, tc.month, tc.day, tc.hour)

        epsg, bbox = self.arome_epsg_bbox

        base_dir = path.join(shyftdata_dir, "repository", "arome_data_repository")
        f1 = "arome_metcoop_red_default2_5km_{}.nc".format(date_str)
        f2 = "arome_metcoop_red_test2_5km_{}.nc".format(date_str)

        ar1 = AromeDataRepository(epsg, base_dir, filename=f1, allow_subset=True)
        ar2 = AromeDataRepository(epsg, base_dir, filename=f2, elevation_file=f1, allow_subset=True)

        geo_ts_repository = GeoTsRepositoryCollection([ar1, ar2])
        source_names = ("temperature", "radiation")
        sources = geo_ts_repository.get_forecast(source_names, period, t0,
                                                 geo_location_criteria=bbox)
        self.assertTrue(all([x in source_names for x in sources]))

        geo_ts_repository = GeoTsRepositoryCollection([ar1, ar2], reduce_type="add")
        with self.assertRaises(GeoTsRepositoryCollectionError) as context:
            sources = geo_ts_repository.get_forecast(("temperature", "radiation"),
                                                     period, t0, geo_location_criteria=bbox)
Example #14
    def test_bias_predictor(self):
        """
        Verify that if we feed forecast[n] and observation into the bias-predictor
        it will create the estimated bias offsets
        """
        f = api.KalmanFilter()
        bp = api.KalmanBiasPredictor(f)
        self.assertIsNotNone(bp)
        self.assertEqual(bp.filter.parameter.n_daily_observations, 8)

        n_fc = 8
        utc = api.Calendar()
        t0 = utc.time(2016, 1, 1)
        dt = api.deltahours(1)
        n_fc_steps = 36  # e.g. like arome 36 hours
        fc_dt = api.deltahours(6)
        fc_fx = lambda time_axis: self._create_fc_values(time_axis, 2.0)  # just return a constant 2.0 deg C for now
        fc_set = self._create_geo_forecast_set(n_fc, t0, dt, n_fc_steps, fc_dt, fc_fx)
        n_obs = 24
        obs_ta = api.Timeaxis2(t0, dt, n_obs)
        obs_ts = api.Timeseries(obs_ta, fill_value=0.0)
        kalman_dt = api.deltahours(3)  # suitable average for prediction temperature
        kalman_ta = api.Timeaxis2(t0, kalman_dt, 8)
        bp.update_with_forecast(fc_set, obs_ts, kalman_ta)  # here we feed in forecast-set and observation into kalman
        fc_setv = self._create_forecast_set(n_fc, t0, dt, n_fc_steps, fc_dt, fc_fx)
        bp.update_with_forecast(fc_setv, obs_ts, kalman_ta)  # also verify we can feed in a pure TsVector
        bias_pattern = bp.state.x  # bp.state.x now holds the best estimate of the bias between forecast and observation
        self.assertEqual(len(bias_pattern), 8)
        for i in range(len(bias_pattern)):
            self.assertLess(abs(bias_pattern[i] - 2.0), 0.2)  # bias should iterate to approx 2.0 degC now.
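To apply the estimated pattern, the _predict_bias examples later in this listing turn the Kalman state into a periodic bias series; condensed here as a sketch using only calls that appear in those examples:

# Sketch: turn the estimated state into a 3-hourly periodic bias series.
pattern = api.KalmanState.get_x(bp.state)
bias_ts = api.create_periodic_pattern_ts(pattern, api.deltahours(3),
                                         kalman_ta.time(0), kalman_ta)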
Example #15
    def _call_qm(self, prep_fcst_lst, weights, geo_points, ta,
                 input_source_types, nb_prior_scenarios):

        # Check interpolation period is within time axis (ta)
        ta_start = ta.time(0)
        ta_end = ta.time(ta.size() - 1)  # start of last time step
        interp_start = ta_start + api.deltahours(self.qm_interp_hours[0])
        interp_end = ta_start + api.deltahours(self.qm_interp_hours[1])
        if interp_start > ta_end:
            interp_start = api.no_utctime
            interp_end = api.no_utctime
        if interp_end > ta_end:
            interp_end = ta_end

        # Re-organize data before sending to api.quantile_map_forecast. For each source type and geo_point,
        # group forecasts as TsVectorSets, send to api.quantile_map_forecast and return results as an
        # ensemble of source-keyed dictionaries of geo-ts.
        # First re-organize weights - one weight per TsVector.
        weight_sets = api.DoubleVector([w for ws in weights for w in ws])

        # New version
        results = [{} for i in range(nb_prior_scenarios)]
        for src in input_source_types:
            qm_scenarios = []
            for geo_pt_idx, geo_pt in enumerate(geo_points):
                forecast_sets = api.TsVectorSet()
                for i, fcst_group in enumerate(prep_fcst_lst):
                    for j, forecast in enumerate(fcst_group):
                        scenarios = api.TsVector()
                        for member in forecast:
                            scenarios.append(member[src][geo_pt_idx].ts)
                        forecast_sets.append(scenarios)
                        if i == self.repo_prior_idx and j == 0:
                            prior_data = scenarios
                            # TODO: read prior if repo_prior_idx is None

                qm_scenarios.append(
                    api.quantile_map_forecast(forecast_sets, weight_sets,
                                              prior_data, ta, interp_start,
                                              interp_end, True))

            # Alternative: convert to array to enable slicing
            # arr = np.array(qm_scenarios)

            # Now organize to desired output format: ensemble of source-keyed dictionaries of geo-ts
            for i in range(0, nb_prior_scenarios):
                # source_dict = {}
                # ts_vct = arr[:, i]
                ts_vct = [x[i] for x in qm_scenarios]
                vct = self.source_vector_map[src]()
                for geo_pt, ts in zip(geo_points, ts_vct):
                    vct.append(self.source_type_map[src](geo_pt, ts))
                # Alternatives:
                # vct[:] = [self.source_type_map[src](geo_pt, ts) for geo_pt, ts in zip(geo_points, ts_vct)]
                # vct = self.source_vector_map[src]([self.source_type_map[src](geo_pt, ts) for geo_pt, ts in zip(geo_points, ts_vct)])
                results[i][src] = vct
        return results
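The data layout assembled in the loops above is easy to lose track of; summarized as a sketch (our reading of this code, not library documentation):

# Per geo-point input layout for api.quantile_map_forecast (sketch):
#   forecast_sets : api.TsVectorSet  - one TsVector of ensemble members per forecast
#   weight_sets   : api.DoubleVector - one weight per TsVector, flattened like `weights`
#   prior_data    : api.TsVector     - the prior scenarios to be quantile-mapped
#   ta, interp_start, interp_end     - target time-axis and interpolation window
mapped = api.quantile_map_forecast(forecast_sets, weight_sets, prior_data,
                                   ta, interp_start, interp_end, True)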
Example #16
def continuous_calibration():
    utc = Calendar()
    t_start = utc.time(YMDhms(2011, 9, 1))
    t_fc_start = utc.time(YMDhms(2015, 10, 1))
    dt = deltahours(1)
    n_obs = int(round((t_fc_start - t_start)/dt))
    obs_time_axis = TimeAxisFixedDeltaT(t_start, dt, n_obs + 1)
    q_obs_m3s_ts = observed_tistel_discharge(obs_time_axis.total_period())

    ptgsk = create_tistel_simulator(PTGSKOptModel, tistel.geo_ts_repository(tistel.grid_spec.epsg()))
    initial_state = burn_in_state(ptgsk, t_start, utc.time(YMDhms(2012, 9, 1)), q_obs_m3s_ts)

    num_opt_days = 30
    # Step forward num_opt_days days and store the state for each day:
    recal_start = t_start + deltahours(num_opt_days*24)
    t = t_start
    state = initial_state
    opt_states = {t: state}
    while t < recal_start:
        ptgsk.run(TimeAxisFixedDeltaT(t, dt, 24), state)
        t += deltahours(24)
        state = ptgsk.reg_model_state
        opt_states[t] = state

    # recal_stop = utc.time(YMDhms(2011, 10, 30))  # superseded by the line below
    recal_stop = utc.time(YMDhms(2012, 5, 30))
    curr_time = recal_start
    q_obs_avg = TsTransform().to_average(t_start, dt, n_obs + 1, q_obs_m3s_ts)
    target_spec = TargetSpecificationPts(q_obs_avg, IntVector([0]), 1.0, KLING_GUPTA)
    target_spec_vec = TargetSpecificationVector([target_spec])
    i = 0
    times = []
    values = []
    p, p_min, p_max = construct_calibration_parameters(ptgsk)
    while curr_time < recal_stop:
        print(i)
        i += 1
        opt_start = curr_time - deltahours(24*num_opt_days)
        opt_state = opt_states.pop(opt_start)
        p = ptgsk.region_model.get_region_parameter()
        p_opt = ptgsk.optimize(TimeAxisFixedDeltaT(opt_start, dt, 24*num_opt_days), opt_state, target_spec_vec,
                               p, p_min, p_max, tr_stop=1.0e-5)
        ptgsk.region_model.set_region_parameter(p_opt)
        corr_state = adjust_simulator_state(ptgsk, curr_time, q_obs_m3s_ts)
        ptgsk.run(TimeAxisFixedDeltaT(curr_time, dt, 24), corr_state)
        curr_time += deltahours(24)
        opt_states[curr_time] = ptgsk.reg_model_state
        discharge = ptgsk.region_model.statistics.discharge([0])
        times.extend(discharge.time(i) for i in range(discharge.size()))
        values.extend(list(np.array(discharge.v)))
    plt.plot(utc_to_greg(times), values)
    plot_results(None, q_obs=observed_tistel_discharge(UtcPeriod(recal_start, recal_stop)))
    set_calendar_formatter(Calendar())
    #plt.interactive(1)
    plt.title("Continuously recalibrated discharge vs observed")
    plt.xlabel("Time in UTC")
    plt.ylabel(r"Discharge in $\mathbf{m^3s^{-1}}$", verticalalignment="top", rotation="horizontal")
    plt.gca().yaxis.set_label_coords(0, 1.1)
Example #17
def continuous_calibration():
    utc = Calendar()
    t_start = utc.time(YMDhms(2011, 9, 1))
    t_fc_start = utc.time(YMDhms(2015, 10, 1))
    dt = deltahours(1)
    n_obs = int(round((t_fc_start - t_start)/dt))
    obs_time_axis = Timeaxis(t_start, dt, n_obs + 1)
    q_obs_m3s_ts = observed_tistel_discharge(obs_time_axis.total_period())

    ptgsk = create_tistel_simulator(PTGSKOptModel, tistel.geo_ts_repository(tistel.grid_spec.epsg()))
    initial_state = burn_in_state(ptgsk, t_start, utc.time(YMDhms(2012, 9, 1)), q_obs_m3s_ts)

    num_opt_days = 30
    # Step forward num_opt_days days and store the state for each day:
    recal_start = t_start + deltahours(num_opt_days*24)
    t = t_start
    state = initial_state
    opt_states = {t: state}
    while t < recal_start:
        ptgsk.run(Timeaxis(t, dt, 24), state)
        t += deltahours(24)
        state = ptgsk.reg_model_state
        opt_states[t] = state

    # recal_stop = utc.time(YMDhms(2011, 10, 30))  # superseded by the line below
    recal_stop = utc.time(YMDhms(2012, 5, 30))
    curr_time = recal_start
    q_obs_avg = TsTransform().to_average(t_start, dt, n_obs + 1, q_obs_m3s_ts)
    target_spec = TargetSpecificationPts(q_obs_avg, IntVector([0]), 1.0, KLING_GUPTA)
    target_spec_vec = TargetSpecificationVector([target_spec])
    i = 0
    times = []
    values = []
    p, p_min, p_max = construct_calibration_parameters(ptgsk)
    while curr_time < recal_stop:
        print(i)
        i += 1
        opt_start = curr_time - deltahours(24*num_opt_days)
        opt_state = opt_states.pop(opt_start)
        p = ptgsk.region_model.get_region_parameter()
        p_opt = ptgsk.optimize(Timeaxis(opt_start, dt, 24*num_opt_days), opt_state, target_spec_vec,
                               p, p_min, p_max, tr_stop=1.0e-5)
        ptgsk.region_model.set_region_parameter(p_opt)
        corr_state = adjust_simulator_state(ptgsk, curr_time, q_obs_m3s_ts)
        ptgsk.run(Timeaxis(curr_time, dt, 24), corr_state)
        curr_time += deltahours(24)
        opt_states[curr_time] = ptgsk.reg_model_state
        discharge = ptgsk.region_model.statistics.discharge([0])
        times.extend(discharge.time(i) for i in range(discharge.size()))
        values.extend(list(np.array(discharge.v)))
    plt.plot(utc_to_greg(times), values)
    plot_results(None, q_obs=observed_tistel_discharge(UtcPeriod(recal_start, recal_stop)))
    set_calendar_formatter(Calendar())
    #plt.interactive(1)
    plt.title("Continuously recalibrated discharge vs observed")
    plt.xlabel("Time in UTC")
    plt.ylabel(r"Discharge in $\mathbf{m^3s^{-1}}$", verticalalignment="top", rotation="horizontal")
    plt.gca().yaxis.set_label_coords(0, 1.1)
Example #18
 def test_extract_conversion_factors_from_string(self):
     u = utime('hours since 1970-01-01 00:00:00')
     t_origin = api.Calendar(u.tzoffset).time(
         api.YMDhms(u.origin.year, u.origin.month, u.origin.day, u.origin.hour, u.origin.minute, u.origin.second))
     delta_t_dic = {'days': api.deltahours(24), 'hours': api.deltahours(1), 'minutes': api.deltaminutes(1)}
     delta_t = delta_t_dic[u.units]
     self.assertIsNotNone(u)
     self.assertEqual(delta_t, api.deltahours(1))
     self.assertEqual(t_origin, 0)
Example #19
 def test_integral_fine_resolution(self):
     """ Case study for last-interval bug from python"""
     utc=api.Calendar()
     ta=api.TimeAxis(utc.time(2017, 10, 16), api.deltahours(24*7), 219)
     tf=api.TimeAxis(utc.time(2017, 10, 16), api.deltahours(3), 12264)
     src=api.TimeSeries(ta, fill_value=1.0, point_fx=api.POINT_AVERAGE_VALUE)
     ts=src.integral(tf)
     self.assertIsNotNone(ts)
     for i in range(len(tf)):
         if not math.isclose(ts.value(i), 1.0*api.deltahours(3)):
             self.assertAlmostEqual(ts.value(i), 1.0*api.deltahours(3))
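The expected value 1.0*api.deltahours(3) follows from integral() integrating value times seconds over each target interval; the same identity with a non-unit constant, as a sketch:

# Sketch: the integral of a constant c over a dt-second interval is c*dt.
utc2 = api.Calendar()
ta1 = api.TimeAxis(utc2.time(2017, 10, 16), api.deltahours(1), 24)
src2 = api.TimeSeries(ta1, fill_value=2.0, point_fx=api.POINT_AVERAGE_VALUE)
tf3 = api.TimeAxis(utc2.time(2017, 10, 16), api.deltahours(3), 8)
assert math.isclose(src2.integral(tf3).value(0), 2.0*api.deltahours(3))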
Example #20
 def _predict_bias(self, obs_set, fc_set):
     # Return a set of bias_ts per observation geo_point
     bias_set = api.TemperatureSourceVector()
     kf = api.KalmanFilter()
     kbp = api.KalmanBiasPredictor(kf)
     kta = api.Timeaxis2(self.t0, api.deltahours(3), 8)
     for obs in obs_set:
         kbp.update_with_forecast(fc_set, obs.ts, kta)
         pattern = api.KalmanState.get_x(kbp.state)
         # a_ts = api.Timeseries(pattern, api.deltahours(3), self.ta)  # can do using ct of Timeseries, or:
         b_ts = api.create_periodic_pattern_ts(pattern, api.deltahours(3), self.ta.time(0), self.ta)  # function
         bias_set.append(api.TemperatureSource(obs.mid_point(), b_ts))
     return bias_set
Example #21
 def test_extract_conversion_factors_from_string(self):
     u = utime('hours since 1970-01-01 00:00:00')
     t_origin = api.Calendar(u.tzoffset).time(
         api.YMDhms(u.origin.year, u.origin.month, u.origin.day,
                    u.origin.hour, u.origin.minute, u.origin.second))
     delta_t_dic = {
         'days': api.deltahours(24),
         'hours': api.deltahours(1),
         'minutes': api.deltaminutes(1)
     }
     delta_t = delta_t_dic[u.units]
     self.assertIsNotNone(u)
     self.assertEqual(delta_t, api.deltahours(1))
     self.assertEqual(t_origin, 0)
Example #22
 def _predict_bias(self, obs_set, fc_set):
     # Return a set of bias_ts per observation geo_point
     bias_set = api.TemperatureSourceVector()
     kf = api.KalmanFilter()
     kbp = api.KalmanBiasPredictor(kf)
     kta = api.TimeAxis(self.t0, api.deltahours(3), 8)
     for obs in obs_set:
         kbp.update_with_forecast(fc_set, obs.ts, kta)
         pattern = api.KalmanState.get_x(kbp.state)
         # a_ts = api.TimeSeries(pattern, api.deltahours(3), self.ta)  # can do using ct of TimeSeries, or:
         b_ts = api.create_periodic_pattern_ts(pattern, api.deltahours(3),
                                               self.ta.time(0),
                                               self.ta)  # function
         bias_set.append(api.TemperatureSource(obs.mid_point(), b_ts))
     return bias_set
Example #23
 def test_periodic_pattern_ts(self):
     c = api.Calendar()
     t0 = c.time(2016, 1, 1)
     dt = api.deltahours(1)
     n = 240
     ta = api.Timeaxis2(t0, dt, n)
     pattern_values = api.DoubleVector.from_numpy(np.arange(8))
     pattern_dt = api.deltahours(3)
     pattern_t0 = c.time(2015,6,1)
     pattern_ts = api.create_periodic_pattern_ts(pattern_values, pattern_dt, pattern_t0, ta)  # this is how to create a periodic pattern ts (used in gridpp/kalman bias handling)
     self.assertAlmostEqual(pattern_ts.value(0), 0.0)
     self.assertAlmostEqual(pattern_ts.value(1), 0.0)
     self.assertAlmostEqual(pattern_ts.value(2), 0.0)
     self.assertAlmostEqual(pattern_ts.value(3), 1.0)  # next step in pattern starts here
     self.assertAlmostEqual(pattern_ts.value(24), 0.0)  # next day repeats the pattern
Example #25
    def test_get_ensemble_forecast_collection(self):
        EPSG = 32633
        upper_left_x = 436100.0
        upper_left_y = 7417800.0
        nx = 74
        ny = 94
        dx = 1000.0
        dy = 1000.0
        t0 = api.YMDhms(2015, 7, 26, 0)
        n_hours = 30
        utc = api.Calendar()  # No offset gives Utc
        period = api.UtcPeriod(utc.time(t0),
                               utc.time(t0) + api.deltahours(n_hours))
        t_c = utc.time(t0) + api.deltahours(1)

        base_dir = path.join(shyftdata_dir, "netcdf", "arome")
        pattern = "fc2015072600.nc"
        bbox = ([
            upper_left_x, upper_left_x + nx * dx, upper_left_x + nx * dx,
            upper_left_x
        ], [
            upper_left_y, upper_left_y, upper_left_y - ny * dy,
            upper_left_y - ny * dy
        ])
        try:
            ar1 = AromeDataRepository(EPSG,
                                      base_dir,
                                      filename=pattern,
                                      bounding_box=bbox)
            ar2 = AromeDataRepository(EPSG,
                                      base_dir,
                                      filename=pattern,
                                      bounding_box=bbox)
            repos = GeoTsRepositoryCollection([ar1, ar2])
            data_names = ("temperature", "wind_speed", "relative_humidity")
            ensemble = repos.get_forecast_ensemble(data_names, period, t_c,
                                                   None)
            self.assertTrue(isinstance(ensemble, list))
            self.assertEqual(len(ensemble), 10)
            with self.assertRaises(GeoTsRepositoryCollectionError) as context:
                repos = GeoTsRepositoryCollection([ar1, ar2],
                                                  reduce_type="add")
                repos.get_forecast_ensemble(data_names, period, t_c, None)
            self.assertEqual("Only replace is supported yet",
                             context.exception.args[0])
        except AromeDataRepositoryError as adre:
            self.skipTest(
                "(test inconclusive - missing arome data: {0})".format(adre))
Example #26
 def prec_acc_conv(p):
     indx = np.nonzero([self.cal.calendar_units(ti).hour in self.analysis_hours for ti in time])[0]
     f = 1000.*api.deltahours(1)/(time[1] - time[0])  # conversion from m per delta_t to mm per hour
     dp = (p[1:] - p[:-1])*f
     dp[indx] = p[indx+1]*f
     #return np.clip(p[1:] - p[:-1], 0.0, 1000.0)
     return dp
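A worked check of the factor f above, assuming 6-hour accumulation steps (the actual step length is whatever time[1] - time[0] is in the data):

# Worked check (assumption: 6-hour steps, i.e. time[1]-time[0] == 6*3600).
dt_s = 6*3600
f = 1000.0*3600/dt_s               # = 166.67 (m per 6 h -> mm per hour)
assert abs(0.006*f - 1.0) < 1e-9   # 0.006 m accumulated in 6 h is 1.0 mm/h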
Example #27
def ensemble_demo():
    utc = Calendar()
    t_start = utc.time(YMDhms(2011, 9, 1))
    t_fc_ens_start = utc.time(YMDhms(2015, 7, 26))
    disp_start = utc.time(YMDhms(2015, 7, 20))
    dt = deltahours(1)
    n_obs = int(round((t_fc_ens_start - t_start)/dt))
    n_fc_ens = 30
    n_disp = int(round((t_fc_ens_start - disp_start)/dt)) + n_fc_ens + 24*7

    obs_time_axis = Timeaxis(t_start, dt, n_obs + 1)
    fc_ens_time_axis = Timeaxis(t_fc_ens_start, dt, n_fc_ens)
    display_time_axis = Timeaxis(disp_start, dt, n_disp)

    q_obs_m3s_ts = observed_tistel_discharge(obs_time_axis.total_period())
    ptgsk = create_tistel_simulator(PTGSKOptModel, tistel.geo_ts_repository(tistel.grid_spec.epsg()))
    initial_state = burn_in_state(ptgsk, t_start, utc.time(YMDhms(2012, 9, 1)), q_obs_m3s_ts)

    ptgsk.run(obs_time_axis, initial_state)
    current_state = adjust_simulator_state(ptgsk, t_fc_ens_start, q_obs_m3s_ts)
    q_obs_m3s_ts = observed_tistel_discharge(display_time_axis.total_period())
    ens_repos = tistel.arome_ensemble_repository(tistel.grid_spec)
    ptgsk_fc_ens = create_tistel_simulator(PTGSKModel, ens_repos)
    sims = ptgsk_fc_ens.create_ensembles(fc_ens_time_axis, t_fc_ens_start, current_state)
    for sim in sims:
        sim.simulate()
    plt.hold(1)
    percentiles = [10, 25, 50, 75, 90]
    plot_percentiles(sims, percentiles, obs=q_obs_m3s_ts)
    #plt.interactive(1)
    plt.show()
Example #28
 def setUp(self):
     self.c = api.Calendar()
     self.d = api.deltahours(1)
     self.n = 24
     # self.t= self.c.trim(api.utctime_now(),self.d)
     self.t = self.c.trim(self.c.time(1969, 12, 31, 0, 0, 0), self.d)
     self.ta = api.TimeAxis(self.t, self.d, self.n)
Example #29
    def fetch_sources(self, input_source_types, data, params, period):
        """Method for fetching the sources in NetCDF files.

        Parameters
        ----------
        input_source_types : dict
            A map between the data to be extracted and the data containers in shyft.api.
        data : dict
            A geo-located time series shyft.api container.
        params : dict
            Additional parameters for locating the datasets.
        period : tuple
            A (start_time, stop_time) tuple that specifies the simulation period.

        """
        self.__dict__.update(params)
        # Fill the data with actual values
        for input_source, source_api in input_source_types.items():
            ts = self._fetch_station_tseries(input_source, params['types'], period)
            assert type(ts) is list
            tsf = api.TsFactory()
            for station in ts:
                times = station['time']
                assert type(times) is list
                dt = times[1] - times[0] if len(times) > 1 else api.deltahours(1)
                total_period = api.UtcPeriod(times[0], times[-1] + dt)
                time_points = api.UtcTimeVector(times)
                time_points.push_back(total_period.end)
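                # `times` holds the n interval-start points; appending the period
                # end gives the n+1 boundaries that create_time_point_ts pairs
                # with the n values below (our reading of this code).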
                values = station['values']
                value_points = api.DoubleVector.FromNdArray(values)
                api_ts = tsf.create_time_point_ts(total_period, time_points, value_points)
                data_source = source_api(api.GeoPoint(*station['location']), api_ts)
                data[input_source].append(data_source)
        return data
Example #30
    def test_kling_gupta_and_nash_sutcliffe(self):
        """
        Test/verify exposure of the kling_gupta and nash_sutcliffe correlation functions

        """

        def np_nash_sutcliffe(o, p):
            return 1 - (np.sum((o - p) ** 2)) / (np.sum((o - np.mean(o)) ** 2))

        c = api.Calendar()
        t0 = c.time(2016, 1, 1)
        dt = api.deltahours(1)
        n = 240
        ta = api.Timeaxis2(t0, dt, n)
        from math import sin, pi
        rad_max = 10 * 2 * pi
        obs_values = api.DoubleVector.from_numpy(np.array([sin(i * rad_max / n) for i in range(n)]))
        mod_values = api.DoubleVector.from_numpy(np.array([0.1 + sin(pi / 10.0 + i * rad_max / n) for i in range(n)]))
        obs_ts = api.Timeseries(ta=ta, values=obs_values, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
        mod_ts = api.Timeseries(ta=ta, values=mod_values, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)

        self.assertAlmostEqual(api.kling_gupta(obs_ts, obs_ts, ta, 1.0, 1.0, 1.0), 1.0, None, "1.0 for perfect match")
        self.assertAlmostEqual(api.nash_sutcliffe(obs_ts, obs_ts, ta), 1.0, None, "1.0 for perfect match")
        # verify some non trivial cases, and compare to numpy version of ns
        mod_inv = obs_ts * -1.0
        kge_inv = obs_ts.kling_gupta(mod_inv)  # also show how to use time-series.method itself to ease use
        ns_inv = obs_ts.nash_sutcliffe(mod_inv)  # similar for nash_sutcliffe, you can reach it directly from a ts
        ns_inv2 = np_nash_sutcliffe(obs_ts.values.to_numpy(), mod_inv.values.to_numpy())
        self.assertLessEqual(kge_inv, 1.0, "should be less than 1")
        self.assertLessEqual(ns_inv, 1.0, "should be less than 1")
        self.assertAlmostEqual(ns_inv, ns_inv2, 4, "should equal numpy calculated value")
        kge_obs_mod = api.kling_gupta(obs_ts, mod_ts, ta, 1.0, 1.0, 1.0)
        self.assertLessEqual(kge_obs_mod, 1.0)
        self.assertAlmostEqual(obs_ts.nash_sutcliffe(mod_ts), np_nash_sutcliffe(obs_ts.values.to_numpy(), mod_ts.values.to_numpy()))
Example #31
    def test_percentiles(self):
        c = api.Calendar()
        t0 = c.time(2016, 1, 1)
        dt = api.deltahours(1)
        n = 240
        ta = api.Timeaxis(t0, dt, n)
        timeseries = api.TsVector()

        for i in range(10):
            timeseries.append(
                api.Timeseries(ta=ta, fill_value=i, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE))

        wanted_percentiles = api.IntVector([0, 10, 50, -1, 70, 100])
        ta_day = api.Timeaxis(t0, dt * 24, n // 24)
        ta_day2 = api.Timeaxis2(t0, dt * 24, n // 24)
        percentiles = api.percentiles(timeseries, ta_day, wanted_percentiles)
        percentiles2 = timeseries.percentiles(ta_day2, wanted_percentiles)  # just to verify it works with alt. syntax

        self.assertEqual(len(percentiles2), len(percentiles))

        for i in range(len(ta_day)):
            self.assertAlmostEqual(0.0, percentiles[0].value(i), 3, "  0-percentile")
            self.assertAlmostEqual(0.9, percentiles[1].value(i), 3, " 10-percentile")
            self.assertAlmostEqual(4.5, percentiles[2].value(i), 3, " 50-percentile")
            self.assertAlmostEqual(4.5, percentiles[3].value(i), 3, "   -average")
            self.assertAlmostEqual(6.3, percentiles[4].value(i), 3, " 70-percentile")
            self.assertAlmostEqual(9.0, percentiles[5].value(i), 3, "100-percentile")
Example #32
    def test_time_shift(self):
        c=api.Calendar()
        t0=c.time(2016, 1, 1)
        t1=c.time(2017, 1, 1)
        dt=api.deltahours(1)
        n=240
        ta=api.TimeAxisFixedDeltaT(t0, dt, n)
        ts0=api.TimeSeries(ta=ta, fill_value=3.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
        tsa=api.TimeSeries('a')

        ts1=api.time_shift(tsa, t1 - t0)
        self.assertTrue(ts1.needs_bind())
        ts1_blob=ts1.serialize()
        ts1=api.TimeSeries.deserialize(ts1_blob)
        tsb=ts1.find_ts_bind_info()
        self.assertEqual(len(tsb), 1)
        tsb[0].ts.bind(ts0)
        ts1.bind_done()
        self.assertFalse(ts1.needs_bind())

        ts2=2.0*ts1.time_shift(t0 - t1)  # just to verify it still can take part in an expression

        for i in range(ts0.size()):
            self.assertAlmostEqual(ts0.value(i), ts1.value(i), 3, "expect values to be equal")
            self.assertAlmostEqual(ts0.value(i)*2.0, ts2.value(i), 3, "expect values to be double value")
            self.assertEqual(ts0.time(i) + (t1 - t0), ts1.time(i), "expect time to be offset delta_t different")
            self.assertEqual(ts0.time(i), ts2.time(i), "expect time to be equal")
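The serialize/deserialize round-trip above is incidental to the shift itself; on a bound series one call suffices. A sketch using the same free function on the concrete series:

# Sketch: direct shift of the concrete series by one year's offset.
ts3 = api.time_shift(ts0, t1 - t0)
assert ts3.time(0) == ts0.time(0) + (t1 - t0)
assert ts3.value(0) == ts0.value(0)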
Example #33
    def test_create_target_spec_from_std_time_series(self):
        """
        Verify we can create target-spec giving ordinary ts,
        and that passing a non-fixed time-axis raises exception

        """
        cal = api.Calendar()
        ta = api.TimeAxis(cal.time(2017, 1, 1), api.deltahours(1), 24)
        ts = api.TimeSeries(
            ta,
            fill_value=3.0,
            point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
        cids = api.IntVector([0, 2, 3])
        t0 = api.TargetSpecificationPts(ts, cids, 0.7, api.KLING_GUPTA, 1.0,
                                        1.0, 1.0, api.SNOW_COVERED_AREA,
                                        'test_uid')
        self.assertAlmostEqual(t0.ts.value(0), ts.value(0))
        rid = 0
        t1 = api.TargetSpecificationPts(ts, rid, 0.7, api.KLING_GUPTA, 1.0,
                                        1.0, 1.0, 'test_uid')
        self.assertAlmostEqual(t1.ts.value(0), ts.value(0))
        tax = api.TimeAxis(api.UtcTimeVector.from_numpy(ta.time_points[:-1]),
                           ta.total_period().end)
        tsx = api.TimeSeries(
            tax,
            fill_value=2.0,
            point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
        tx = api.TargetSpecificationPts(tsx, rid, 0.7, api.KLING_GUPTA, 1.0,
                                        1.0, 1.0, 'test_uid')
        self.assertIsNotNone(tx)
Example #34
    def test_tiny_bbox(self):
        EPSG, _, _ = self.senorge_epsg_bbox
        x0 = 374499.5  # lower-left x
        y0 = 6451499.5  # lower-left y
        nx = 1.0
        ny = 1.0
        dx = 1.0
        dy = 1.0
        bbox = ([x0, x0 + nx * dx, x0 + nx * dx, x0], [y0, y0, y0 + ny * dy, y0 + ny * dy])
        bpoly = box(min(bbox[0]), min(bbox[1]), max(bbox[0]), max(bbox[1]))

        # Period start
        year = 2015
        month = 1
        n_hours = 30
        date_str = "{}-{:02}".format(year, month)
        utc = api.Calendar()  # No offset gives Utc
        t0 = api.YMDhms(year, month)
        period = api.UtcPeriod(utc.time(t0), utc.time(t0) + api.deltahours(n_hours))

        base_dir = path.join(shyftdata_dir, "repository", "senorge_data_repository", "senorge2")
        filename = "seNorge2_PREC1d_grid_2015.nc"
        reader = SeNorgeDataRepository(EPSG, base_dir, filename=filename,
                                       padding=0)
        data_names = ("precipitation",)

        tss = reader.get_timeseries(data_names, period, geo_location_criteria=bpoly)

        for name, ts in tss.items():
            self.assertTrue(len(ts) == 1)
Example #35
 def test_calendar_add_3h_during_dst(self):
     osl = api.Calendar("Europe/Oslo")
     t0 = osl.time(2016, 3, 27)  # dst change during spring
     t1 = osl.add(t0, api.Calendar.DAY,  1)
     dt3h=api.deltahours(3)
     d3h= osl.diff_units(t0,t1,dt3h)
     self.assertEqual(8, d3h)
Example #36
    def test_no_point_inside_polygon_bounds(self):
        EPSG, bbox, bpoly = self.arome_epsg_bbox
        bounds = bpoly.bounds
        bpoly = box(bounds[0], 6010000.0, bounds[2], 6035000.0)
        # Period start
        year = 2015
        month = 8
        day = 24
        hour = 6
        n_hours = 30
        date_str = "{}{:02}{:02}_{:02}".format(year, month, day, hour)
        utc = api.Calendar()  # No offset gives Utc
        t0 = api.YMDhms(year, month, day, hour)
        period = api.UtcPeriod(utc.time(t0),
                               utc.time(t0) + api.deltahours(n_hours))

        base_dir = path.join(shyftdata_dir, "repository",
                             "arome_data_repository")
        filename = "arome_metcoop_red_default2_5km_{}.nc".format(date_str)
        reader = MetNetcdfDataRepository(EPSG,
                                         base_dir,
                                         filename=filename,
                                         padding=0.0)
        data_names = ("temperature", "wind_speed", "precipitation",
                      "relative_humidity")
        with self.assertRaises(MetNetcdfDataRepositoryError) as context:
            reader.get_timeseries(data_names,
                                  period,
                                  geo_location_criteria=bpoly)
        self.assertEqual(
            "No points in dataset which are within the bounding box of the geo_location_criteria polygon.",
            context.exception.args[0])
Example #37
    def test_kling_gupta_and_nash_sutcliffe(self):
        """
        Test/verify exposure of the kling_gupta and nash_sutcliffe correlation functions

        """

        def np_nash_sutcliffe(o, p):
            return 1 - (np.sum((o - p)**2))/(np.sum((o - np.mean(o))**2))

        c=api.Calendar()
        t0=c.time(2016, 1, 1)
        dt=api.deltahours(1)
        n=240
        ta=api.TimeAxis(t0, dt, n)
        from math import sin, pi
        rad_max=10*2*pi
        obs_values=api.DoubleVector.from_numpy(np.array([sin(i*rad_max/n) for i in range(n)]))
        mod_values=api.DoubleVector.from_numpy(np.array([0.1 + sin(pi/10.0 + i*rad_max/n) for i in range(n)]))
        obs_ts=api.TimeSeries(ta=ta, values=obs_values, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
        mod_ts=api.TimeSeries(ta=ta, values=mod_values, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)

        self.assertAlmostEqual(api.kling_gupta(obs_ts, obs_ts, ta, 1.0, 1.0, 1.0), 1.0, None, "1.0 for perfect match")
        self.assertAlmostEqual(api.nash_sutcliffe(obs_ts, obs_ts, ta), 1.0, None, "1.0 for perfect match")
        # verify some non trivial cases, and compare to numpy version of ns
        mod_inv=obs_ts*-1.0
        kge_inv=obs_ts.kling_gupta(mod_inv)  # also show how to use time-series.method itself to ease use
        ns_inv=obs_ts.nash_sutcliffe(mod_inv)  # similar for nash_sutcliffe, you can reach it directly from a ts
        ns_inv2=np_nash_sutcliffe(obs_ts.values.to_numpy(), mod_inv.values.to_numpy())
        self.assertLessEqual(kge_inv, 1.0, "should be less than 1")
        self.assertLessEqual(ns_inv, 1.0, "should be less than 1")
        self.assertAlmostEqual(ns_inv, ns_inv2, 4, "should equal numpy calculated value")
        kge_obs_mod=api.kling_gupta(obs_ts, mod_ts, ta, 1.0, 1.0, 1.0)
        self.assertLessEqual(kge_obs_mod, 1.0)
        self.assertAlmostEqual(obs_ts.nash_sutcliffe(mod_ts),
                               np_nash_sutcliffe(obs_ts.values.to_numpy(), mod_ts.values.to_numpy()))
Example #38
 def test_abs(self):
     c=api.Calendar()
     t0=c.time(2016, 1, 1)
     dt=api.deltahours(1)
     n=4
     v=api.DoubleVector([1.0, -1.5, float("nan"), 3.0])
     ta=api.TimeAxisFixedDeltaT(t0, dt, n)
     ts0=api.TimeSeries(ta=ta, values=v, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
     tsa=api.TimeSeries('a')
     ts1=tsa.abs()
     ts1_blob=ts1.serialize()
     ts1=api.TimeSeries.deserialize(ts1_blob)
     self.assertTrue(ts1.needs_bind())
     bts=ts1.find_ts_bind_info()
     self.assertEqual(len(bts), 1)
     bts[0].ts.bind(ts0)
     ts1.bind_done()
     self.assertFalse(ts1.needs_bind())
     self.assertAlmostEqual(ts0.value(0), ts1.value(0), 6)
     self.assertAlmostEqual(abs(ts0.value(1)), ts1.value(1), 6)
     self.assertTrue(math.isnan(ts1.value(2)))
     self.assertAlmostEqual(ts0.value(3), ts1.value(3), 6)
     tsv0=api.TsVector()
     tsv0.append(ts0)
     tsv1=tsv0.abs()
     self.assertAlmostEqual(tsv0[0].value(0), tsv1[0].value(0), 6)
     self.assertAlmostEqual(abs(tsv0[0].value(1)), tsv1[0].value(1), 6)
     self.assertTrue(math.isnan(tsv1[0].value(2)))
     self.assertAlmostEqual(tsv0[0].value(3), tsv1[0].value(3), 6)
Example #39
 def test_create_region_environment(self):
     cal = api.Calendar()
     time_axis = api.Timeaxis(cal.time(api.YMDhms(2015, 1, 1, 0, 0, 0)), api.deltahours(1), 240)
     re = self.create_dummy_region_environment(time_axis, api.GeoPoint(1000, 1000, 100))
     self.assertIsNotNone(re)
     self.assertEqual(len(re.radiation), 1)
     self.assertAlmostEqual(re.radiation[0].ts.value(0), 300.0)
Example #40
    def test_tiny_bbox(self):
        EPSG, _ = self.arome_epsg_bbox

        x0 = 436250.0   # lower-left x
        y0 = 6823250.0  # lower-left y
        nx = 1
        ny = 1
        dx = 5.0
        dy = 5.0
        bbox = ([x0, x0 + nx*dx, x0 + nx*dx, x0], [y0, y0, y0 + ny*dy, y0 + ny*dy])
        print(bbox)
        
        # Period start
        year = 2015
        month = 8
        day = 24
        hour = 6
        n_hours = 30
        date_str = "{}{:02}{:02}_{:02}".format(year, month, day, hour)
        utc = api.Calendar()  # No offset gives Utc
        t0 = api.YMDhms(year, month, day, hour)
        period = api.UtcPeriod(utc.time(t0), utc.time(t0) + api.deltahours(n_hours))

        base_dir = path.join(shyftdata_dir, "repository", "arome_data_repository")
        filename = "arome_metcoop_red_default2_5km_{}.nc".format(date_str)
        reader = AromeDataRepository(EPSG, base_dir, filename=filename, 
                                     bounding_box=bbox, x_padding=0, y_padding=0)
        data_names = ("temperature", "wind_speed", "precipitation", "relative_humidity")
        try:
            tss = reader.get_timeseries(data_names, period, None)
        except AromeDataRepositoryError as err:
            self.fail("reader.get_timeseries raised AromeDataRepositoryError('{}') "
                      "unexpectedly.".format(err.args[0]))
Example #41
 def setUp(self):
     self.c=api.Calendar()
     self.d=api.deltahours(1)
     self.n=24
     #self.t= self.c.trim(api.utctime_now(),self.d)
     self.t= self.c.trim(self.c.time(api.YMDhms(1969,12,31,0,0,0)),self.d)
     self.ta=api.Timeaxis(self.t,self.d,self.n)
Example #42
    def test_percentiles(self):
        c=api.Calendar()
        t0=c.time(2016, 1, 1)
        dt=api.deltahours(1)
        n=240
        ta=api.TimeAxisFixedDeltaT(t0, dt, n)
        timeseries=api.TsVector()

        for i in range(10):
            timeseries.append(
                api.TimeSeries(ta=ta, fill_value=i, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE))

        wanted_percentiles=api.IntVector([api.statistics_property.MIN_EXTREME,
                                          0, 10, 50,
                                          api.statistics_property.AVERAGE,
                                          70, 100,
                                          api.statistics_property.MAX_EXTREME])
        ta_day=api.TimeAxisFixedDeltaT(t0, dt*24, n//24)
        ta_day2=api.TimeAxis(t0, dt*24, n//24)
        percentiles=api.percentiles(timeseries, ta_day, wanted_percentiles)
        percentiles2=timeseries.percentiles(ta_day2, wanted_percentiles)  # just to verify it works with alt. syntax

        self.assertEqual(len(percentiles2), len(percentiles))

        for i in range(len(ta_day)):
            self.assertAlmostEqual(0.0, percentiles[0].value(i), 3, "min-extreme ")
            self.assertAlmostEqual(0.0, percentiles[1].value(i), 3, "  0-percentile")
            self.assertAlmostEqual(0.9, percentiles[2].value(i), 3, " 10-percentile")
            self.assertAlmostEqual(4.5, percentiles[3].value(i), 3, " 50-percentile")
            self.assertAlmostEqual(4.5, percentiles[4].value(i), 3, "   -average")
            self.assertAlmostEqual(6.3, percentiles[5].value(i), 3, " 70-percentile")
            self.assertAlmostEqual(9.0, percentiles[6].value(i), 3, "100-percentile")
            self.assertAlmostEqual(9.0, percentiles[7].value(i), 3, "max-extreme")
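Comparing with the earlier percentiles test in this listing: the magic -1 there plays the role of api.statistics_property.AVERAGE here, and MIN_EXTREME/MAX_EXTREME bracket the ordinary percentiles (our inference from the two tests, not documented behavior):

# Sketch: mixing named statistics markers with ordinary percentiles.
wanted2 = api.IntVector([api.statistics_property.MIN_EXTREME, 0, 50,
                         api.statistics_property.AVERAGE, 100,
                         api.statistics_property.MAX_EXTREME])
result2 = api.percentiles(timeseries, api.TimeAxis(t0, dt*24, n//24), wanted2)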
Example #43
    def test_subsets(self):
        EPSG, bbox = self.arome_epsg_bbox
        # Period start
        year = 2015
        month = 8
        day = 24
        hour = 6
        n_hours = 30
        date_str = "{}{:02}{:02}_{:02}".format(year, month, day, hour)
        utc = api.Calendar()  # No offset gives Utc
        t0 = api.YMDhms(year, month, day, hour)
        period = api.UtcPeriod(utc.time(t0), utc.time(t0) + api.deltahours(n_hours))

        base_dir = path.join(shyftdata_dir, "repository", "arome_data_repository")
        filename = "arome_metcoop_red_default2_5km_{}.nc".format(date_str)

        data_names = ("temperature", "wind_speed", "precipitation", "relative_humidity", "radiation")
        allow_subset = False
        reader = AromeDataRepository(EPSG, base_dir, filename=filename,
                                     bounding_box=bbox, allow_subset=allow_subset)
        with self.assertRaises(AromeDataRepositoryError) as context:
            reader.get_timeseries(data_names, period, None)
        self.assertEqual("Could not find all data fields", context.exception.args[0])
        allow_subset = True
        reader = AromeDataRepository(EPSG, base_dir, filename=filename,
                                     bounding_box=bbox, allow_subset=allow_subset)
        try:
            sources = reader.get_timeseries(data_names, period, None)
        except AromeDataRepositoryError as e:
            self.fail("AromeDataRepository.get_timeseries(data_names, period, None) "
                      "raised AromeDataRepositoryError unexpectedly.")
        self.assertEqual(len(sources), len(data_names) - 1)
Exemplo n.º 44
def ensemble_demo():
    utc = Calendar()
    t_start = utc.time(YMDhms(2011, 9, 1))
    t_fc_ens_start = utc.time(YMDhms(2015, 7, 26))
    disp_start = utc.time(YMDhms(2015, 7, 20))
    dt = deltahours(1)
    n_obs = int(round((t_fc_ens_start - t_start) / dt))
    n_fc_ens = 30
    n_disp = int(round((t_fc_ens_start - disp_start) / dt)) + n_fc_ens + 24 * 7

    obs_time_axis = Timeaxis(t_start, dt, n_obs + 1)
    fc_ens_time_axis = Timeaxis(t_fc_ens_start, dt, n_fc_ens)
    display_time_axis = Timeaxis(disp_start, dt, n_disp)

    q_obs_m3s_ts = observed_tistel_discharge(obs_time_axis.total_period())
    ptgsk = create_tistel_simulator(
        PTGSKOptModel, tistel.geo_ts_repository(tistel.grid_spec.epsg()))
    initial_state = burn_in_state(ptgsk, t_start, utc.time(YMDhms(2012, 9, 1)),
                                  q_obs_m3s_ts)

    ptgsk.run(obs_time_axis, initial_state)
    current_state = adjust_simulator_state(ptgsk, t_fc_ens_start, q_obs_m3s_ts)
    q_obs_m3s_ts = observed_tistel_discharge(display_time_axis.total_period())
    ens_repos = tistel.arome_ensemble_repository(tistel.grid_spec)
    ptgsk_fc_ens = create_tistel_simulator(PTGSKModel, ens_repos)
    sims = ptgsk_fc_ens.create_ensembles(fc_ens_time_axis, t_fc_ens_start,
                                         current_state)
    for sim in sims:
        sim.simulate()
    # matplotlib overlays successive plots by default (plt.hold was removed in matplotlib 3.0)
    percentiles = [10, 25, 50, 75, 90]
    plot_percentiles(sims, percentiles, obs=q_obs_m3s_ts)
    plt.interactive(1)
    plt.show()
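A compact outline of the demo's workflow, as read from the code above:

    # 1. burn in an initial state for t_start against observed discharge
    # 2. run the model over the full historical period (obs_time_axis)
    # 3. re-adjust the state against the observed discharge at t_fc_ens_start
    # 4. simulate each arome ensemble member from that common adjusted state
    # 5. plot the 10/25/50/75/90 percentile fan together with the observations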
Exemplo n.º 45
 def test_calendar_add_3h_during_dst(self):
     osl = api.Calendar("Europe/Oslo")
     t0 = osl.time(2016, 3, 27)  # dst change during spring
     t1 = osl.add(t0, api.Calendar.DAY, 1)
     dt3h = api.deltahours(3)
     d3h = osl.diff_units(t0, t1, dt3h)
     self.assertEqual(8, d3h)  # dst day: 23 wall-clock hours, still counted as 8 units of 3h
Exemplo n.º 46
    def test_subsets(self):
        EPSG, bbox, bpoly = self.senorge_epsg_bbox
        # Period start
        year = 2015
        month = 1
        n_hours = 30
        date_str = "{}-{:02}".format(year, month)
        utc = api.Calendar()  # No offset gives Utc
        t0 = api.YMDhms(year, month)
        period = api.UtcPeriod(utc.time(t0), utc.time(t0) + api.deltahours(n_hours))

        base_dir = path.join(shyftdata_dir, "repository", "senorge_data_repository", "senorge2")
        filename = "seNorge2_PREC1d_grid_2015.nc"

        data_names = ("precipitation","foo")
        allow_subset = False
        reader = SeNorgeDataRepository(EPSG, base_dir, filename=filename,
                                       allow_subset=allow_subset)
        with self.assertRaises(SeNorgeDataRepositoryError) as context:
            reader.get_timeseries(data_names, period, geo_location_criteria=bpoly)
        self.assertEqual("Could not find all data fields", context.exception.args[0])
        allow_subset = True
        reader = SeNorgeDataRepository(EPSG, base_dir, filename=filename,
                                       allow_subset=allow_subset)
        try:
            sources = reader.get_timeseries(data_names, period, geo_location_criteria=bpoly)
        except SeNorgeDataRepositoryError as e:
            self.fail("AromeDataRepository.get_timeseries(data_names, period, None) "
                      "raised AromeDataRepositoryError unexpectedly.")
        self.assertEqual(len(sources), len(data_names)-1)
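Both subset tests above exercise the same repository contract; a minimal sketch of the pattern, with reader standing in for either repository and the request names taken from the tests:

    # allow_subset=False: requesting any unavailable field raises the repository error
    # allow_subset=True : the reader silently returns the subset of fields it can supply
    sources = reader.get_timeseries(("precipitation", "foo"), period, geo_location_criteria=bpoly)
    assert set(sources).issubset({"precipitation", "foo"})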
Exemplo n.º 47
    def test_integral(self):
        c=api.Calendar()
        t0=c.time(2016, 1, 1)
        dt=api.deltahours(1)
        n=240
        ta=api.TimeAxis(t0, dt, n)
        fill_value=1.0
        ts=api.TimeSeries(ta=ta, fill_value=fill_value, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
        tsa=api.TimeSeries('a')*1.0 + 0.0  # expression, needing bind
        tsb=api.TimeSeries('b')*1.0 + 0.0  # another expression, needing bind for different ts
        ts_i1=tsa.integral(ta)
        ts_i2=api.integral(tsb, ta)
        # round-trip through serialization
        ts_i1_blob=ts_i1.serialize()
        ts_i2_blob=ts_i2.serialize()
        ts_i1=api.TimeSeries.deserialize(ts_i1_blob)
        ts_i2=api.TimeSeries.deserialize(ts_i2_blob)

        for ts_i in [ts_i1, ts_i2]:
            self.assertTrue(ts_i.needs_bind())
            tsb=ts_i.find_ts_bind_info()
            self.assertEqual(len(tsb), 1)
            tsb[0].ts.bind(ts)
            ts_i.bind_done()
            self.assertFalse(ts_i.needs_bind())

        ts_i1_values=ts_i1.values
        for i in range(n):
            expected_value=dt*fill_value
            self.assertAlmostEqual(expected_value, ts_i1.value(i), 4, "expect integral of each interval")
            self.assertAlmostEqual(expected_value, ts_i2.value(i), 4, "expect integral of each interval")
            self.assertAlmostEqual(expected_value, ts_i1_values[i], 4, "expect integral of each interval")
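The expected magnitude follows from Shyft time deltas being plain seconds (api.deltahours(1) is 3600), so integrating the constant 1.0 over a one-hour interval gives dt*fill_value; a trivial check of the asserted value:

    dt_seconds = 3600.0                        # api.deltahours(1), expressed in seconds
    fill_value = 1.0
    assert dt_seconds * fill_value == 3600.0   # the integral expected for every interval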
Exemplo n.º 48
    def test_get_dummy(self):
        """
        Simple regression test of the SeNorge data repository.
        """
        EPSG, bbox, bpoly = self.senorge_epsg_bbox

        # Period start
        n_days = 5
        t0 = api.YMDhms(2015, 2)
        date_str = "{}-{:02}".format(t0.year, t0.month)
        utc = api.Calendar()  # No offset gives Utc
        period = api.UtcPeriod(utc.time(t0), utc.time(t0) + api.deltahours(n_days * 24))

        base_dir = path.join(shyftdata_dir, "repository", "senorge_data_repository", "senorge2")
        f1 = "seNorge2_PREC1d_grid_2015.nc"

        senorge1 = SeNorgeDataRepository(EPSG, base_dir, filename=f1, allow_subset=True)
        senorge1_data_names = ("radiation",)
        sources = senorge1.get_timeseries(senorge1_data_names, period, geo_location_criteria=bpoly)
        self.assertTrue(len(sources) > 0)

        self.assertTrue(set(sources) == set(senorge1_data_names))
        p0 = sources["radiation"][0].ts
        self.assertTrue(p0.size() == n_days)
        self.assertEqual(p0.time(0), period.start)
Exemplo n.º 49
 def test_UtcPeriod_to_string(self):
     c1 = api.YMDhms(2000, 1, 2, 3, 4, 5)
     t = self.utc.time(c1)
     p = api.UtcPeriod(t, t + api.deltahours(1))
     s = p.to_string()
     self.assertEqual(s, "[2000-01-02T03:04:05Z,2000-01-02T04:04:05Z>")
     s2 = self.std.to_string(p)
     self.assertEqual(s2, "[2000-01-02T04:04:05+01,2000-01-02T05:04:05+01>")
Exemplo n.º 50
 def test_source_uid(self):
     cal = api.Calendar()
     time_axis = api.Timeaxis(cal.time(api.YMDhms(2015, 1, 1, 0, 0, 0)), api.deltahours(1), 240)
     mid_point = api.GeoPoint(1000, 1000, 100)
     precip_source = self._create_constant_geo_ts(api.PrecipitationSource, mid_point, time_axis.total_period(), 5.0)
     self.assertIsNotNone(precip_source.uid)
     precip_source.uid = 'abc'
     self.assertEqual(precip_source.uid, 'abc')
Exemplo n.º 51
 def test_wrong_file(self):
     with self.assertRaises(AromeDataRepositoryError) as context:
         utc = api.Calendar()  # No offset gives Utc
         t0 = api.YMDhms(2015, 12, 25, 18)
         period = api.UtcPeriod(utc.time(t0), utc.time(t0) + api.deltahours(30))
         ar1 = AromeDataRepository(32632, shyftdata_dir, filename="plain_wrong.nc")
         ar1.get_timeseries(("temperature",), period, None)
     self.assertTrue(all(x in context.exception.args[0] for x in ["File", "not found"]))
Exemplo n.º 52
    def test_UtcPeriod_to_string(self):
        c1 = api.YMDhms(2000, 1, 2, 3, 4, 5)
        t = self.utc.time(c1)
        p = api.UtcPeriod(t, t + api.deltahours(1))
        s = p.to_string()
        self.assertEqual(s, "[2000.01.02T03:04:05,2000.01.02T04:04:05>")
        s2 = self.utc.to_string(p)

        self.assertEqual(s2, "[2000.01.02T03:04:05,2000.01.02T04:04:05>")
Exemplo n.º 53
    def test_create_TargetSpecificationPts(self):
        t = api.TargetSpecificationPts()
        t.scale_factor = 1.0
        t.calc_mode = api.NASH_SUTCLIFFE
        t.calc_mode = api.KLING_GUPTA
        t.s_r = 1.0  # KGE's scale-factors
        t.s_a = 2.0
        t.s_b = 3.0
        self.assertIsNotNone(t.uid)
        t.uid = 'test'
        self.assertEqual(t.uid,'test')
        self.assertAlmostEqual(t.scale_factor, 1.0)
        # create a ts with some points
        cal = api.Calendar()
        start = cal.time(api.YMDhms(2015, 1, 1, 0, 0, 0))
        dt = api.deltahours(1)
        tsf = api.TsFactory()
        times = api.UtcTimeVector()
        times.push_back(start + 1 * dt)
        times.push_back(start + 3 * dt)
        times.push_back(start + 4 * dt)

        values = api.DoubleVector()
        values.push_back(1.0)
        values.push_back(3.0)
        values.push_back(np.nan)
        tsp = tsf.create_time_point_ts(api.UtcPeriod(start, start + 24 * dt), times, values)
        # convert it from a time-point ts (as returned from the current smg-repository) to a fixed-interval ts with a time axis, as needed by calibration
        tst = api.TsTransform()
        tsa = tst.to_average(start, dt, 24, tsp)
        # tsa2 = tst.to_average(start,dt,24,tsp,False)
        # tsa_staircase = tst.to_average_staircase(start,dt,24,tsp,False) # nans infects the complete interval to nan
        # tsa_staircase2 = tst.to_average_staircase(start,dt,24,tsp,True) # skip nans, nans are 0
        # stuff it into the target spec.
        # also show how to specify snow-calibration
        cids = api.IntVector([0, 2, 3])
        t2 = api.TargetSpecificationPts(tsa, cids, 0.7, api.KLING_GUPTA, 1.0, 1.0, 1.0, api.SNOW_COVERED_AREA,'test_uid')
        self.assertEqual(t2.uid,'test_uid')
        t2.catchment_property = api.SNOW_WATER_EQUIVALENT
        self.assertEqual(t2.catchment_property, api.SNOW_WATER_EQUIVALENT)
        self.assertIsNotNone(t2.catchment_indexes)
        for i in range(len(cids)):
            self.assertEqual(cids[i], t2.catchment_indexes[i])
        t.ts = tsa
        # TODO: Does not work, list of objects are not yet convertible tv = api.TargetSpecificationVector([t, t2])
        tv = api.TargetSpecificationVector()
        tv.append(t)
        tv.append(t2)
        # now verify we got something ok
        self.assertEqual(2, tv.size())
        self.assertAlmostEqual(tv[0].ts.value(1), 1.5)  # linear between points: avg of 1.0..2.0 over hour 1..2
        self.assertAlmostEqual(tv[0].ts.value(2), 2.5)  # avg of 2.0..3.0 over hour 2..3
        self.assertAlmostEqual(tv[0].ts.value(3), 3.0)  # last finite value held; the next point is nan
        # and that the target vector now have its own copy of ts
        tsa.set(1, 3.0)
        self.assertAlmostEqual(tv[0].ts.value(1), 1.5)  # make sure the ts passed onto target spec, is a copy
        self.assertAlmostEqual(tsa.value(1), 3.0)  # and that we really did change the source
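A worked check of those averages, assuming linear interpolation between the supplied points (1.0 at start+1h, 3.0 at start+3h, nan at start+4h):

    # hour 1..2: mean of the line from 1.0 to 2.0 -> (1.0 + 2.0) / 2 = 1.5
    # hour 2..3: mean of the line from 2.0 to 3.0 -> (2.0 + 3.0) / 2 = 2.5
    # hour 3..4: the last finite value is held    -> 3.0
    assert (1.0 + 2.0) / 2 == 1.5 and (2.0 + 3.0) / 2 == 2.5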
Exemplo n.º 54
def burn_in_state(simulator, t_start, t_stop, q_obs_m3s_ts):
    dt = deltahours(1)
    n = int(round((t_stop - t_start)/dt))
    time_axis = Timeaxis(t_start, dt, n)
    n_cells = simulator.region_model.size()
    state_repos = DefaultStateRepository(simulator.region_model.__class__, n_cells)
    simulator.run(time_axis, state_repos.get_state(0))
    # Go back in time (to t_start) and adjust q with observed discharge at that time.
    # This will give us a good initial state at t_start
    return adjust_simulator_state(simulator, t_start, q_obs_m3s_ts)
Exemplo n.º 55
 def test_calendar_add_and_diff_units(self):
     osl = api.Calendar("Europe/Oslo")
     t0 = osl.time(2016, 6, 1, 12, 0, 0)
     t1 = osl.add(t0, api.Calendar.DAY, 7)
     t2 = osl.add(t1, api.Calendar.WEEK, -1)
     self.assertEqual(t0, t2)
     self.assertEqual(7, osl.diff_units(t0, t1, api.Calendar.DAY))
     self.assertEqual(1, osl.diff_units(t0, t1, api.Calendar.WEEK))
     self.assertEqual(0, osl.diff_units(t0, t1, api.Calendar.MONTH))
     self.assertEqual(7*24, osl.diff_units(t0, t1, api.deltahours(1)))
Exemplo n.º 56
    def setUp(self):
        self.cal = api.Calendar()
        self.dt = api.deltahours(1)
        self.nt = 24*10
        self.t0 = self.cal.time(2016, 1, 1)
        self.ta = api.Timeaxis2(self.t0, self.dt, self.nt)
        self.ta1 = api.Timeaxis(self.t0, self.dt, self.nt)

        self.geo_points = api.GeoPointVector()
        self.geo_points.append(api.GeoPoint( 100,  100, 1000))
        self.geo_points.append(api.GeoPoint(5100,  100, 1150))
        self.geo_points.append(api.GeoPoint( 100, 5100,  850))
Exemplo n.º 57
    def test_swig_python_time(self):
        """
        This particular test is here to point out a platform-specific bug detected
        on Windows.

        """
        c1 = api.YMDhms(1969, 12, 31, 23, 0, 0)
        t = self.utc.time(c1)  # at this point, the value returned from c++ is correct, but on its
        # way through the swig layer to python it went via int32 and then int64, without proper sign handling
        t_str = self.utc.to_string(t)  # this one just to show it's still working as it should internally
        self.assertEqual(t_str, "1969-12-31T23:00:00Z")
        self.assertEqual(t, api.deltahours(-1))
Exemplo n.º 58
 def test_AverageAccessor(self):
     dv=np.arange(self.ta.size())
     v=api.DoubleVector.FromNdArray(dv)
     t=api.UtcTimeVector()
     for i in range(self.ta.size()):
         t.push_back(self.ta(i).start)
     t.push_back(self.ta(self.ta.size()-1).end)  # important! needs n+1 points to determine n periods in the timeaxis
     tsf=api.TsFactory()
     ts1=tsf.create_point_ts(self.ta.size(), self.t, self.d, v)
     ts2=tsf.create_time_point_ts(self.ta.total_period(),t,v)
     tax=api.Timeaxis(self.ta.start()+api.deltaminutes(30),api.deltahours(1),self.ta.size())
     avg1=api.AverageAccessorTs(ts1,tax)
     self.assertEqual(avg1.size(), tax.size())