Example 1
    def test_percentiles(self):
        c = api.Calendar()
        t0 = c.time(2016, 1, 1)
        dt = api.deltahours(1)
        n = 240
        ta = api.Timeaxis(t0, dt, n)
        timeseries = api.TsVector()

        for i in range(10):
            timeseries.append(
                api.Timeseries(ta=ta, fill_value=i, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE))

        wanted_percentiles = api.IntVector([0, 10, 50, -1, 70, 100])
        ta_day = api.Timeaxis(t0, dt * 24, n // 24)
        ta_day2 = api.Timeaxis2(t0, dt * 24, n // 24)
        percentiles = api.percentiles(timeseries, ta_day, wanted_percentiles)
        percentiles2 = timeseries.percentiles(ta_day2, wanted_percentiles)  # just to verify it works with alt. syntax

        self.assertEqual(len(percentiles2), len(percentiles))

        for i in range(len(ta_day)):
            self.assertAlmostEqual(0.0, percentiles[0].value(i), 3, "  0-percentile")
            self.assertAlmostEqual(0.9, percentiles[1].value(i), 3, " 10-percentile")
            self.assertAlmostEqual(4.5, percentiles[2].value(i), 3, " 50-percentile")
            self.assertAlmostEqual(4.5, percentiles[3].value(i), 3, "   -average")
            self.assertAlmostEqual(6.3, percentiles[4].value(i), 3, " 70-percentile")
            self.assertAlmostEqual(9.0, percentiles[5].value(i), 3, "100-percentile")
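Note on Example 1: the -1 entry in wanted_percentiles is treated specially and yields the arithmetic mean rather than a percentile, as the "-average" assertion (4.5, the mean of the constant series 0..9) shows. For reference, a minimal sketch of the setup pattern these examples share, using only the calls that appear above and assuming the usual shyft.api import:

from shyft import api

utc = api.Calendar()               # no offset gives UTC
t0 = utc.time(2016, 1, 1)          # start of the time axis
dt = api.deltahours(1)             # one-hour step
ta = api.Timeaxis(t0, dt, 24)      # 24 hourly periods
ts = api.Timeseries(ta=ta, fill_value=1.0,
                    point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)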
Example 2
    def setUp(self):
        self.c = api.Calendar()
        self.d = api.deltahours(1)
        self.n = 24
        # self.t = self.c.trim(api.utctime_now(), self.d)
        self.t = self.c.trim(self.c.time(api.YMDhms(1969, 12, 31, 0, 0, 0)), self.d)
        self.ta = api.Timeaxis(self.t, self.d, self.n)
Example 3
def create_mock_station_data(t0, dt, n_steps, **kwargs):
    time_axis = api.Timeaxis(t0, dt, n_steps)
    return {"temperature": create_mock_time_series_data("temperature", time_axis, **kwargs),
            "precipitation": create_mock_time_series_data("precipitation", time_axis, **kwargs),
            "relative_humidity": create_mock_time_series_data("relative_humidity", time_axis, **kwargs),
            "wind_speed": create_mock_time_series_data("wind_speed", time_axis, **kwargs),
            "radiation": create_mock_time_series_data("radiation", time_axis, **kwargs)}
Example 4
    def test_can_run_bayesian_kriging_from_arome25_to_1km(self):
        """
        Verify that if we run btk interpolation, we do get updated time-series according to time-axis and range
        specified.

        """
        # arrange the test with a btk_parameter, a source grid and a destination grid
        btk_parameter = api.BTKParameter(temperature_gradient=-0.6,
                                         temperature_gradient_sd=0.25,
                                         sill=25.0,
                                         nugget=0.5,
                                         range=20000.0,
                                         zscale=20.0)
        fx = lambda z: api.DoubleVector.from_numpy(
            (20.0 - 0.6 * z / 100) + 3.0 * np.sin(
                np.arange(start=0, stop=self.n, step=1) * 2 * np.pi / 24.0 - np.pi / 2.0))
        arome_grid = self._create_geo_ts_grid(self.nx, self.ny, self.dx_arome,
                                              fx)
        destination_grid = self._create_geo_point_grid(self.mnx, self.mny,
                                                       self.dx_model)
        ta = api.Timeaxis(self.t, self.d * 3, int(self.n / 3))
        # act: run the bayesian_kriging_temperature algorithm.
        r = api.bayesian_kriging_temperature(arome_grid, destination_grid, ta,
                                             btk_parameter)
        # assert
        self.assertIsNotNone(r)
        self.assertEqual(len(r), self.mnx * self.mny)
        for gts in r:  # do some sanity checks for the btk. Note that full-range checking is already done elsewhere
            self.assertEqual(gts.ts.size(), ta.size())
            self.assertLess(np.max(gts.ts.values.to_numpy()),
                            23.0)  # all values less than ~max
            self.assertGreater(np.min(gts.ts.values.to_numpy()),
                               7.0)  # all values larger than ~ min
Example 5
    def test_create_region_environment(self):
        cal = api.Calendar()
        time_axis = api.Timeaxis(cal.time(api.YMDhms(2015, 1, 1, 0, 0, 0)), api.deltahours(1), 240)
        re = self.create_dummy_region_environment(time_axis, api.GeoPoint(1000, 1000, 100))
        self.assertIsNotNone(re)
        self.assertEqual(len(re.radiation), 1)
        self.assertAlmostEqual(re.radiation[0].ts.value(0), 300.0)
Example 6
    def _transform_raw(self, data, time):

        #def noop_time(t):
        #    return api.Timeaxis(api.utctime(t[0]), api.timespan(t[1] - t[0]), len(t))

        def noop_space(x):
            return x

        def air_temp_conv(x):
            return x - 273.15  # Kelvin -> Celsius

        def prec_conv(x):
            return x * 3600  # per-second rate -> per-hour

        convert_map = {
            "wind_speed": lambda x, ta: (noop_space(x), ta),
            "radiation": lambda x, ta: (noop_space(x), ta),
            "temperature": lambda x, ta: (air_temp_conv(x), ta),
            "precipitation": lambda x, ta: (prec_conv(x), ta),
            "relative_humidity": lambda x, ta: (noop_space(x), ta)
        }

        ta = api.Timeaxis(int(time[0]), int(time[1] - time[0]), len(time))
        res = {}
        for k, v in data.items():
            res[k] = convert_map[k](v, ta)
        return res
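The method builds one fixed-interval time axis from the first two timestamps and applies a per-variable unit conversion. A hypothetical call, assuming repo is an instance of the repository class this method belongs to and time holds epoch seconds at a fixed step:

import numpy as np

time = np.array([0, 3600, 7200], dtype=np.int64)            # hypothetical epoch seconds, 1 h apart
data = {"temperature": np.array([273.15, 274.15, 275.15]),  # Kelvin
        "precipitation": np.array([1.0e-4, 2.0e-4, 0.0])}   # per-second rate

res = repo._transform_raw(data, time)
# res["temperature"]   -> (array with values 0.0, 1.0, 2.0 degC, ta)
# res["precipitation"] -> (array with values 0.36, 0.72, 0.0 per hour, ta)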
Example 7
    def __init__(self, config_file, config_section, **kwargs):
        """
        Setup a config instance for a netcdf orchestration from a YAML file.

        Parameters
        ----------
        config_file : string
          Path to the YAML configuration file
        config_section : string
          Section in YAML file for simulation parameters.

        Returns
        -------
        YAMLConfig instance
        """
        # The config_file needs to be an absolute path or have 'config_dir'
        if os.path.isabs(config_file):
            self._config_file = config_file
            self.config_dir = os.path.dirname(config_file)
        elif "config_dir" in kwargs:
            self._config_file = os.path.join(kwargs["config_dir"], config_file)
        else:
            raise ConfigError("'config_file' must be an absolute path "
                              "or 'config_dir' passed as an argument")

        self._config_section = config_section

        # Load main configuration file
        with open(self._config_file) as cfg:
            config = yaml.load(cfg)[config_section]
        # Expose all keys in yaml file as attributes
        self.__dict__.update(config)
        # Override the parameters with kwargs
        self.__dict__.update(kwargs)

        # Check validity of some attributes
        if not hasattr(self, "config_dir"):
            raise ConfigError("'config_dir' must be present in config section "
                              "or passed as an argument")
        if not (os.path.isdir(self.config_dir)
                and os.path.isabs(self.config_dir)):
            raise ConfigError(
                "'config_dir' must exist and be an absolute path")
        if not hasattr(self, "data_dir"):
            raise ConfigError("'data_dir' must be present in config section "
                              "or passed as an argument")
        if not (os.path.isdir(self.data_dir) and os.path.isabs(self.data_dir)):
            raise ConfigError("'data_dir' must exist and be an absolute path")

        # Create a time axis
        self.start_time = utctime_from_datetime(self.start_datetime)
        self.time_axis = api.Timeaxis(self.start_time, self.run_time_step,
                                      self.number_of_steps)
        # Get the region model in API (already an object if in kwargs)
        if 'model_t' not in kwargs:
            module, model_t = self.model_t.split(".")
            self.model_t = getattr(globals()[module], model_t)
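A hypothetical construction of this config class (file and section names made up for illustration); note that config_dir and data_dir must be absolute, existing directories, supplied either in the YAML section or as keyword arguments:

cfg = YAMLConfig("simulation.yaml", "simulation",   # hypothetical file and section names
                 config_dir="/abs/path/to/config",
                 data_dir="/abs/path/to/data")
print(cfg.time_axis.size())  # axis built from start_datetime, run_time_step and number_of_steps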
Example 8
    def test_compute_lwc_percentiles(self):
        # Simulation time axis
        year, month, day, hour = 2010, 9, 1, 0
        dt = api.deltahours(24)
        n_steps = 400
        utc = api.Calendar()  # No offset gives Utc
        t0 = utc.time(api.YMDhms(year, month, day, hour))
        time_axis = api.Timeaxis(t0, dt, n_steps)

        # Some fake ids
        region_id = 0
        interpolation_id = 0

        # Simulation coordinate system
        epsg = "32633"

        # Model
        model_t = pt_gs_k.PTGSKModel

        # Configs and repositories
        dataset_config_file = path.join(path.dirname(__file__), "netcdf",
                                        "atnsjoen_datasets.yaml")
        region_config_file = path.join(path.dirname(__file__), "netcdf",
                                       "atnsjoen_calibration_region.yaml")
        region_config = RegionConfig(region_config_file)
        model_config = ModelConfig(self.model_config_file)
        dataset_config = YamlContent(dataset_config_file)
        region_model_repository = RegionModelRepository(
            region_config, model_config, model_t, epsg)
        interp_repos = InterpolationParameterRepository(model_config)
        netcdf_geo_ts_repos = []
        for source in dataset_config.sources:
            station_file = source["params"]["stations_met"]
            netcdf_geo_ts_repos.append(
                GeoTsRepository(source["params"], station_file, ""))
        geo_ts_repository = GeoTsRepositoryCollection(netcdf_geo_ts_repos)

        # Construct target discharge series
        simulator = DefaultSimulator(region_id, interpolation_id,
                                     region_model_repository,
                                     geo_ts_repository, interp_repos, None)
        n_cells = simulator.region_model.size()
        state_repos = DefaultStateRepository(model_t, n_cells)
        cid = 1
        simulator.region_model.set_state_collection(cid, True)
        simulator.run(time_axis, state_repos.get_state(0))

        percentile_list = [10, 25, 50, 75, 90]
        # From here, things could be calculated without copies (except for 't')
        # TODO: Graham optimize with numba :-)
        cells = simulator.region_model.get_cells()
        lwcs = [np.array(cell.sc.gs_lwc.v) for cell in cells]  # Contiguous
        t = np.array([
            cells[0].sc.gs_lwc.time(i)
            for i in range(cells[0].sc.gs_lwc.size())
        ])
        percentiles = np.percentile(np.array(lwcs), percentile_list, 0)
Example 9
    def __init__(self, config_file, config_section, overrides=None):
        """
        Setup a config instance for a netcdf orchestration from a YAML file.

        Parameters
        ----------
        config_file : string
          Path to the YAML configuration file
        config_section : string
          Section in YAML file for simulation parameters.

        Returns
        -------
        YAMLConfig instance
        """
        if overrides is None:
            overrides = {}
        # The config_file needs to be an absolute path
        if os.path.isabs(config_file):
            self._config_file = config_file
            self.config_dir = os.path.dirname(config_file)
        else:
            raise ConfigError("'config_file' must be an absolute path ")

        self._config_section = config_section

        # Load main configuration file
        with open(self._config_file, encoding='utf8') as cfg:
            config = yaml.load(cfg)[config_section]
        # Expose all keys in yaml file as attributes
        self.__dict__.update(config)
        # Override the parameters with kwargs
        #self.__dict__.update(kwargs)
        self.__dict__.update(overrides.get("config", {}))

        self.validate()

        # Create a time axis
        # It is assumed that the time specified in the config file is in UTC
        self.start_time = utctime_from_datetime(self.start_datetime)
        self.time_axis = api.Timeaxis(self.start_time, self.run_time_step,
                                      self.number_of_steps)
        # Get the region model in API (already an object if in kwargs)
        #if 'model_t' not in kwargs:
        #    module, model_t = self.model_t.split(".")
        #    self.model_t = getattr(globals()[module], model_t)

        # If region and interpolation ids are not present, just use fake ones
        # self.region_id = 0 if not hasattr(self, "region_id") else int(self.region_id)
        self.region_model_id = str(self.region_model_id)
        self.interpolation_id = 0 if not hasattr(self, "interpolation_id") \
            else int(self.interpolation_id)
        self.initial_state_repo = None
        self.end_state_repo = None

        self.construct_repos(overrides)
Example 10
    def _convert_to_timeseries(self, data, t, ts_id):
        ta = api.Timeaxis(int(t[0]), int(t[1]) - int(t[0]), len(t))
        tsc = api.TsFactory().create_point_ts

        def construct(d):
            return tsc(ta.size(), ta.start, ta.delta_t,
                       api.DoubleVector.FromNdArray(d))

        ts = [construct(data[:, j]) for j in range(data.shape[-1])]
        return {k: v for k, v in zip(ts_id, ts)}
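_convert_to_timeseries assumes a fixed time step (inferred from the first two timestamps) and produces one point time-series per data column, keyed by ts_id. A hypothetical call, with repo standing in for an instance of the owning class:

import numpy as np

t = np.array([0, 3600, 7200, 10800], dtype=np.int64)  # hypothetical epoch seconds, 1 h step
data = np.random.rand(4, 2)                           # 4 time steps x 2 stations
ts_map = repo._convert_to_timeseries(data, t, ["station_a", "station_b"])
# ts_map["station_a"].size() -> 4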
Example 11
    def test_create_timeaxis(self):
        self.assertEqual(self.ta.size(), self.n)
        self.assertEqual(len(self.ta), self.n)
        self.assertEqual(self.ta(0).start, self.t)
        self.assertEqual(self.ta(0).end, self.t + self.d)
        self.assertEqual(self.ta(1).start, self.t + self.d)
        self.assertEqual(self.ta.total_period().start, self.t)
        va = np.array([86400, 3600, 3], dtype=np.int64)
        xta = api.Timeaxis(int(va[0]), int(va[1]), int(va[2]))
        # xta = api.Timeaxis(va[0], va[1], va[2])  # TODO: boost.python requires plain int; needs an overload for np.int64 types
        # xta = api.Timeaxis(86400, 3600, 3)
        self.assertEqual(xta.size(), 3)
Example 12
    def setUp(self):
        self.cal = api.Calendar()
        self.dt = api.deltahours(1)
        self.nt = 24 * 10
        self.t0 = self.cal.time(2016, 1, 1)
        self.ta = api.Timeaxis2(self.t0, self.dt, self.nt)
        self.ta1 = api.Timeaxis(self.t0, self.dt, self.nt)

        self.geo_points = api.GeoPointVector()
        self.geo_points.append(api.GeoPoint(100, 100, 1000))
        self.geo_points.append(api.GeoPoint(5100, 100, 1150))
        self.geo_points.append(api.GeoPoint(100, 5100, 850))
Example 13
    def test_unit_conversion(self):
        utc = api.Calendar()
        # we use times both before and after the epoch to ensure the sign is handled correctly
        t_num = np.arange(-24, 24, 1, dtype=np.float64)
        t_converted = convert_netcdf_time('hours since 1970-01-01 00:00:00', t_num)
        t_axis = api.Timeaxis(utc.time(api.YMDhms(1969, 12, 31, 0, 0, 0)),
                              api.deltahours(1), 2 * 24)
        for i in range(t_axis.size()):
            self.assertEqual(t_converted[i], t_axis(i).start)
Example 14
    def test_average_accessor(self):
        dv = np.arange(self.ta.size())
        v = api.DoubleVector.from_numpy(dv)
        t = api.UtcTimeVector()
        for i in range(self.ta.size()):
            t.push_back(self.ta(i).start)
        # important! needs n+1 points to determine n periods in the timeaxis
        t.push_back(self.ta(self.ta.size() - 1).end)
        tsf = api.TsFactory()
        ts1 = tsf.create_point_ts(self.ta.size(), self.t, self.d, v)
        ts2 = tsf.create_time_point_ts(self.ta.total_period(), t, v)
        tax = api.Timeaxis(self.ta.total_period().start + api.deltaminutes(30), api.deltahours(1), self.ta.size())
        avg1 = api.AverageAccessorTs(ts1, tax)
        self.assertEqual(avg1.size(), tax.size())
        self.assertIsNotNone(ts2)
Example 15
    def test_run_arome_ensemble(self):
        # Simulation time axis
        year, month, day, hour = 2015, 7, 26, 0
        n_hours = 30
        dt = api.deltahours(1)
        utc = api.Calendar()  # No offset gives Utc
        t0 = utc.time(api.YMDhms(year, month, day, hour))
        time_axis = api.Timeaxis(t0, dt, n_hours)

        # Some dummy ids not needed for the netcdf based repositories
        region_id = 0
        interpolation_id = 0

        # Simulation coordinate system
        epsg = "32633"

        # Model
        model_t = pt_gs_k.PTGSKOptModel

        # Configs and repositories
        region_config = RegionConfig(self.region_config_file)
        model_config = ModelConfig(self.model_config_file)
        region_model_repository = RegionModelRepository(
            region_config, model_config, model_t, epsg)
        interp_repos = InterpolationParameterRepository(model_config)
        base_dir = path.join(shyftdata_dir, "netcdf", "arome")
        pattern = "fc*.nc"
        try:
            geo_ts_repository = AromeDataRepository(epsg,
                                                    base_dir,
                                                    filename=pattern,
                                                    allow_subset=True)
        except Exception as e:
            print("**** test_run_arome_ensemble: Arome data missing or"
                  " wrong, test inconclusive ****")
            print("****{}****".format(e))
            self.skipTest(
                "**** test_run_arome_ensemble: Arome data missing or wrong, test "
                "inconclusive ****\n\t exception:{}".format(e))
        simulator = DefaultSimulator(region_id, interpolation_id,
                                     region_model_repository,
                                     geo_ts_repository, interp_repos, None)
        n_cells = simulator.region_model.size()
        state_repos = DefaultStateRepository(model_t, n_cells)
        simulators = simulator.create_ensembles(time_axis, t0,
                                                state_repos.get_state(0))
        for s in simulators:
            s.simulate()
Example 16
    def test_time_shift(self):
        c = api.Calendar()
        t0 = c.time(2016, 1, 1)
        t1 = c.time(2017, 1, 1)
        dt = api.deltahours(1)
        n = 240
        ta = api.Timeaxis(t0, dt, n)
        ts0 = api.Timeseries(ta=ta, fill_value=3.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
        ts1 = api.time_shift(ts0, t1 - t0)
        ts2 = 2.0 * ts1.time_shift(t0 - t1)  # just to verify it still can take part in an expression

        for i in range(ts0.size()):
            self.assertAlmostEqual(ts0.value(i), ts1.value(i), 3, "expect values to be equal")
            self.assertAlmostEqual(ts0.value(i) * 2.0, ts2.value(i), 3, "expect values to be double value")
            self.assertEqual(ts0.time(i) + (t1 - t0), ts1.time(i), "expect time to be offset delta_t different")
            self.assertEqual(ts0.time(i), ts2.time(i), "expect time to be equal")
Example 17
    def run_simulator(self, model_t):
        # Simulation time axis
        dt0 = api.YMDhms(2015, 8, 24, 6)
        n_hours = 30
        dt = api.deltahours(1)
        utc = api.Calendar()  # No offset gives Utc
        t0 = utc.time(dt0)
        time_axis = api.Timeaxis(t0, dt, n_hours)

        # Some dummy ids not needed for the netcdf based repositories
        region_id = 0
        interpolation_id = 0

        # Simulation coordinate system
        epsg = "32633"

        # Configs and repositories
        region_config = RegionConfig(self.region_config_file)
        model_config = ModelConfig(self.model_config_file)
        region_model_repository = RegionModelRepository(
            region_config, model_config, model_t, epsg)
        interp_repos = InterpolationParameterRepository(model_config)
        date_str = "{}{:02}{:02}_{:02}".format(dt0.year, dt0.month, dt0.day,
                                               dt0.hour)
        base_dir = path.join(shyftdata_dir, "repository",
                             "arome_data_repository")
        f1 = "arome_metcoop_red_default2_5km_{}.nc".format(date_str)
        f2 = "arome_metcoop_red_test2_5km_{}.nc".format(date_str)

        ar1 = AromeDataRepository(epsg,
                                  base_dir,
                                  filename=f1,
                                  allow_subset=True)
        ar2 = AromeDataRepository(epsg,
                                  base_dir,
                                  filename=f2,
                                  elevation_file=f1,
                                  allow_subset=True)

        geo_ts_repository = GeoTsRepositoryCollection([ar1, ar2])

        simulator = DefaultSimulator(region_id, interpolation_id,
                                     region_model_repository,
                                     geo_ts_repository, interp_repos, None)
        n_cells = simulator.region_model.size()
        state_repos = DefaultStateRepository(model_t, n_cells)
        simulator.run(time_axis, state_repos.get_state(0))
Example 18
    def test_compute_lwc_percentiles(self):
        # Simulation time axis
        year, month, day, hour = 2010, 9, 1, 0
        dt = api.deltahours(24)
        n_steps = 400
        utc = api.Calendar()  # No offset gives Utc
        t0 = utc.time(api.YMDhms(year, month, day, hour))
        time_axis = api.Timeaxis(t0, dt, n_steps)

        # Some fake ids
        region_id = 0
        interpolation_id = 0

        # Simulation coordinate system
        epsg = "32633"

        # Model
        model_t = pt_gs_k.PTGSKModel

        # Configs and repositories
        dataset_config_file = path.join(path.dirname(__file__), "netcdf",
                                        "atnsjoen_datasets.yaml")
        region_config_file = path.join(path.dirname(__file__), "netcdf",
                                       "atnsjoen_calibration_region.yaml")
        region_config = RegionConfig(region_config_file)
        model_config = ModelConfig(self.model_config_file)
        dataset_config = YamlContent(dataset_config_file)
        region_model_repository = RegionModelRepository(region_config, model_config, model_t, epsg)
        interp_repos = InterpolationParameterRepository(model_config)
        netcdf_geo_ts_repos = []
        for source in dataset_config.sources:
            station_file = source["params"]["stations_met"]
            netcdf_geo_ts_repos.append(GeoTsRepository(source["params"], station_file, ""))
        geo_ts_repository = GeoTsRepositoryCollection(netcdf_geo_ts_repos)

        # Construct target discharge series
        simulator = DefaultSimulator(region_id, interpolation_id, region_model_repository,
                                     geo_ts_repository, interp_repos, None)
        n_cells = simulator.region_model.size()
        state_repos = DefaultStateRepository(model_t, n_cells)
        cid = 1
        simulator.region_model.set_state_collection(cid, True)
        simulator.run(time_axis, state_repos.get_state(0))
        self.assertAlmostEqual(simulator.region_model.cells[0].rc.pe_output.values[0], 0.039768354, 5) # just to verify pot.evap by regression, mm/h

        percentile_list = [10, 25, 50, 75, 90]
Example 19
    def test_idw_precipitation_transform_from_set_to_grid(self):
        """
        Test IDW interpolation transforms precipitation time-series according to time-axis and range.

        """
        idw_p = api.IDWPrecipitationParameter()
        self.assertEqual(idw_p.max_distance, 200000)
        self.assertEqual(idw_p.max_members, 20)
        fx = lambda z: [15 for x in range(self.n)]
        arome_grid = self._create_geo_precipitation_grid(
            self.nx, self.ny, self.dx_arome, fx)
        dest_grid_points = self._create_geo_point_grid(self.mnx, self.mny,
                                                       self.dx_model)
        ta = api.Timeaxis(self.t, self.d * 3, int(self.n / 3))
        dest_grid = api.idw_precipitation(arome_grid, dest_grid_points, ta,
                                          idw_p)
        self.assertIsNotNone(dest_grid)
        self.assertEqual(len(dest_grid), self.mnx * self.mny)
Example 20
    def test_timeseries_vector(self):
        c = api.Calendar()
        t0 = api.utctime_now()
        dt = api.deltahours(1)
        n = 240
        ta = api.Timeaxis(t0, dt, n)

        a = api.Timeseries(ta=ta, fill_value=3.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
        b = api.Timeseries(ta=ta, fill_value=2.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)

        v = api.TsVector()
        v.append(a)
        v.append(b)

        self.assertEqual(len(v), 2)
        self.assertAlmostEqual(v[0].value(0), 3.0, "expect first ts to be 3.0")
        aa = api.Timeseries(ta=a.time_axis, values=a.values,
                            point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)  # copy construct (really copy the values!)
        a.fill(1.0)
        self.assertAlmostEqual(v[0].value(0), 1.0, "expect first ts to be 1.0, because the vector keeps a reference ")
        self.assertAlmostEqual(aa.value(0), 3.0)
Example 21
    def test_ts_transform(self):
        dv = np.arange(self.ta.size())
        v = api.DoubleVector.from_numpy(dv)
        t = api.UtcTimeVector()
        for i in range(self.ta.size()):
            t.push_back(self.ta(i).start)
        # t.push_back(self.ta(self.ta.size()-1).end) #important! needs n+1 points to determine n periods in the timeaxis
        t_start = self.ta.total_period().start
        dt = api.deltahours(1)
        tax = api.Timeaxis(t_start + api.deltaminutes(30), dt, self.ta.size())
        tsf = api.TsFactory()
        ts1 = tsf.create_point_ts(self.ta.size(), self.t, self.d, v)
        ts2 = tsf.create_time_point_ts(self.ta.total_period(), t, v)
        ts3 = api.TsFixed(tax, v)

        tst = api.TsTransform()
        tt1 = tst.to_average(t_start, dt, tax.size(), ts1)
        tt2 = tst.to_average(t_start, dt, tax.size(), ts2)
        tt3 = tst.to_average(t_start, dt, tax.size(), ts3)
        self.assertEqual(tt1.size(), tax.size())
        self.assertEqual(tt2.size(), tax.size())
        self.assertEqual(tt3.size(), tax.size())
Example 22
    @classmethod
    def calc_alpha(cls, T):
        alpha = np.zeros(T.shape, dtype='float')
        # alpha[T <= Tice] = 0.
        alpha[T >= cls.__T0] = 1.
        indx = (T < cls.__T0) & (T > cls.__Tice)
        alpha[indx] = np.square((T[indx] - cls.__Tice) / (cls.__T0 - cls.__Tice))
        return alpha

    @classmethod
    def calc_RH(cls, T, Td, p):
        alpha = cls.calc_alpha(T)       # phase-weighting factor in [0, 1]
        qsat = cls.calc_q(T, p, alpha)  # saturation specific humidity at T
        q = cls.calc_q(Td, p, alpha)    # specific humidity at dew point Td
        return q / qsat

if __name__ == "__main__":
    rconf = {'EPSG': 32633}
    params = {'data_dir': 'D:/users/ysa/shyft_main/shyft-data',
              'stations_met': 'netcdf/orchestration-testdata/All_variables_z_201408_5-31E_58-71N_0.75.nc'}
    utc = api.Calendar()  # No offset gives Utc
    time_axis = api.Timeaxis(utc.time(api.YMDhms(2014, 8, 1, 2)), api.deltahours(3), 10 * 8)
    #grid_spec = (xll, yll, delta x, delta y, nx, ny)
    xll, yll, dx, dy, nx, ny = 266000, 6960000, 1000.0, 1000.0, 109, 80
    #bbox =[[x_min, x_max, x_max, x_min],
    #       [y_min, y_min, y_max, y_max]]
    bbox =[[xll, xll+dx*nx, xll+dx*nx, xll],
           [yll, yll, yll+dy*ny, yll+dy*ny]]
    repo = ERAInterimDataRepository(params, rconf)
    res = repo.get_timeseries(['temperature', 'wind_speed', 'relative_humidity', 'radiation', 'precipitation'],
                              time_axis.total_period(),
                              geo_location_criteria=bbox)
Example 23
    def test_model_initialize_and_run(self):
        num_cells = 20
        model_type = pt_gs_k.PTGSKModel
        model = self.build_model(model_type, pt_gs_k.PTGSKParameter, num_cells)
        self.assertEqual(model.size(), num_cells)
        # now modify snow_cv forest_factor to 0.1
        region_parameter = model.get_region_parameter()
        region_parameter.gs.snow_cv_forest_factor = 0.1
        region_parameter.gs.snow_cv_altitude_factor = 0.0001
        self.assertEqual(region_parameter.gs.snow_cv_forest_factor, 0.1)
        self.assertEqual(region_parameter.gs.snow_cv_altitude_factor, 0.0001)

        self.assertAlmostEqual(region_parameter.gs.effective_snow_cv(1.0, 0.0),
                               region_parameter.gs.snow_cv + 0.1)
        self.assertAlmostEqual(
            region_parameter.gs.effective_snow_cv(1.0, 1000.0),
            region_parameter.gs.snow_cv + 0.1 + 0.1)
        cal = api.Calendar()
        time_axis = api.Timeaxis(cal.time(api.YMDhms(2015, 1, 1, 0, 0, 0)),
                                 api.deltahours(1), 240)
        model_interpolation_parameter = api.InterpolationParameter()
        # degC/m, so -0.5 degC/100m
        model_interpolation_parameter.temperature_idw.default_temp_gradient = -0.005
        # if possible use closest neighbor points and solve gradient using equation,(otherwise default min/max height)
        model_interpolation_parameter.temperature_idw.gradient_by_equation = True
        # Max number of temperature sources used for one interpolation
        model_interpolation_parameter.temperature_idw.max_members = 6
        # 20 km is max distance
        model_interpolation_parameter.temperature_idw.max_distance = 20000
        # Pure linear interpolation
        model_interpolation_parameter.temperature_idw.distance_measure_factor = 1.0
        # This enables IDW with default temperature gradient.
        model_interpolation_parameter.use_idw_for_temperature = True
        self.assertAlmostEqual(
            model_interpolation_parameter.precipitation.scale_factor,
            1.02)  # just verify this one is as before change to scale_factor
        model.run_interpolation(
            model_interpolation_parameter, time_axis,
            self.create_dummy_region_environment(
                time_axis,
                model.get_cells()[int(num_cells / 2)].geo.mid_point()))
        s0 = pt_gs_k.PTGSKStateVector()
        for i in range(num_cells):
            si = pt_gs_k.PTGSKState()
            si.kirchner.q = 40.0
            s0.append(si)
        model.set_states(s0)
        model.set_state_collection(-1, True)  # enable state collection for all cells
        model.run_cells()
        cids = api.IntVector()  # optional, we can add selective catchment_ids here
        sum_discharge = model.statistics.discharge(cids)

        self.assertIsNotNone(sum_discharge)
        avg_temperature = model.statistics.temperature(cids)
        avg_precipitation = model.statistics.precipitation(cids)
        self.assertIsNotNone(avg_precipitation)
        for time_step in range(time_axis.size()):
            precip_raster = model.statistics.precipitation(
                cids, time_step)  # example raster output
            self.assertEqual(precip_raster.size(), num_cells)
        avg_gs_sca = model.gamma_snow_response.sca(cids)  # snow-covered area output
        self.assertIsNotNone(avg_gs_sca)
        # lwc surface_heat alpha melt_mean melt iso_pot_energy temp_sw
        avg_gs_albedo = model.gamma_snow_state.albedo(cids)
        self.assertIsNotNone(avg_gs_albedo)
        self.assertEqual(avg_temperature.size(), time_axis.size(),
                         "expect results equal to time-axis size")
        copy_region_model = model.__class__(model)
        self.assertIsNotNone(copy_region_model)
        copy_region_model.run_cells()  # just to verify we can copy and run the new model
Example 24
    def test_snow_and_ground_water_response_calibration(self):
        """
        Test dual calibration strategy:
            * First fit the three Kirchner parameters for
              ground water response during July, August, and
              September.
            * Then fit two snow routine parameters (tx and max_water)
              from November to April.
        """
        # Simulation time axis
        year, month, day, hour = 2010, 9, 1, 0
        dt = api.deltahours(24)
        n_steps = 400
        utc = api.Calendar()  # No offset gives Utc
        t0 = utc.time(api.YMDhms(year, month, day, hour))
        time_axis = api.Timeaxis(t0, dt, n_steps)

        # Some fake ids
        region_id = 0
        interpolation_id = 0

        # Simulation coordinate system
        epsg = "32633"

        # Model
        model_t = pt_gs_k.PTGSKOptModel

        # Configs and repositories
        dataset_config_file = path.join(path.dirname(__file__), "netcdf",
                                        "atnsjoen_datasets.yaml")
        region_config_file = path.join(path.dirname(__file__), "netcdf",
                                       "atnsjoen_calibration_region.yaml")
        region_config = RegionConfig(region_config_file)
        model_config = ModelConfig(self.model_config_file)
        dataset_config = YamlContent(dataset_config_file)
        region_model_repository = RegionModelRepository(
            region_config, model_config, model_t, epsg)
        interp_repos = InterpolationParameterRepository(model_config)
        netcdf_geo_ts_repos = []
        for source in dataset_config.sources:
            station_file = source["params"]["stations_met"]
            netcdf_geo_ts_repos.append(
                GeoTsRepository(source["params"], station_file, ""))
        geo_ts_repository = GeoTsRepositoryCollection(netcdf_geo_ts_repos)

        # Construct target discharge series
        simulator = DefaultSimulator(region_id, interpolation_id,
                                     region_model_repository,
                                     geo_ts_repository, interp_repos, None)
        n_cells = simulator.region_model.size()
        state_repos = DefaultStateRepository(model_t, n_cells)
        simulator.run(time_axis, state_repos.get_state(0))
        cid = 1
        target_discharge = simulator.region_model.statistics.discharge([cid])

        # Construct kirchner parameters
        param = simulator.region_model.parameter_t(
            simulator.region_model.get_region_parameter())
        print_param("True solution", param)

        kirchner_param_min = simulator.region_model.parameter_t(param)
        kirchner_param_max = simulator.region_model.parameter_t(param)
        # Kirchner parameters are quite abstract (no physical meaning), so simply scale them
        kirchner_param_min.kirchner.c1 *= 0.8
        kirchner_param_min.kirchner.c2 *= 0.8
        kirchner_param_min.kirchner.c3 *= 0.8
        kirchner_param_max.kirchner.c1 *= 1.2
        kirchner_param_max.kirchner.c2 *= 1.2
        kirchner_param_max.kirchner.c3 *= 1.2
        # kirchner_t_start = utc.time(api.YMDhms(2011, 4, 1, 0))
        # kirchner_time_axis = api.Timeaxis(kirchner_t_start, dt, 150)
        kirchner_time_axis = time_axis

        # Construct gamma snow parameters (realistic tx and max_lwc)
        gamma_snow_param_min = simulator.region_model.parameter_t(param)
        gamma_snow_param_max = simulator.region_model.parameter_t(param)
        gamma_snow_param_min.gs.tx = -1.0  # Min snow/rain temperature threshold
        gamma_snow_param_min.gs.max_water = 0.05  # Min 5% max water in snow in coastal regions
        gamma_snow_param_max.gs.tx = 1.0
        gamma_snow_param_max.gs.max_water = 0.25  # Max 25% max water content, or we get too little melt
        gs_t_start = utc.time(api.YMDhms(2010, 11, 1, 0))
        gs_time_axis = api.Timeaxis(gs_t_start, dt, 250)
        # gs_time_axis = time_axis

        # Find parameters
        target_spec = api.TargetSpecificationPts(target_discharge,
                                                 api.IntVector([cid]), 1.0,
                                                 api.KLING_GUPTA)
        target_spec_vec = api.TargetSpecificationVector()  # TODO: we currently don't have a list initializer for vectors
        target_spec_vec.append(target_spec)
        # Construct a fake, perturbed starting point for calibration
        p_vec = [param.get(i) for i in range(param.size())]
        for i, name in enumerate([param.get_name(i) for i in range(len(p_vec))]):
            if name not in ("c1", "c2", "c3", "TX", "max_water"):
                continue
            if name in ("c1", "c2", "c3"):
                p_vec[i] = random.uniform(0.8 * p_vec[i], 1.2 * p_vec[i])
            elif name == "TX":
                p_vec[i] = random.uniform(gamma_snow_param_min.gs.tx,
                                          gamma_snow_param_max.gs.tx)
            elif name == "max_water":
                p_vec[i] = random.uniform(gamma_snow_param_min.gs.max_water,
                                          gamma_snow_param_max.gs.max_water)
        param.set(p_vec)
        print_param("Initial guess", param)
        # Two-pass optimization: first for the ground water response, then for the snow routine parameters
        kirchner_p_opt = simulator.optimize(kirchner_time_axis,
                                            state_repos.get_state(0),
                                            target_spec_vec, param,
                                            kirchner_param_min,
                                            kirchner_param_max)
        gamma_snow_p_opt = simulator.optimize(gs_time_axis,
                                              state_repos.get_state(0),
                                              target_spec_vec, kirchner_p_opt,
                                              gamma_snow_param_min,
                                              gamma_snow_param_max)
        print_param("Half way result", kirchner_p_opt)
        print_param("Result", gamma_snow_p_opt)

        simulator.region_model.set_catchment_parameter(cid, gamma_snow_p_opt)
        simulator.run(time_axis, state_repos.get_state(0))
        found_discharge = simulator.region_model.statistics.discharge([cid])

        t_vs = np.array(target_discharge.v)
        t_ts = np.array(
            [target_discharge.time(i) for i in range(target_discharge.size())])
        f_vs = np.array(found_discharge.v)
        f_ts = np.array(
            [found_discharge.time(i) for i in range(found_discharge.size())])
Example 25
    def setUp(self):
        self.c = api.Calendar()
        self.d = api.deltahours(1)
        self.n = 24
        self.t = self.c.trim(api.utctime_now(), self.d)
        self.ta = api.Timeaxis(self.t, self.d, self.n)
Example 26
    def test_can_run_bayesian_kriging_from_observation_sites_to_1km_grid(self):
        """
        Somewhat more complex test, first do kriging of 1 timeseries out to grid (expect same values flat)
        then do kriging of 3 time-series out to the grid (expect different values, no real verification here since this is done elsewhere

        """
        # arrange the test with a btk_parameter, a source grid and a destination grid
        btk_parameter = api.BTKParameter(temperature_gradient=-0.6,
                                         temperature_gradient_sd=0.25,
                                         sill=25.0,
                                         nugget=0.5,
                                         range=20000.0,
                                         zscale=20.0)
        fx = lambda z: api.DoubleVector.from_numpy(np.zeros(self.n))

        grid_1km_1 = self._create_geo_point_grid(self.mnx, self.mny,
                                                 self.dx_model)
        grid_1km_3 = self._create_geo_point_grid(self.mnx, self.mny,
                                                 self.dx_model)

        observation_sites = api.TemperatureSourceVector()
        ta_obs = api.Timeaxis(self.t, self.d * 3, int(self.n / 3))
        ta_grid = api.Timeaxis(self.t, self.d, self.n)

        ts_site_1 = api.Timeseries(
            ta_obs,
            values=api.DoubleVector.from_numpy(
                (20.0 - 0.6 * 5.0 / 100) + 3.0 * np.sin(
                    np.arange(start=0, stop=ta_obs.size(), step=1) * 2 *
                    np.pi / 8.0 - np.pi / 2.0)))
        ts_site_2 = api.Timeseries(
            ta_obs,
            values=api.DoubleVector.from_numpy(
                (20.0 - 0.6 * 500.0 / 100) + 3.0 * np.sin(
                    np.arange(start=0, stop=ta_obs.size(), step=1) * 2 *
                    np.pi / 8.0 - np.pi / 2.0)))
        ts_site_3 = api.Timeseries(
            ta_obs,
            values=api.DoubleVector.from_numpy(
                (20.0 - 0.6 * 1050.0 / 100) + 3.0 * np.sin(
                    np.arange(start=0, stop=ta_obs.size(), step=1) * 2 *
                    np.pi / 8.0 - np.pi / 2.0)))

        observation_sites.append(
            api.TemperatureSource(api.GeoPoint(50.0, 50.0, 5.0), ts_site_1))

        # act 1: just one time-series put into the system, should give same ts (true-averaged) in all the grid-1km_ts (which can be improved using std.gradient..)
        grid_1km_1ts = api.bayesian_kriging_temperature(
            observation_sites, grid_1km_1, ta_grid, btk_parameter)

        # assert 1:
        self.assertEqual(len(grid_1km_1ts), self.mnx * self.mny)
        expected_grid_1ts_values = ts_site_1.average(
            api.Timeaxis2(ta_grid)).values.to_numpy()

        for gts in grid_1km_1ts:
            self.assertEqual(gts.ts.size(), ta_grid.size())
            self.assertTrue(
                np.allclose(expected_grid_1ts_values,
                            gts.ts.values.to_numpy()))

        observation_sites.append(
            api.TemperatureSource(api.GeoPoint(9000.0, 500.0, 500), ts_site_2))
        observation_sites.append(
            api.TemperatureSource(api.GeoPoint(9000.0, 12000.0, 1050.0),
                                  ts_site_3))

        grid_1km_3ts = api.bayesian_kriging_temperature(
            observation_sites, grid_1km_3, ta_grid, btk_parameter)

        self.assertEqual(len(grid_1km_3ts), self.mnx * self.mny)

        for gts in grid_1km_3ts:
            self.assertEqual(gts.ts.size(), ta_grid.size())
            self.assertFalse(
                np.allclose(expected_grid_1ts_values,
                            gts.ts.values.to_numpy()))
Example 27
    def construct_repos(self, overrides):
        """
        Construct repositories
        """
        # Read region, model and datasets config files
        region_config_file = os.path.join(self.config_dir,
                                          self.region_config_file)
        self.region_config = RegionConfig(region_config_file)

        self.model_config_file = os.path.join(self.config_dir,
                                              self.model_config_file)
        model_config = ModelConfig(self.model_config_file,
                                   overrides=overrides.get("model", {}))

        datasets_config_file = os.path.join(self.config_dir,
                                            self.datasets_config_file)
        datasets_config = YamlContent(datasets_config_file)

        interpolation_config_file = os.path.join(
            self.config_dir, self.interpolation_config_file)
        interpolation_config = InterpolationConfig(interpolation_config_file)

        # Construct RegionModelRepository
        self.region_model = region_model_repo_constructor(
            self.region_config.repository()['class'], self.region_config,
            model_config, self.region_model_id)

        # Construct InterpolationParameterRepository
        self.interp_repos = InterpolationParameterRepository(
            interpolation_config)

        # Construct GeoTsRepository
        self.geo_ts = geots_repo_from_config(
            datasets_config_file,
            self.region_config.domain()["EPSG"])

        # Construct destination repository
        self.dst_repo = []
        if hasattr(datasets_config, 'destinations'):
            for repo in datasets_config.destinations:
                repo['repository'] = target_repo_constructor(
                    repo['repository'], repo['params'])
                #[dst['time_axis'].update({'start_datetime': utctime_from_datetime(dst['time_axis']['start_datetime'])})
                # for dst in repo['1D_timeseries'] if dst['time_axis'] is not None]
                for dst in repo['1D_timeseries']:
                    if dst['time_axis'] is None:
                        dst.update({'time_axis': self.time_axis})
                    else:
                        dst.update({'time_axis': api.Timeaxis(
                            utctime_from_datetime(dst['time_axis']['start_datetime']),
                            dst['time_axis']['time_step_length'],
                            dst['time_axis']['number_of_steps'])})
                self.dst_repo.append(repo)

        # Construct reference data repository
        self.ref_repo = []
        if hasattr(self, 'references'):
            for repo in self.references:
                repo_ = target_repo_constructor(repo['repository'],
                                                repo['params'])
                for dst in repo['1D_timeseries']:
                    dst.update({'repo': repo_})
                self.ref_repo.extend(repo['1D_timeseries'])

        # Construct StateRepository
        if hasattr(self, 'initial_state'):
            self.initial_state_repo = self.initial_state['repository'][
                'class'](**self.initial_state['repository']['params'])
        if hasattr(self, 'end_state'):
            self.end_state_repo = self.end_state['repository']['class'](
                **self.end_state['repository']['params'])
Example 28
    def dacc_time(t):
        t0 = int(t[0])
        t1 = int(t[1])
        # differencing accumulated values yields one period fewer than there are time points
        return noop_time(t) if issubset else api.Timeaxis(t0, t1 - t0, len(t) - 1)
Example 29
    def noop_time(t):
        t0 = int(t[0])
        t1 = int(t[1])
        return api.Timeaxis(t0, t1 - t0, len(t))
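The two helpers above differ only in the number of periods: noop_time gives one period per time point, while dacc_time (when issubset is false) gives one fewer, matching the intervals left after de-accumulating. A quick illustration under the assumption of a three-point hourly time vector:

t = [3600, 7200, 10800]   # hypothetical epoch seconds, 1 h apart
noop_time(t).size()       # -> 3
# with issubset == False: dacc_time(t).size() -> 2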