Example #1
    def test_get_ensemble(self):
        EPSG = 32633
        upper_left_x = 436100.0
        upper_left_y = 7417800.0
        nx = 74
        ny = 94
        dx = 1000.0
        dy = 1000.0
        # Period start
        n_hours = 30
        utc = api.Calendar()  # No offset gives Utc
        t0 = utc.time(2015, 7, 26)
        period = api.UtcPeriod(t0, t0 + api.deltahours(n_hours))
        t_c = t0 + api.deltahours(1)

        base_dir = path.join(shyftdata_dir, "netcdf", "arome")
        # pattern = "fc*.nc"
        pattern = "fc_(\d{4})(\d{2})(\d{2})[T_](\d{2})Z?.nc$"
        bpoly = box(upper_left_x, upper_left_y - ny * dy,
                    upper_left_x + nx * dx, upper_left_y)
        try:
            repos = MetNetcdfDataRepository(EPSG, base_dir, filename=pattern)
            data_names = ("temperature", "wind_speed", "relative_humidity")
            ensemble = repos.get_forecast_ensemble(data_names,
                                                   period,
                                                   t_c,
                                                   geo_location_criteria=bpoly)
            self.assertTrue(isinstance(ensemble, list))
            self.assertEqual(len(ensemble), 10)
        except MetNetcdfDataRepositoryError as adre:
            self.skipTest(
                "(test inconclusive- missing arome-data {0})".format(adre))
Example #2
    def test_no_point_inside_polygon_bounds(self):
        EPSG, bbox, bpoly = self.arome_epsg_bbox
        bounds = bpoly.bounds
        bpoly = box(bounds[0], 6010000.0, bounds[2], 6035000.0)
        # Period start
        year = 2015
        month = 8
        day = 24
        hour = 6
        n_hours = 30
        date_str = "{}{:02}{:02}_{:02}".format(year, month, day, hour)
        utc = api.Calendar()  # No offset gives Utc
        t0 = api.YMDhms(year, month, day, hour)
        period = api.UtcPeriod(utc.time(t0),
                               utc.time(t0) + api.deltahours(n_hours))

        base_dir = path.join(shyftdata_dir, "repository",
                             "arome_data_repository")
        filename = "arome_metcoop_red_default2_5km_{}.nc".format(date_str)
        reader = MetNetcdfDataRepository(EPSG,
                                         base_dir,
                                         filename=filename,
                                         padding=0.0)
        data_names = ("temperature", "wind_speed", "precipitation",
                      "relative_humidity")
        with self.assertRaises(MetNetcdfDataRepositoryError) as context:
            reader.get_timeseries(data_names,
                                  period,
                                  geo_location_criteria=bpoly)
        self.assertEqual(
            "No points in dataset which are within the bounding box of the geo_location_criteria polygon.",
            context.exception.args[0])
Example #3
    def test_utc_period_is_None(self):
        EPSG, bbox, bpoly = self.arome_epsg_bbox
        # Period start
        utc = api.Calendar()  # No offset gives Utc
        t0 = utc.time(2015, 8, 24, 6)
        t0_ymdhs = utc.calendar_units(t0)
        date_str = "{}{:02}{:02}_{:02}".format(
            *[getattr(t0_ymdhs, k) for k in ['year', 'month', 'day', 'hour']])
        period = None

        base_dir = path.join(shyftdata_dir, "repository",
                             "arome_data_repository")
        filename = "arome_metcoop_red_default2_5km_{}.nc".format(date_str)
        reader = MetNetcdfDataRepository(EPSG, base_dir, filename=filename)
        src_name = "temperature"
        var_name_in_file = [
            k for k, v in reader._arome_shyft_map.items() if v == src_name
        ][0]
        with netCDF4.Dataset(path.join(base_dir, filename)) as ds:
            var = ds.variables[var_name_in_file]
            nb_timesteps = var.shape[var.dimensions.index('time')]
        srcs = reader.get_timeseries((src_name, ),
                                     period,
                                     geo_location_criteria=bpoly)
        self.assertEqual(srcs[src_name][0].ts.size(), nb_timesteps)
Example #4
    def test_wrong_file(self):
        with self.assertRaises(MetNetcdfDataRepositoryError) as context:
            utc = api.Calendar()  # No offset gives Utc
            t0 = api.YMDhms(2015, 12, 25, 18)
            period = api.UtcPeriod(utc.time(t0),
                                   utc.time(t0) + api.deltahours(30))
            ar1 = MetNetcdfDataRepository(32632,
                                          shyftdata_dir,
                                          filename="plain_wrong.nc")
            ar1.get_timeseries(("temperature", ),
                               period,
                               geo_location_criteria=None)
        self.assertTrue(
            all(x in context.exception.args[0] for x in ["File", "not found"]))
Example #5
    def test_wrong_forecast(self):
        with self.assertRaises(MetNetcdfDataRepositoryError) as context:
            utc = api.Calendar()  # No offset gives Utc
            t0 = api.YMDhms(2015, 12, 25, 18)
            period = api.UtcPeriod(utc.time(t0),
                                   utc.time(t0) + api.deltahours(30))
            ar1 = MetNetcdfDataRepository(
                32632,
                shyftdata_dir,
                filename=r"plain_wrong_(\d{4})(\d{2})(\d{2})[T_](\d{2})Z?.nc")
            ar1.get_forecast(("temperature", ),
                             period,
                             utc.time(t0),
                             geo_location_criteria=None)
        self.assertTrue(
            all(x in context.exception.args[0] for x in
                ["No matches found for file_pattern = ", "and t_c = "]))
Example #6
    def test_get_forecast_collection(self):
        n_hours = 30
        dt = api.deltahours(1)
        utc = api.Calendar()  # No offset gives Utc
        tc = api.YMDhms(2015, 8, 24, 6)
        t0 = utc.time(tc)
        period = api.UtcPeriod(t0, t0 + api.deltahours(n_hours))
        date_str = "{}{:02}{:02}_{:02}".format(tc.year, tc.month, tc.day,
                                               tc.hour)

        epsg, bbox, bpoly = self.arome_epsg_bbox

        base_dir = path.join(shyftdata_dir, "repository",
                             "arome_data_repository")
        f1_elev = "arome_metcoop_red_default2_5km_{}.nc".format(date_str)
        f1 = "arome_metcoop_red_default2_5km_(\d{4})(\d{2})(\d{2})[T_](\d{2})Z?.nc$"
        #f2 = "arome_metcoop_red_test2_5km_{}.nc".format(date_str)
        f2 = "arome_metcoop_red_test2_5km_(\d{4})(\d{2})(\d{2})[T_](\d{2})Z?.nc$"

        ar1 = MetNetcdfDataRepository(epsg,
                                      base_dir,
                                      filename=f1,
                                      allow_subset=True)
        ar2 = MetNetcdfDataRepository(epsg,
                                      base_dir,
                                      filename=f2,
                                      elevation_file=f1_elev,
                                      allow_subset=True)

        geo_ts_repository = GeoTsRepositoryCollection([ar1, ar2])
        source_names = ("temperature", "radiation")
        sources = geo_ts_repository.get_forecast(source_names,
                                                 period,
                                                 t0,
                                                 geo_location_criteria=bpoly)
        self.assertTrue(all([x in source_names for x in sources]))

        geo_ts_repository = GeoTsRepositoryCollection([ar1, ar2],
                                                      reduce_type="add")
        with self.assertRaises(GeoTsRepositoryCollectionError) as context:
            sources = geo_ts_repository.get_forecast(
                ("temperature", "radiation"),
                period,
                t0,
                geo_location_criteria=bpoly)
Example #7
    def test_wrong_elevation_file(self):
        with self.assertRaises(MetNetcdfDataRepositoryError) as context:
            MetNetcdfDataRepository(32632,
                                    shyftdata_dir,
                                    filename="",
                                    elevation_file="plain_wrong.nc")
        self.assertTrue(
            all(x in context.exception.args[0]
                for x in ["Elevation file", "not found"]))
Example #8
    def __init__(self,
                 epsg,
                 filename,
                 padding=15000.,
                 flattened=False,
                 allow_year_shift=True,
                 cache_data=True):
        """
        Construct the netCDF4 dataset reader for concatenated gridded forecasts and initialize data retrieval.

        Parameters
        ----------
        epsg: string
            Unique coordinate system id for result coordinates. Currently "32632" and "32633" are supported.
        filename: string
            Path to netcdf file containing concatenated forecasts
        flattened: bool
            Flags whether grid_points are flattened
        allow_year_shift: bool
            Flags whether shift of years is allowed
        """
        self.allow_year_shift = allow_year_shift
        self.cache_data = cache_data
        self.cache = None
        if flattened:
            self.wx_repo = ConcatDataRepository(epsg,
                                                filename,
                                                padding=padding)
        else:
            self.wx_repo = MetNetcdfDataRepository(epsg,
                                                   None,
                                                   filename,
                                                   padding=padding)
            filename = os.path.expandvars(filename)
            with Dataset(filename) as dataset:
                time = dataset.variables.get("time", None)
                time = convert_netcdf_time(time.units, time)
                self.wx_repo.time = time

        self.source_type_map = {
            "relative_humidity": api.RelHumSource,
            "temperature": api.TemperatureSource,
            "precipitation": api.PrecipitationSource,
            "radiation": api.RadiationSource,
            "wind_speed": api.WindSpeedSource
        }

        self.source_vector_map = {
            "relative_humidity": api.RelHumSourceVector,
            "temperature": api.TemperatureSourceVector,
            "precipitation": api.PrecipitationSourceVector,
            "radiation": api.RadiationSourceVector,
            "wind_speed": api.WindSpeedSourceVector
        }
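The two maps at the end of this constructor translate variable names into Shyft source and source-vector types. Below is a minimal, hedged sketch of how such maps are typically used to wrap (geo-point, time-series) pairs into a typed source vector; the helper function, the pair layout and the import path are assumptions for illustration, not part of the class shown above.

    from shyft import api  # import path assumed, as used throughout these examples

    def make_source_vector(repo, key, geo_ts_pairs):
        # `repo` is an instance of the class above; `key` is e.g. "temperature".
        # `geo_ts_pairs` is assumed to be an iterable of (api.GeoPoint, time series) pairs.
        vec = repo.source_vector_map[key]()        # e.g. api.TemperatureSourceVector()
        source_type = repo.source_type_map[key]    # e.g. api.TemperatureSource
        for geo_point, ts in geo_ts_pairs:
            vec.append(source_type(geo_point, ts))  # wrap location + series into a typed source
        return vec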
Example #9
    def test_get_ensemble_forecast_collection(self):
        EPSG = 32633
        upper_left_x = 436100.0
        upper_left_y = 7417800.0
        nx = 74
        ny = 94
        dx = 1000.0
        dy = 1000.0
        t0 = api.YMDhms(2015, 7, 26, 0)
        n_hours = 30
        utc = api.Calendar()  # No offset gives Utc
        period = api.UtcPeriod(utc.time(t0),
                               utc.time(t0) + api.deltahours(n_hours))
        t_c = utc.time(t0) + api.deltahours(1)

        base_dir = path.join(shyftdata_dir, "netcdf", "arome")
        # pattern = "fc*.nc"
        pattern = "fc_(\d{4})(\d{2})(\d{2})[T_](\d{2})Z?.nc$"
        bpoly = box(upper_left_x, upper_left_y - ny * dy,
                    upper_left_x + nx * dx, upper_left_y)
        try:
            ar1 = MetNetcdfDataRepository(EPSG, base_dir, filename=pattern)
            ar2 = MetNetcdfDataRepository(EPSG, base_dir, filename=pattern)
            repos = GeoTsRepositoryCollection([ar1, ar2])
            data_names = ("temperature", "wind_speed", "relative_humidity")
            ensemble = repos.get_forecast_ensemble(data_names, period, t_c,
                                                   None)
            self.assertTrue(isinstance(ensemble, list))
            self.assertEqual(len(ensemble), 10)
            with self.assertRaises(GeoTsRepositoryCollectionError) as context:
                repos = GeoTsRepositoryCollection([ar1, ar2],
                                                  reduce_type="add")
                repos.get_forecast_ensemble(data_names,
                                            period,
                                            t_c,
                                            geo_location_criteria=bpoly)
            self.assertEqual("Only replace is supported yet",
                             context.exception.args[0])
        except MetNetcdfDataRepositoryError as adre:
            self.skipTest(
                "(test inconclusive- missing arome-data {0})".format(adre))
Example #10
    def test_get_timeseries_collection(self):
        tc = api.YMDhms(2015, 8, 24, 6)
        n_hours = 30
        dt = api.deltahours(1)
        utc = api.Calendar()  # No offset gives Utc
        t0 = utc.time(tc)
        period = api.UtcPeriod(t0, t0 + api.deltahours(n_hours))
        date_str = "{}{:02}{:02}_{:02}".format(tc.year, tc.month, tc.day,
                                               tc.hour)

        epsg, bbox, bpoly = self.arome_epsg_bbox

        base_dir = path.join(shyftdata_dir, "repository",
                             "arome_data_repository")
        f1 = "arome_metcoop_red_default2_5km_{}.nc".format(date_str)
        f2 = "arome_metcoop_red_test2_5km_{}.nc".format(date_str)

        ar1 = MetNetcdfDataRepository(epsg,
                                      base_dir,
                                      filename=f1,
                                      allow_subset=True)
        ar2 = MetNetcdfDataRepository(epsg,
                                      base_dir,
                                      filename=f2,
                                      elevation_file=f1,
                                      allow_subset=True)

        geo_ts_repository = GeoTsRepositoryCollection([ar1, ar1, ar2])
        sources_replace = geo_ts_repository.get_timeseries(
            ("temperature", "radiation"), period, geo_location_criteria=bpoly)

        with self.assertRaises(GeoTsRepositoryCollectionError) as context:
            GeoTsRepositoryCollection([ar1, ar2], reduce_type="foo")

        geo_ts_repository = GeoTsRepositoryCollection([ar1, ar1, ar2],
                                                      reduce_type="add")
        sources_add = geo_ts_repository.get_timeseries(
            ("temperature", "radiation"), period, geo_location_criteria=bpoly)
        self.assertGreater(len(sources_add["temperature"]),
                           len(sources_replace["temperature"]))
Example #11
    def test_get_forecast(self):
        # Period start
        n_hours = 65
        utc = api.Calendar()  # No offset gives Utc
        t0 = utc.time(2015, 8, 24, 6)
        period1 = api.UtcPeriod(t0, t0 + api.deltahours(n_hours))
        period2 = api.UtcPeriod(
            t0 + api.deltahours(6),
            t0 + api.deltahours(6) + api.deltahours(n_hours))
        t_c1 = t0 + api.deltahours(1)
        t_c2 = t0 + api.deltahours(7)

        base_dir = path.join(shyftdata_dir, "repository",
                             "arome_data_repository")
        # pattern = "arome_metcoop*default2_5km_*.nc"
        pattern = "arome_metcoop_red_default2_5km_(\d{4})(\d{2})(\d{2})[T_](\d{2})Z?.nc$"
        EPSG, bbox, bpoly = self.arome_epsg_bbox

        repos = MetNetcdfDataRepository(EPSG, base_dir, filename=pattern)
        data_names = ("temperature", "wind_speed", "precipitation",
                      "relative_humidity")
        tc1_sources = repos.get_forecast(data_names,
                                         period1,
                                         t_c1,
                                         geo_location_criteria=bpoly)
        tc2_sources = repos.get_forecast(data_names,
                                         period2,
                                         t_c2,
                                         geo_location_criteria=bpoly)

        self.assertTrue(len(tc1_sources) == len(tc2_sources))
        self.assertTrue(set(tc1_sources) == set(data_names))
        self.assertTrue(tc1_sources["temperature"][0].ts.size() == n_hours + 1)

        tc1_precip = tc1_sources["precipitation"][0].ts
        tc2_precip = tc2_sources["precipitation"][0].ts

        self.assertEqual(tc1_precip.size(), n_hours)
        self.assertTrue(tc1_precip.time(0) != tc2_precip.time(0))
Example #12
    def test_tiny_bbox(self):
        EPSG, _, _ = self.arome_epsg_bbox

        x = 432425.910493  # x coord of one pt in test file
        y = 6819847.92879  # y coord of one pt in test file
        dxy = 1000.  # should be less than the grid resolution (2500 m) to enclose only one point
        bpoly = box(x - dxy, y - dxy, x + dxy,
                    y + dxy)  # a polygon containing only the above point; see the sketch after this example

        # Period start
        year = 2015
        month = 8
        day = 24
        hour = 6
        n_hours = 30
        date_str = "{}{:02}{:02}_{:02}".format(year, month, day, hour)
        utc = api.Calendar()  # No offset gives Utc
        t0 = api.YMDhms(year, month, day, hour)
        period = api.UtcPeriod(utc.time(t0),
                               utc.time(t0) + api.deltahours(n_hours))

        base_dir = path.join(shyftdata_dir, "repository",
                             "arome_data_repository")
        filename = "arome_metcoop_red_default2_5km_{}.nc".format(date_str)
        reader = MetNetcdfDataRepository(EPSG,
                                         base_dir,
                                         filename=filename,
                                         padding=0.0)
        data_names = ("temperature", "wind_speed", "precipitation",
                      "relative_humidity")
        try:
            tss = reader.get_timeseries(data_names,
                                        period,
                                        geo_location_criteria=bpoly)
        except MetNetcdfDataRepositoryError as err:
            self.fail(
                "reader.get_timeseries raised MetNetcdfDataRepositoryError('{}') "
                "unexpectedly.".format(err.args[0]))
        self.assertEqual(len(tss['temperature']), 1)
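A quick check of the comment above: with dxy = 1000 m the box is 2000 m wide, which is less than the 2.5 km grid spacing, so at most one grid point can fall inside it when padding is 0. A small sketch with shapely (the neighbour coordinates are hypothetical offsets of the listed point, not values read from the test file):

    from shapely.geometry import Point, box

    x, y, dxy, spacing = 432425.910493, 6819847.92879, 1000.0, 2500.0
    bpoly = box(x - dxy, y - dxy, x + dxy, y + dxy)  # 2000 m x 2000 m box around (x, y)

    # Treat (x, y) as a grid point and place hypothetical neighbours one grid spacing away.
    candidates = [Point(x + i * spacing, y + j * spacing)
                  for i in (-1, 0, 1) for j in (-1, 0, 1)]
    assert sum(bpoly.contains(p) for p in candidates) == 1  # only the centre point is inside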
Example #13
    def test_geo_location_criteria_is_None(self):
        EPSG, _, _ = self.arome_epsg_bbox
        # Period start
        year = 2015
        month = 8
        day = 24
        hour = 6
        n_hours = 30
        date_str = "{}{:02}{:02}_{:02}".format(year, month, day, hour)
        utc = api.Calendar()  # No offset gives Utc
        t0 = api.YMDhms(year, month, day, hour)
        period = api.UtcPeriod(utc.time(t0),
                               utc.time(t0) + api.deltahours(n_hours))

        base_dir = path.join(shyftdata_dir, "repository",
                             "arome_data_repository")
        filename = "arome_metcoop_red_default2_5km_{}.nc".format(date_str)
        reader = MetNetcdfDataRepository(EPSG, base_dir, filename=filename)
        data_names = ("temperature", "wind_speed", "precipitation",
                      "relative_humidity")
        with netCDF4.Dataset(path.join(base_dir, filename)) as ds:
            nb_pts_in_file = ds.dimensions['x'].size * ds.dimensions['y'].size
        srcs = reader.get_timeseries(data_names, period, None)
        self.assertEqual(len(srcs['temperature']), nb_pts_in_file)
Example #14
    def test_get_timeseries(self):
        """
        Simple regression test of arome data repository.
        """
        EPSG, bbox, bpoly = self.arome_epsg_bbox

        # Period start
        n_hours = 30
        t0 = api.YMDhms(2015, 8, 24, 0)
        date_str = "{}{:02}{:02}_{:02}".format(t0.year, t0.month, t0.day,
                                               t0.hour)
        utc = api.Calendar()  # No offset gives Utc
        period = api.UtcPeriod(utc.time(t0),
                               utc.time(t0) + api.deltahours(n_hours))

        base_dir = path.join(shyftdata_dir, "repository",
                             "arome_data_repository")
        f1 = "arome_metcoop_red_default2_5km_{}_diff_time_unit.nc".format(
            date_str)
        f2 = "arome_metcoop_red_test2_5km_{}.nc".format(date_str)

        ar1 = MetNetcdfDataRepository(EPSG, base_dir, filename=f1)
        ar2 = MetNetcdfDataRepository(EPSG,
                                      base_dir,
                                      filename=f2,
                                      elevation_file=f1)
        ar1_data_names = ("temperature", "wind_speed", "precipitation",
                          "relative_humidity")
        ar2_data_names = ("radiation", )
        sources = ar1.get_timeseries(ar1_data_names,
                                     period,
                                     geo_location_criteria=bpoly)
        self.assertTrue(len(sources) > 0)
        sources2 = ar2.get_timeseries(ar2_data_names,
                                      period,
                                      geo_location_criteria=bpoly)

        self.assertTrue(set(sources) == set(ar1_data_names))
        self.assertTrue(set(sources2) == set(ar2_data_names))
        self.assertTrue(sources["temperature"][0].ts.size() == n_hours + 1)
        r0 = sources2["radiation"][0].ts
        p0 = sources["precipitation"][0].ts
        temp0 = sources["temperature"][0].ts
        self.assertTrue(r0.size() == n_hours)
        self.assertTrue(p0.size() == n_hours)
        self.assertTrue(r0.time(0) == temp0.time(0))
        self.assertTrue(p0.time(0) == temp0.time(0))
        self.assertTrue(
            r0.time_axis.total_period().end == temp0.time(temp0.size() - 1))
        self.assertTrue(
            p0.time_axis.total_period().end == temp0.time(temp0.size() - 1))
        self.assertEqual(p0.time(0), period.start)
Example #15
    def test_run_arome_ensemble(self):
        # Simulation time axis
        utc = api.Calendar()  # No offset gives Utc
        t0 = utc.time(2015, 7, 26, 0)
        n_hours = 30
        dt = api.deltahours(1)
        time_axis = api.TimeAxisFixedDeltaT(t0, dt, n_hours)

        # Some dummy ids not needed for the netcdf based repositories
        region_id = 0
        interpolation_id = 0

        # Simulation coordinate system
        epsg = "32633"
        # Configs and repositories
        region_model_repository = CFRegionModelRepository(
            self.region_config, self.model_config)
        interp_repos = InterpolationParameterRepository(
            self.interpolation_config)
        base_dir = path.join(shyftdata_dir, "netcdf", "arome")
        pattern = "fc*.nc"
        try:
            geo_ts_repository = MetNetcdfDataRepository(epsg,
                                                        base_dir,
                                                        filename=pattern,
                                                        allow_subset=True)
        except Exception as e:
            print("**** test_run_arome_ensemble: Arome data missing or"
                  " wrong, test inconclusive ****")
            print("****{}****".format(e))
            self.skipTest(
                "**** test_run_arome_ensemble: Arome data missing or wrong, test "
                "inconclusive ****\n\t exception:{}".format(e))
        simulator = DefaultSimulator(region_id, interpolation_id,
                                     region_model_repository,
                                     geo_ts_repository, interp_repos, None)
        state_repos = DefaultStateRepository(simulator.region_model)
        simulators = simulator.create_ensembles(time_axis, t0,
                                                state_repos.get_state(0))
        for s in simulators:
            s.simulate()
Example #16
    def test_subsets(self):
        EPSG, bbox, bpoly = self.arome_epsg_bbox
        # Period start
        year = 2015
        month = 8
        day = 24
        hour = 6
        n_hours = 30
        date_str = "{}{:02}{:02}_{:02}".format(year, month, day, hour)
        utc = api.Calendar()  # No offset gives Utc
        t0 = api.YMDhms(year, month, day, hour)
        period = api.UtcPeriod(utc.time(t0),
                               utc.time(t0) + api.deltahours(n_hours))

        base_dir = path.join(shyftdata_dir, "repository",
                             "arome_data_repository")
        filename = "arome_metcoop_red_default2_5km_{}.nc".format(date_str)

        data_names = ("temperature", "wind_speed", "precipitation",
                      "relative_humidity", "radiation")
        allow_subset = False
        reader = MetNetcdfDataRepository(EPSG,
                                         base_dir,
                                         filename=filename,
                                         allow_subset=allow_subset)
        with self.assertRaises(MetNetcdfDataRepositoryError) as context:
            reader.get_timeseries(data_names, period, None)
        self.assertEqual("Could not find all data fields",
                         context.exception.args[0])
        allow_subset = True
        reader = MetNetcdfDataRepository(EPSG,
                                         base_dir,
                                         filename=filename,
                                         allow_subset=allow_subset)
        try:
            sources = reader.get_timeseries(data_names,
                                            period,
                                            geo_location_criteria=bpoly)
        except MetNetcdfDataRepositoryError as e:
            self.fail(
                "MetNetcdfDataRepository.get_timeseries(data_names, period, "
                "geo_location_criteria=bpoly) raised "
                "MetNetcdfDataRepositoryError('{}') unexpectedly.".format(e.args[0]))
        self.assertEqual(len(sources), len(data_names) - 1)
Example #17
    def test_transform_functions_variable_interval(self):
        """
        test the _transform_raw function.
        """
        EPSG, bbox, bpoly = self.arome_epsg_bbox

        # Period start
        n_hours = 30
        t0 = api.YMDhms(2015, 8, 24, 0)
        date_str = "{}{:02}{:02}_{:02}".format(t0.year, t0.month, t0.day,
                                               t0.hour)
        utc = api.Calendar()  # No offset gives Utc

        base_dir = path.join(shyftdata_dir, "repository",
                             "arome_data_repository")
        f1 = "arome_metcoop_red_default2_5km_{}_diff_time_unit.nc".format(
            date_str)
        ar1 = MetNetcdfDataRepository(EPSG, base_dir, filename=f1)
        np_raw_array = np.array(
            [  # 0  # 1 #  2 #  3
                [1.0, 2.0, 3.0, 4.0], [1.1, 2.1, 3.1, 4.1],
                [1.2, 2.2, 3.2, 4.2], [1.4, 2.5, 3.6, 4.7]
            ],
            dtype=np.float64)
        raw_values = {
            'wind_speed': (np_raw_array, 'wind_speed', 'm/s'),
            'rel_hum': (np_raw_array, 'relative_humidity_2m', '?'),
            'temperature': (273.15 + np_raw_array, 'air_temperature_2m', 'K'),
            'radiation':
            (3600.0 * np_raw_array,
             'integral_of_surface_downwelling_shortwave_flux_in_air_wrt_time',
             'W s/m2'),
            'prepitation_acc':
            (np_raw_array, 'precipitation_amount_acc', 'Mg/m^2'),
            'prepitation': (np_raw_array, 'precipitation_amount', 'mm')
        }
        raw_time = np.array([0, 3600, 7200, 7200 + 2 * 3600],
                            dtype=np.int64)  # last step is 2 hours!

        rd = ar1._transform_raw(raw_values, raw_time)
        ta3 = api.TimeAxis(api.UtcTimeVector(raw_time[:-1]),
                           api.time(int(raw_time[-1])))
        ta4 = api.TimeAxis(
            api.UtcTimeVector(raw_time),
            api.time(int(raw_time[-1] +
                         2 * 3600)))  # assume last step is also 2 hours
        e_precip_acc = np.array(
            [  # 0  # 1 #  2 #  3
                [100.0, 100.0, 100.0, 100.0],
                [100.0, 100.0, 100.0, 100.0],
                [100.0, 150.0, 200.0, 250.0],
            ],
            dtype=np.float64)
        e_precip = np.array(
            [  # 0  # 1 #  2 #  3
                [1.1, 2.1, 3.1, 4.1], [1.2, 2.2, 3.2, 4.2],
                [1.4, 2.5, 3.6, 4.7]
            ],
            dtype=np.float64)
        e_rad = np.array(
            [  # 0  # 1 #  2 #  3
                [0.1, 0.1, 0.1, 0.1],
                [0.1, 0.1, 0.1, 0.1],
                [0.1, 0.15, 0.2, 0.25],
            ],
            dtype=np.float64)
        e = {
            'wind_speed': (np_raw_array, ta4),
            'rel_hum': (np_raw_array, ta4),
            'temperature': (np_raw_array, ta4),
            'radiation': (e_rad, ta3),
            'prepitation_acc': (e_precip_acc, ta3),
            'prepitation': (e_precip, ta3)
        }

        self.assertIsNotNone(rd)
        for k, r in rd.items():
            self.assertTrue(k in e)
            self.assertEqual(r[1], e[k][1], "expect correct time-axis")
            self.assertTrue(np.allclose(r[0], e[k][0]),
                            "expect exact correct values")
Example #18
    def test_wrong_directory(self):
        with self.assertRaises(MetNetcdfDataRepositoryError) as context:
            MetNetcdfDataRepository(32632, "Foobar", filename="")
        self.assertEqual("No such directory 'Foobar'",
                         context.exception.args[0])