Example #1
0
 def test_time_large_number(self):
     """Round-trip a large value through api.time.

     Multiplying an epoch-seconds time by 1000 pushes the value far
     outside the usual range; construction, division back and str()
     must all work without raising.
     """
     a = 1000.0*api.time(1534832966.984426)
     self.assertIsNotNone(a)
     b = api.time(a)/1000.0
     self.assertIsNotNone(b)
     sb = str(b)
     self.assertTrue(len(sb) > 0)
Example #2
0
 def test_time_hash(self):
     """Equal api.time values must hash equal and work as dict keys."""
     iso = '2018-01-01T01:02:03Z'
     t0 = api.time(iso)
     t1 = api.time(iso)
     # equal values -> equal hashes
     self.assertEqual(hash(t0), hash(t1))
     # a dict keyed on one instance must be hit by the other
     lookup = {t0: 'A'}
     self.assertTrue(t0 in lookup)
     self.assertTrue(t1 in lookup)
Example #3
0
 def test_pow(self):
     """Verify pow(ts, num), pow(num, ts) and pow(ts, ts) variants."""
     def make_series(values):
         # fresh 3-step hourly axis per series, as in the original setup
         axis = TimeAxis(time('2018-01-01T00:00:00Z'), time(3600), 3)
         return TimeSeries(axis, DoubleVector(values), stair_case)

     a = make_series([1.0, 2.0, 3.0])
     b = make_series([2.0, 2.0, 2.0])
     cases = (
         ([1, 4, 9], a.pow(2.0)),
         ([1, 4, 9], pow(a, 2.0)),
         ([2, 4, 8], pow(2.0, a)),
         ([2, 4, 8], pow(b, a)),
         ([2, 4, 8], b.pow(a)),
     )
     for expected, result in cases:
         assert_array_almost_equal(expected, result.values.to_numpy())
 def test_create_source_vector_does_not_leak(self):
     """Repeatedly build and drop a large TemperatureSourceVector.

     Intended for manual memory profiling: compare process memory at
     the two checkpoint comments below; it should stay roughly constant,
     i.e. the vector and its contained time-series must not leak.
     """
     n = 365 * 24 * 1  # 1st checkpoint memory here,
     for _ in range(10):  # loop index unused; only repetition matters
         v = api.TemperatureSourceVector([
             api.TemperatureSource(
                 api.GeoPoint(0.0, 1.0, 2.0),
                 api.TimeSeries(api.TimeAxis(api.time(0), api.time(3600),
                                             n),
                                fill_value=float(x),
                                point_fx=api.POINT_AVERAGE_VALUE))
             for x in range(n)
         ])
         self.assertIsNotNone(v)
         del v
     # 2nd mem check here, should be approx same as first checkpoint
Example #5
0
 def test_forecasts_at_reference_times(self):
     """ForecastSelectionCriteria normalizes reference times to api.time
     and rejects a malformed criterion value.
     """
     test_ref = [0, 3600, 7200]
     fc = ForecastSelectionCriteria(forecasts_at_reference_times=test_ref)
     self.assertEqual(fc.criterion[0], 'forecasts_at_reference_times')
     for t1, t2 in zip(test_ref, fc.criterion[1]):
         # bug fix: the original `assertTrue(time(t1), t2)` never compared
         # the two values — assertTrue's second argument is just the
         # failure message, so the check always passed.
         self.assertEqual(time(t1), t2)
     with self.assertRaises(ForecastSelectionCriteriaError):
         ForecastSelectionCriteria(
             forecasts_that_intersect_period='xhmm throw')
Example #6
0
 def _validate(self):
     """Validate self._selected_criterion, a (name, value) pair.

     Normalizes time-like values in place (converting them with
     api.time) and raises ForecastSelectionCriteriaError when the value
     does not match the shape required by the criterion name.
     """
     k, v = self._selected_criterion
     # these four criteria all require the value to be an api.UtcPeriod;
     # collapse the previously copy-pasted branches into one
     period_criteria = (
         'forecasts_created_within_period',
         'forecasts_with_start_within_period',
         'forecasts_that_cover_period',
         'forecasts_that_intersect_period',
     )
     if k in period_criteria:
         if not isinstance(v, api.UtcPeriod):
             raise ForecastSelectionCriteriaError(
                 "'{}' selection criteria should be of type api.UtcPeriod.".format(k)
             )
     elif k == 'latest_available_forecasts':
         # bug fix: validate the shape *before* mutating — the original
         # indexed v['forecasts_older_than'] first, so a malformed value
         # raised TypeError/KeyError instead of the documented error
         if not (isinstance(v, dict)
                 and 'forecasts_older_than' in v
                 and isinstance(v.get('number_of_forecasts'), int)):
             raise ForecastSelectionCriteriaError(
                 "'latest_available_forecasts' selection criteria should be of type dict with keys "
                 "'number_of_forecasts' and 'forecasts_older_than', and with values of type int and "
                 "api.time respectively.")
         v['forecasts_older_than'] = api.time(v['forecasts_older_than'])
     elif k == 'forecasts_at_reference_times':
         if not isinstance(v, list):
             raise ForecastSelectionCriteriaError(
                 "'forecasts_at_reference_times' selection criteria should be of type list."
             )
         # normalize every entry to api.time, in place
         v[:] = [api.time(t) for t in v]
     else:
         raise ForecastSelectionCriteriaError(
             "Unrecognized forecast selection criteria.")
Example #7
0
 def test_utctime_vector(self):
     """Exercise UtcTimeVector construction, mutation and numpy round-trips."""
     from_list = api.UtcTimeVector(list(range(10)))
     ref = np.arange(10, dtype=np.int64)
     from_np = api.UtcTimeVector.from_numpy(ref)
     self.assertEqual(len(from_list), 10)
     assert_array_almost_equal(from_list.to_numpy(), ref)
     assert_array_almost_equal(from_np.to_numpy(), ref)
     from_np[5] = api.time(8)  # plain numbers are accepted here as well
     from_np[5] = 8.5  # 8.5 seconds
     self.assertAlmostEqual(from_np[5].seconds, 8.5)  # sub-second part is kept
     from_np.append(api.time(11))
     from_np.push_back(12)  # push_back accepts anything convertible to seconds
     from_np.push_back(api.time(13))  # ... including api.time
     ref[5] = 8.5  # numpy silently truncates the float into int64
     ref.resize(13)
     ref[10] = 11
     ref[11] = 12
     ref[12] = 13
     assert_array_almost_equal(from_np.to_numpy(), ref)
     as_double = from_np.to_numpy_double()
     self.assertAlmostEqual(as_double[5], 8.5)  # to_numpy_double keeps the microseconds
Example #8
0
    def test_transform_functions_variable_interval(self):
        """
        Test the _transform_raw function with a variable-interval time axis
        (three 1-hour steps followed by one 2-hour step).

        Expectations encoded below:
          * accumulated fields (radiation integral, precipitation_amount_acc)
            are differenced into rates on a one-step-shorter axis (ta3);
          * instantaneous fields keep their raw values on an extended axis
            (ta4) whose final interval is assumed to be 2 hours as well.
        """
        EPSG, bbox, bpoly = self.arome_epsg_bbox

        # Period start
        n_hours = 30
        t0 = api.YMDhms(2015, 8, 24, 0)
        date_str = "{}{:02}{:02}_{:02}".format(t0.year, t0.month, t0.day,
                                               t0.hour)
        utc = api.Calendar()  # No offset gives Utc

        base_dir = path.join(shyftdata_dir, "repository",
                             "arome_data_repository")
        f1 = "arome_metcoop_red_default2_5km_{}_diff_time_unit.nc".format(
            date_str)
        ar1 = MetNetcdfDataRepository(EPSG, base_dir, filename=f1)
        # synthetic raw data: 4 time steps x 4 geo points
        np_raw_array = np.array(
            [  # 0  # 1 #  2 #  3
                [1.0, 2.0, 3.0, 4.0], [1.1, 2.1, 3.1, 4.1],
                [1.2, 2.2, 3.2, 4.2], [1.4, 2.5, 3.6, 4.7]
            ],
            dtype=np.float64)
        # maps shyft variable name -> (raw ndarray, netcdf variable name, unit)
        raw_values = {
            'wind_speed': (np_raw_array, 'wind_speed', 'm/s'),
            'rel_hum': (np_raw_array, 'relative_humidity_2m', '?'),
            'temperature': (273.15 + np_raw_array, 'air_temperature_2m', 'K'),
            'radiation':
            (3600.0 * np_raw_array,
             'integral_of_surface_downwelling_shortwave_flux_in_air_wrt_time',
             'W s/m2'),
            'prepitation_acc':
            (np_raw_array, 'precipitation_amount_acc', 'Mg/m^2'),
            'prepitation': (np_raw_array, 'precipitation_amount', 'mm')
        }
        raw_time = np.array([0, 3600, 7200, 7200 + 2 * 3600],
                            dtype=np.int64)  # last step is 2 hours!

        rd = ar1._transform_raw(raw_values, raw_time)
        # ta3: axis for differenced (accumulated) fields — one step shorter
        ta3 = api.TimeAxis(api.UtcTimeVector(raw_time[:-1]),
                           api.time(int(raw_time[-1])))
        ta4 = api.TimeAxis(
            api.UtcTimeVector(raw_time),
            api.time(int(raw_time[-1] +
                         2 * 3600)))  # assume last step is also 2 hours
        # expected rate in mm/h from the accumulated Mg/m^2 field
        # (1 Mg/m^2 corresponds to 1000 mm of water; each diff is divided
        # by its interval length in hours, hence row 3 uses the 2 h step)
        e_precip_acc = np.array(
            [  # 0  # 1 #  2 #  3
                [100.0, 100.0, 100.0, 100.0],
                [100.0, 100.0, 100.0, 100.0],
                [100.0, 150.0, 200.0, 250.0],
            ],
            dtype=np.float64)
        # non-accumulated precipitation just drops the first raw step
        e_precip = np.array(
            [  # 0  # 1 #  2 #  3
                [1.1, 2.1, 3.1, 4.1], [1.2, 2.2, 3.2, 4.2],
                [1.4, 2.5, 3.6, 4.7]
            ],
            dtype=np.float64)
        # radiation integral (W s/m2) differenced and divided by the step
        # length in seconds gives a constant 0.1 W/m2 except the last row
        e_rad = np.array(
            [  # 0  # 1 #  2 #  3
                [0.1, 0.1, 0.1, 0.1],
                [0.1, 0.1, 0.1, 0.1],
                [0.1, 0.15, 0.2, 0.25],
            ],
            dtype=np.float64)
        # expected (values, time-axis) per variable
        e = {
            'wind_speed': (np_raw_array, ta4),
            'rel_hum': (np_raw_array, ta4),
            'temperature': (np_raw_array, ta4),
            'radiation': (e_rad, ta3),
            'prepitation_acc': (e_precip_acc, ta3),
            'prepitation': (e_precip, ta3)
        }

        self.assertIsNotNone(rd)
        for k, r in rd.items():
            self.assertTrue(k in e)
            self.assertEqual(r[1], e[k][1], "expect correct time-axis")
            self.assertTrue(np.allclose(r[0], e[k][0]),
                            "expect exact correct values")
Example #9
0
    def test_can_create_cf_compliant_file(self):
        # create files
        with TemporaryDirectory() as td:
            test_file = path.join(path.abspath(str(td)), 'shyft_test.nc')
            if path.exists(test_file):
                os.remove(test_file)
            # create meta info
            epsg_id = 32633
            x0 = 100000
            x1 = 200000
            y0 = 100000
            y1 = 200000
            x = 101000
            y = 101000
            z = 1200
            temperature = TimeSeriesMetaInfo('temperature', '/observed/at_stn_abc/temperature', 'observed air temperature',
                                             x, y, z, epsg_id)

            # create time axis
            utc = Calendar()
            ta = TimeAxis(utc.time(2016, 1, 1), deltahours(1), 24)
            data = np.arange(0, ta.size(), dtype=np.float64)
            ts = TimeSeries(ta, dv.from_numpy(data), point_fx=point_fx.POINT_INSTANT_VALUE)

            # save the first batch
            t_ds = TimeSeriesStore(test_file, temperature)
            t_ds.create_new_file()
            t_ds.append_ts_data(ts)

            # expected result
            ts_exp = ts

            # now read back the result using a *standard* shyft cf geo repository
            selection_criteria = box(x0, y0, x1, y1)
            ts_dr = CFDataRepository(epsg_id, test_file)
            # now read back 'temperature' that we know should be there
            rts_map = ts_dr.get_timeseries(['temperature'], UtcPeriod(ts_exp.time(0), ts_exp.time(ts_exp.size()-1)), geo_location_criteria=selection_criteria)

            # and verify that we get exactly back what we wanted.
            self.assertIsNotNone(rts_map)
            self.assertTrue('temperature' in rts_map)
            geo_temperature = rts_map['temperature']
            self.assertEqual(len(geo_temperature), 1)
            self.assertLessEqual(GeoPoint.distance2(geo_temperature[0].mid_point(), GeoPoint(x, y, z)), 1.0)
            # check if time axis is as expected
            self.assertEqual(geo_temperature[0].ts.time_axis, ts_exp.time_axis)
            self.assertTrue(np.allclose(geo_temperature[0].ts.time_axis.time_points, ts_exp.time_axis.time_points))
            self.assertEqual(geo_temperature[0].ts.point_interpretation(), point_fx.POINT_INSTANT_VALUE)
            # check if variable data is as expected
            self.assertTrue(np.allclose(geo_temperature[0].ts.values.to_numpy(), ts_exp.values.to_numpy()))

            # --------------------------------------
            # Append data
            print("\n\n append at the end data")
            # create time axis
            ta = TimeAxis(utc.time(2016, 1, 2), deltahours(1), 48)
            ts = TimeSeries(ta, dv.from_numpy(np.arange(0, ta.size(), dtype=np.float64)),
                            point_fx=point_fx.POINT_INSTANT_VALUE)
            # save the data
            t_ds.append_ts_data(ts)

            # expected result
            ta = TimeAxis(utc.time(2016, 1, 1), deltahours(1), 72)
            data = np.empty(72)
            data[:24] = np.arange(0, 24, dtype=np.float64)
            data[24:72] = np.arange(0, 48, dtype=np.float64)  # <-- new data
            ts_exp = TimeSeries(ta, dv.from_numpy(data), point_fx=point_fx.POINT_INSTANT_VALUE)

            # now read back the result using a *standard* shyft cf geo repository
            ts_dr = CFDataRepository(epsg_id, test_file)
            # now read back 'temperature' that we know should be there
            rts_map = ts_dr.get_timeseries(['temperature'], UtcPeriod(ts_exp.time(0), ts_exp.time(ts_exp.size()-1)), geo_location_criteria=selection_criteria)

            # and verify that we get exactly back what we wanted.
            self.assertIsNotNone(rts_map)
            self.assertTrue('temperature' in rts_map)
            geo_temperature = rts_map['temperature']
            self.assertEqual(len(geo_temperature), 1)
            self.assertLessEqual(GeoPoint.distance2(geo_temperature[0].mid_point(), GeoPoint(x, y, z)), 1.0)
            # check if time axis is as expected
            self.assertEqual(geo_temperature[0].ts.time_axis, ts_exp.time_axis)
            self.assertTrue(np.allclose(geo_temperature[0].ts.time_axis.time_points, ts_exp.time_axis.time_points))
            self.assertEqual(geo_temperature[0].ts.point_interpretation(), point_fx.POINT_INSTANT_VALUE)
            # check if variable data is as expected
            self.assertTrue(np.allclose(geo_temperature[0].ts.values.to_numpy(), ts_exp.values.to_numpy()))

            # --------------------------------------
            # Append with overlap
            print("\n\n append with overlap")
            # create time axis
            ta = TimeAxis(utc.time(2016, 1, 3), deltahours(1), 48)
            ts = TimeSeries(ta, dv.from_numpy(np.arange(0, ta.size(), dtype=np.float64)),
                            point_fx=point_fx.POINT_INSTANT_VALUE)
            # save the data
            t_ds.append_ts_data(ts)

            # expected result
            ta = TimeAxis(utc.time(2016, 1, 1), deltahours(1), 96)
            data = np.empty(96)
            data[:24] = np.arange(0, 24, dtype=np.float64)
            data[24:48] = np.arange(0, 24, dtype=np.float64)  # <-- new data
            data[48:96] = np.arange(0, 48, dtype=np.float64)  # <-- new data
            ts_exp = TimeSeries(ta, dv.from_numpy(data), point_fx=point_fx.POINT_INSTANT_VALUE)

            # now read back the result using a *standard* shyft cf geo repository
            ts_dr = CFDataRepository(epsg_id, test_file)
            # now read back 'temperature' that we know should be there
            rts_map = ts_dr.get_timeseries(['temperature'], UtcPeriod(ts_exp.time(0), ts_exp.time(ts_exp.size()-1)), geo_location_criteria=selection_criteria)

            # and verify that we get exactly back what we wanted.
            self.assertIsNotNone(rts_map)
            self.assertTrue('temperature' in rts_map)
            geo_temperature = rts_map['temperature']
            self.assertEqual(len(geo_temperature), 1)
            self.assertLessEqual(GeoPoint.distance2(geo_temperature[0].mid_point(), GeoPoint(x, y, z)), 1.0)
            # check if time axis is as expected
            self.assertEqual(geo_temperature[0].ts.time_axis, ts_exp.time_axis)
            self.assertTrue(np.allclose(geo_temperature[0].ts.time_axis.time_points, ts_exp.time_axis.time_points))
            self.assertEqual(geo_temperature[0].ts.point_interpretation(), point_fx.POINT_INSTANT_VALUE)
            # check if variable data is as expected
            self.assertTrue(np.allclose(geo_temperature[0].ts.values.to_numpy(), ts_exp.values.to_numpy()))

            # --------------------------------------
            # Append with gap in time axis
            print("\n\n Append with gap in time axis")
            # create time axis
            ta = TimeAxis(utc.time(2016, 1, 6), deltahours(1), 24)
            ts = TimeSeries(ta, dv.from_numpy(np.arange(0, ta.size(), dtype=np.float64)),
                            point_fx=point_fx.POINT_INSTANT_VALUE)
            # save the data
            t_ds.append_ts_data(ts)

            # expected result
            time_vals = np.append(TimeAxis(utc.time(2016, 1, 1), deltahours(1), 96).time_points[:-1], ta.time_points)
            # print(time_vals)
            ta = TimeAxis(UtcTimeVector.from_numpy(time_vals.astype(np.int64)))
            data = np.empty(120)
            data[:24] = np.arange(0, 24, dtype=np.float64)
            data[24:48] = np.arange(0, 24, dtype=np.float64)
            data[48:96] = np.arange(0, 48, dtype=np.float64)
            data[96:120] = np.arange(0, 24, dtype=np.float64)  # <-- new data
            ts_exp = TimeSeries(ta, dv.from_numpy(data), point_fx=point_fx.POINT_INSTANT_VALUE)

            # now read back the result using a *standard* shyft cf geo repository
            ts_dr = CFDataRepository(epsg_id, test_file)
            # now read back 'temperature' that we know should be there
            # print(ts_exp.total_period())
            rts_map = ts_dr.get_timeseries(['temperature'], UtcPeriod(ts_exp.time(0), ts_exp.time(ts_exp.size()-1)), geo_location_criteria=selection_criteria)

            # and verify that we get exactly back what we wanted.
            self.assertIsNotNone(rts_map)
            self.assertTrue('temperature' in rts_map)
            geo_temperature = rts_map['temperature']
            self.assertEqual(len(geo_temperature), 1)
            self.assertLessEqual(GeoPoint.distance2(geo_temperature[0].mid_point(), GeoPoint(x, y, z)), 1.0)
            # check if time axis is as expected
            # print(geo_temperature[0].ts.time_axis.time_points - ts_exp.time_axis.time_points)
            # print(geo_temperature[0].ts.time_axis.time_points - time_vals)
            # print(ts_exp.time_axis.time_points - time_vals)
            self.assertEqual(geo_temperature[0].ts.time_axis, ts_exp.time_axis)
            self.assertTrue(np.allclose(geo_temperature[0].ts.time_axis.time_points, ts_exp.time_axis.time_points))
            self.assertEqual(geo_temperature[0].ts.point_interpretation(), point_fx.POINT_INSTANT_VALUE)
            # check if variable data is as expected
            self.assertTrue(np.allclose(geo_temperature[0].ts.values.to_numpy(), ts_exp.values.to_numpy()))

            # --------------------------------------
            # Add new data in the middle where nothing was defined (no moving)
            print("\n\n Add new data in the middle where nothing was defined (no moving)")
            # create time axis
            ta = TimeAxis(utc.time(2016, 1, 2), deltahours(1), 24)
            ts = TimeSeries(ta, dv.from_numpy(np.arange(100, 100 + ta.size(), dtype=np.float64)),
                            point_fx=point_fx.POINT_INSTANT_VALUE)
            # save the data
            t_ds.append_ts_data(ts)

            # expected result
            time_vals = np.append(TimeAxis(utc.time(2016, 1, 1), deltahours(1), 96).time_points[:-1],
                                  TimeAxis(utc.time(2016, 1, 6), deltahours(1), 24).time_points)
            ta = TimeAxis(UtcTimeVector.from_numpy(time_vals.astype(np.int64)))
            data = np.empty(120)
            data[:24] = np.arange(0, 24, dtype=np.float64)
            data[24:48] = np.arange(100, 124, dtype=np.float64)  # <-- new data
            data[48:96] = np.arange(0, 48, dtype=np.float64)
            data[96:120] = np.arange(0, 24, dtype=np.float64)
            ts_exp = TimeSeries(ta, dv.from_numpy(data), point_fx=point_fx.POINT_INSTANT_VALUE)

            # now read back the result using a *standard* shyft cf geo repository
            ts_dr = CFDataRepository(epsg_id, test_file)
            # now read back 'temperature' that we know should be there
            rts_map = ts_dr.get_timeseries(['temperature'], UtcPeriod(ts_exp.time(0), ts_exp.time(ts_exp.size()-1)), geo_location_criteria=selection_criteria)
            # print(ts_exp.total_period())
            # and verify that we get exactly back what we wanted.
            self.assertIsNotNone(rts_map)
            self.assertTrue('temperature' in rts_map)
            geo_temperature = rts_map['temperature']
            self.assertEqual(len(geo_temperature), 1)
            self.assertLessEqual(GeoPoint.distance2(geo_temperature[0].mid_point(), GeoPoint(x, y, z)), 1.0)
            # check if time axis is as expected
            self.assertEqual(geo_temperature[0].ts.time_axis, ts_exp.time_axis)
            self.assertTrue(np.allclose(geo_temperature[0].ts.time_axis.time_points, ts_exp.time_axis.time_points))
            self.assertEqual(geo_temperature[0].ts.point_interpretation(), point_fx.POINT_INSTANT_VALUE)
            # check if variable data is as expected
            self.assertTrue(np.allclose(geo_temperature[0].ts.values.to_numpy(), ts_exp.values.to_numpy()))

            # --------------------------------------
            # Insert new data in the middle and move rest
            print("\n\n insert new data and move rest")
            # create time axis
            ta = TimeAxis(utc.time(2016, 1, 5), deltahours(1), 36)
            ts = TimeSeries(ta, dv.from_numpy(np.arange(200, 200 + ta.size(), dtype=np.float64)),
                            point_fx=point_fx.POINT_INSTANT_VALUE)
            # save the data
            t_ds.append_ts_data(ts)

            # expected result
            ta = TimeAxis(utc.time(2016, 1, 1), deltahours(1), 144)
            data = np.empty(144)
            data[:24] = np.arange(0, 24, dtype=np.float64)
            data[24:48] = np.arange(100, 124, dtype=np.float64)
            data[48:96] = np.arange(0, 48, dtype=np.float64)
            data[96:132] = np.arange(200, 236, dtype=np.float64)  # <-- new data
            data[132:144] = np.arange(12, 24, dtype=np.float64)
            ts_exp = TimeSeries(ta, dv.from_numpy(data), point_fx=point_fx.POINT_INSTANT_VALUE)

            # now read back the result using a *standard* shyft cf geo repository
            ts_dr = CFDataRepository(epsg_id, test_file)
            # now read back 'temperature' that we know should be there
            rts_map = ts_dr.get_timeseries(['temperature'], UtcPeriod(ts_exp.time(0), ts_exp.time(ts_exp.size()-1)), geo_location_criteria=selection_criteria)

            # and verify that we get exactly back what we wanted.
            self.assertIsNotNone(rts_map)
            self.assertTrue('temperature' in rts_map)
            geo_temperature = rts_map['temperature']
            self.assertEqual(len(geo_temperature), 1)
            self.assertLessEqual(GeoPoint.distance2(geo_temperature[0].mid_point(), GeoPoint(x, y, z)), 1.0)
            # check if time axis is as expected
            self.assertEqual(geo_temperature[0].ts.time_axis, ts_exp.time_axis)
            self.assertTrue(np.allclose(geo_temperature[0].ts.time_axis.time_points, ts_exp.time_axis.time_points))
            self.assertEqual(geo_temperature[0].ts.point_interpretation(), point_fx.POINT_INSTANT_VALUE)
            # check if variable data is as expected
            self.assertTrue(np.allclose(geo_temperature[0].ts.values.to_numpy(), ts_exp.values.to_numpy()))

            # --------------------------------------
            # Add new data before existing data without overlap
            print("\n\n add new data before existing data without overlap")
            # create time axis
            ta = TimeAxis(utc.time(2015, 12, 31), deltahours(1), 24)
            ts = TimeSeries(ta, dv.from_numpy(np.arange(300, 300 + ta.size(), dtype=np.float64)),
                            point_fx=point_fx.POINT_INSTANT_VALUE)
            # save the first batch
            t_ds.append_ts_data(ts)

            # expected result
            ta = TimeAxis(utc.time(2015, 12, 31), deltahours(1), 168)
            data = np.empty(168)
            data[:24] = np.arange(300, 324, dtype=np.float64)  # <-- new data
            data[24:48] = np.arange(0, 24, dtype=np.float64)
            data[48:72] = np.arange(100, 124, dtype=np.float64)
            data[72:120] = np.arange(0, 48, dtype=np.float64)
            data[120:156] = np.arange(200, 236, dtype=np.float64)
            data[156:168] = np.arange(12, 24, dtype=np.float64)
            ts_exp = TimeSeries(ta, dv.from_numpy(data), point_fx=point_fx.POINT_INSTANT_VALUE)

            # now read back the result using a *standard* shyft cf geo repository
            ts_dr = CFDataRepository(epsg_id, test_file)
            # now read back 'temperature' that we know should be there
            rts_map = ts_dr.get_timeseries(['temperature'], UtcPeriod(ts_exp.time(0), ts_exp.time(ts_exp.size()-1)), geo_location_criteria=selection_criteria)

            # and verify that we get exactly back what we wanted.
            self.assertIsNotNone(rts_map)
            self.assertTrue('temperature' in rts_map)
            geo_temperature = rts_map['temperature']
            self.assertEqual(len(geo_temperature), 1)
            self.assertLessEqual(GeoPoint.distance2(geo_temperature[0].mid_point(), GeoPoint(x, y, z)), 1.0)
            # check if time axis is as expected
            self.assertEqual(geo_temperature[0].ts.time_axis, ts_exp.time_axis)
            self.assertTrue(np.allclose(geo_temperature[0].ts.time_axis.time_points, ts_exp.time_axis.time_points))
            self.assertEqual(geo_temperature[0].ts.point_interpretation(), point_fx.POINT_INSTANT_VALUE)
            # check if variable data is as expected
            self.assertTrue(np.allclose(geo_temperature[0].ts.values.to_numpy(), ts_exp.values.to_numpy()))

            # --------------------------------------
            # add new data before existing data with overlap
            print("\n\n add new data before existing data with overlap")
            # create time axis
            ta = TimeAxis(utc.time(2015, 12, 30), deltahours(1), 36)
            ts = TimeSeries(ta, dv.from_numpy(np.arange(400, 400 + ta.size(), dtype=np.float64)),
                            point_fx=point_fx.POINT_INSTANT_VALUE)
            # save the first batch
            # t_ds = TimeSeriesStore(test_file, temperature)
            t_ds.append_ts_data(ts)

            # expected result
            ta = TimeAxis(utc.time(2015, 12, 30), deltahours(1), 192)
            data = np.empty(192)
            data[:36] = np.arange(400, 436, dtype=np.float64)  # <-- new data
            data[36:48] = np.arange(312, 324, dtype=np.float64)
            data[48:72] = np.arange(0, 24, dtype=np.float64)
            data[72:96] = np.arange(100, 124, dtype=np.float64)
            data[96:144] = np.arange(0, 48, dtype=np.float64)
            data[144:180] = np.arange(200, 236, dtype=np.float64)
            data[180:192] = np.arange(12, 24, dtype=np.float64)
            ts_exp = TimeSeries(ta, dv.from_numpy(data), point_fx=point_fx.POINT_INSTANT_VALUE)

            # now read back the result using a *standard* shyft cf geo repository
            ts_dr = CFDataRepository(epsg_id, test_file)
            # now read back 'temperature' that we know should be there
            rts_map = ts_dr.get_timeseries(['temperature'], UtcPeriod(ts_exp.time(0), ts_exp.time(ts_exp.size()-1)), geo_location_criteria=selection_criteria)

            # and verify that we get exactly back what we wanted.
            self.assertIsNotNone(rts_map)
            self.assertTrue('temperature' in rts_map)
            geo_temperature = rts_map['temperature']
            self.assertEqual(len(geo_temperature), 1)
            self.assertLessEqual(GeoPoint.distance2(geo_temperature[0].mid_point(), GeoPoint(x, y, z)), 1.0)
            # check if time axis is as expected
            self.assertEqual(geo_temperature[0].ts.time_axis, ts_exp.time_axis)
            self.assertTrue(np.allclose(geo_temperature[0].ts.time_axis.time_points, ts_exp.time_axis.time_points))
            self.assertEqual(geo_temperature[0].ts.point_interpretation(), point_fx.POINT_INSTANT_VALUE)
            # check if variable data is as expected
            self.assertTrue(np.allclose(geo_temperature[0].ts.values.to_numpy(), ts_exp.values.to_numpy()))

            # --------------------------------------
            # Overwrite everything with less data points
            # create time axis
            print('\n\n Overwrite everything with less data points')
            ta = TimeAxis(utc.time(2015, 12, 30), deltahours(24), 9)
            ts = TimeSeries(ta, dv.from_numpy(np.arange(1000, 1000 + ta.size(), dtype=np.float64)),
                            point_fx=point_fx.POINT_INSTANT_VALUE)
            # write the time series
            t_ds.append_ts_data(ts)

            # expected result
            ts_exp = ts

            # now read back the result using a *standard* shyft cf geo repository
            ts_dr = CFDataRepository(epsg_id, test_file)
            # now read back 'temperature' that we know should be there
            rts_map = ts_dr.get_timeseries(['temperature'], UtcPeriod(ts_exp.time(0), ts_exp.time(ts_exp.size()-1)), geo_location_criteria=selection_criteria)

            # and verify that we get exactly back what we wanted.
            self.assertIsNotNone(rts_map)
            self.assertTrue('temperature' in rts_map)
            geo_temperature = rts_map['temperature']
            self.assertEqual(len(geo_temperature), 1)
            self.assertLessEqual(GeoPoint.distance2(geo_temperature[0].mid_point(), GeoPoint(x, y, z)), 1.0)
            # check if time axis is as expected
            self.assertEqual(geo_temperature[0].ts.time_axis, ts_exp.time_axis)
            self.assertTrue(np.allclose(geo_temperature[0].ts.time_axis.time_points, ts_exp.time_axis.time_points))
            self.assertEqual(geo_temperature[0].ts.point_interpretation(), point_fx.POINT_INSTANT_VALUE)
            # check if variable data is as expected
            self.assertTrue(np.allclose(geo_temperature[0].ts.values.to_numpy(), ts_exp.values.to_numpy()))

            # --------------------------------------
            # Insert data with different dt
            # create time axis
            print('\n\n Insert data with different dt')
            ta = TimeAxis(utc.time(2016, 1, 1), deltahours(1), 24)
            ts = TimeSeries(ta, dv.from_numpy(np.arange(0, 24, dtype=np.float64)),
                            point_fx=point_fx.POINT_INSTANT_VALUE)
            # write the time series
            t_ds.append_ts_data(ts)

            # expected result
            time_points = np.empty(33, dtype=np.int)
            time_points[0:2] = TimeAxis(utc.time(2015, 12, 30), deltahours(24), 1).time_points
            time_points[2:26] = TimeAxis(utc.time(2016, 1, 1), deltahours(1), 23).time_points
            time_points[26:] = TimeAxis(utc.time(2016, 1, 2), deltahours(24), 6).time_points
            ta = TimeAxis(UtcTimeVector.from_numpy(time_points))
            data = np.empty(32)
            data[0:2] = np.array([1000, 1001])
            data[2:26] = np.arange(0, 24)  # <-- new data
            data[26:] = np.arange(1003, 1009)
            ts_exp = TimeSeries(ta, dv.from_numpy(data), point_fx=point_fx.POINT_INSTANT_VALUE)

            # now read back the result using a *standard* shyft cf geo repository
            ts_dr = CFDataRepository(epsg_id, test_file)
            # now read back 'temperature' that we know should be there
            rts_map = ts_dr.get_timeseries(['temperature'], UtcPeriod(ts_exp.time(0), ts_exp.time(ts_exp.size()-1)), geo_location_criteria=selection_criteria)

            # and verify that we get exactly back what we wanted.
            self.assertIsNotNone(rts_map)
            self.assertTrue('temperature' in rts_map)
            geo_temperature = rts_map['temperature']
            self.assertEqual(len(geo_temperature), 1)
            self.assertLessEqual(GeoPoint.distance2(geo_temperature[0].mid_point(), GeoPoint(x, y, z)), 1.0)
            # check if time axis is as expected
            self.assertEqual(geo_temperature[0].ts.time_axis, ts_exp.time_axis)
            self.assertTrue(np.allclose(geo_temperature[0].ts.time_axis.time_points, ts_exp.time_axis.time_points))
            self.assertEqual(geo_temperature[0].ts.point_interpretation(), point_fx.POINT_INSTANT_VALUE)
            # check if variable data is as expected
            self.assertTrue(np.allclose(geo_temperature[0].ts.values.to_numpy(), ts_exp.values.to_numpy()))

            # --------------------------------------
            # delete data with range UtcPeriod in the middle
            print('\n\n delete data with range UtcPeriod')
            tp = UtcPeriod(utc.time(2015, 12, 31), utc.time(2016, 1, 1, 12))
            # ta = TimeAxis(utc.time(2016, 1, 1), deltahours(1), 24)
            # ts = TimeSeries(ta, dv.from_numpy(np.arange(0, 24, dtype=np.float64)), point_fx=point_fx.POINT_INSTANT_VALUE)
            # write the time series
            t_ds.remove_tp_data(tp)

            # expected result
            time_points = np.array([1451433600, 1451653200, 1451656800, 1451660400, 1451664000, 1451667600,
                                    1451671200, 1451674800, 1451678400, 1451682000, 1451685600, 1451689200,
                                    1451692800, 1451779200, 1451865600, 1451952000, 1452038400, 1452124800,
                                    1452211200])
            ta = TimeAxis(UtcTimeVector.from_numpy(time_points))
            data = np.array([1000, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 1003, 1004, 1005, 1006, 1007, 1008])
            ts_exp = TimeSeries(ta, dv.from_numpy(data),point_fx.POINT_INSTANT_VALUE)  # TODO: is this correct policy to use

            # now read back the result using a *standard* shyft cf geo repository
            ts_dr = CFDataRepository(epsg_id, test_file)
            # now read back 'temperature' that we know should be there
            rts_map = ts_dr.get_timeseries(['temperature'], UtcPeriod(ts_exp.time(0), ts_exp.time(ts_exp.size()-1)), geo_location_criteria=selection_criteria)

            # and verify that we get exactly back what we wanted.
            self.assertIsNotNone(rts_map)
            self.assertTrue('temperature' in rts_map)
            geo_temperature = rts_map['temperature']
            self.assertEqual(len(geo_temperature), 1)
            self.assertLessEqual(GeoPoint.distance2(geo_temperature[0].mid_point(), GeoPoint(x, y, z)), 1.0)
            # check if time axis is as expected
            self.assertEqual(geo_temperature[0].ts.time_axis, ts_exp.time_axis)
            self.assertTrue(np.allclose(geo_temperature[0].ts.time_axis.time_points, ts_exp.time_axis.time_points))
            self.assertEqual(geo_temperature[0].ts.point_interpretation(), point_fx.POINT_INSTANT_VALUE)
            # check if variable data is as expected
            self.assertTrue(np.allclose(geo_temperature[0].ts.values.to_numpy(), ts_exp.values.to_numpy()))

            # --------------------------------------
            # delete data with range UtcPeriod at the start
            print('\n\n delete data with range UtcPeriod at the start')
            tp = UtcPeriod(1451433600, 1451667600)
            # ta = TimeAxis(utc.time(2016, 1, 1), deltahours(1), 24)
            # ts = TimeSeries(ta, dv.from_numpy(np.arange(0, 24, dtype=np.float64)), point_fx=point_fx.POINT_INSTANT_VALUE)
            # write the time series
            t_ds.remove_tp_data(tp)

            # expected result
            time_points = np.array([1451671200, 1451674800, 1451678400, 1451682000, 1451685600, 1451689200,
                                    1451692800, 1451779200, 1451865600, 1451952000, 1452038400, 1452124800,
                                    1452211200])
            ta = TimeAxis(UtcTimeVector.from_numpy(time_points))
            data = np.array([18, 19, 20, 21, 22, 23, 1003, 1004, 1005, 1006, 1007, 1008])
            ts_exp = TimeSeries(ta, dv.from_numpy(data), point_fx.POINT_INSTANT_VALUE)  # TODO: is this correct policy to use for this test

            # now read back the result using a *standard* shyft cf geo repository
            ts_dr = CFDataRepository(epsg_id, test_file)
            # now read back 'temperature' that we know should be there
            rts_map = ts_dr.get_timeseries(['temperature'], UtcPeriod(ts_exp.time(0), ts_exp.time(ts_exp.size()-1)), geo_location_criteria=selection_criteria)

            # and verify that we get exactly back what we wanted.
            self.assertIsNotNone(rts_map)
            self.assertTrue('temperature' in rts_map)
            geo_temperature = rts_map['temperature']
            self.assertEqual(len(geo_temperature), 1)
            self.assertLessEqual(GeoPoint.distance2(geo_temperature[0].mid_point(), GeoPoint(x, y, z)), 1.0)
            # check if time axis is as expected
            self.assertEqual(geo_temperature[0].ts.time_axis, ts_exp.time_axis)
            self.assertTrue(np.allclose(geo_temperature[0].ts.time_axis.time_points, ts_exp.time_axis.time_points))
            self.assertEqual(geo_temperature[0].ts.point_interpretation(), point_fx.POINT_INSTANT_VALUE)
            # check if variable data is as expected
            self.assertTrue(np.allclose(geo_temperature[0].ts.values.to_numpy(), ts_exp.values.to_numpy()))

            # --------------------------------------
            # delete data with range UtcPeriod at the end
            print('\n\n delete data with range UtcPeriod at the end')
            tp = UtcPeriod(time(1451952000), utc.time(2016, 1, 10))
            # ta = TimeAxis(utc.time(2016, 1, 1), deltahours(1), 24)
            # ts = TimeSeries(ta, dv.from_numpy(np.arange(0, 24, dtype=np.float64)), point_fx=point_fx.POINT_INSTANT_VALUE)
            # write the time series
            t_ds.remove_tp_data(tp)

            # expected result
            time_points = np.array([1451671200, 1451674800, 1451678400, 1451682000, 1451685600, 1451689200,
                                    1451692800, 1451779200, 1451865600, 1451952000])
            ta = TimeAxis(UtcTimeVector.from_numpy(time_points))
            data = np.array([18, 19, 20, 21, 22, 23, 1003, 1004, 1005])
            ts_exp = TimeSeries(ta, dv.from_numpy(data), point_fx.POINT_INSTANT_VALUE)

            # now read back the result using a *standard* shyft cf geo repository
            ts_dr = CFDataRepository(epsg_id, test_file)
            # now read back 'temperature' that we know should be there
            try:
                rts_map = ts_dr.get_timeseries(['temperature'], UtcPeriod(ts_exp.time(0), ts_exp.time(ts_exp.size()-1)), geo_location_criteria=selection_criteria)
            except CFDataRepositoryError:
                pass

            # and verify that we get exactly back what we wanted.
            self.assertIsNotNone(rts_map)
            self.assertTrue('temperature' in rts_map)
            geo_temperature = rts_map['temperature']
            self.assertEqual(len(geo_temperature), 1)
            self.assertLessEqual(GeoPoint.distance2(geo_temperature[0].mid_point(), GeoPoint(x, y, z)), 1.0)
            # check if time axis is as expected
            self.assertEqual(geo_temperature[0].ts.time_axis, ts_exp.time_axis)
            self.assertTrue(np.allclose(geo_temperature[0].ts.time_axis.time_points, ts_exp.time_axis.time_points))
            self.assertEqual(geo_temperature[0].ts.point_interpretation(), point_fx.POINT_INSTANT_VALUE)
            # check if variable data is as expected
            self.assertTrue(np.allclose(geo_temperature[0].ts.values.to_numpy(), ts_exp.values.to_numpy()))

            # --------------------------------------
            # delete data with range UtcPeriod everything
            print('\n\n delete data with range UtcPeriod everything')
            tp = UtcPeriod(utc.time(2016, 1, 1), utc.time(2016, 1, 10))
            # write the time series
            t_ds.remove_tp_data(tp)

            # now read back the result using a *standard* shyft cf geo repository
            ts_dr = CFDataRepository(epsg_id, test_file)
            # now read back 'temperature' that we know should be there
            self.assertRaises(CFDataRepositoryError, ts_dr.get_timeseries, ['temperature'], tp, selection_criteria)

            # --------------------------------------
            # insert data in between time saved data points
            print('\n\n insert data in between time saved data points')
            # insert first data in which we want to insert the second batch
            utc = Calendar()
            ta = TimeAxis(utc.time(2016, 1, 1), deltahours(24), 2)
            data = np.arange(0, ta.size(), dtype=np.float64)
            ts = TimeSeries(ta, dv.from_numpy(data), point_fx=point_fx.POINT_INSTANT_VALUE)
            # save the first batch
            t_ds.append_ts_data(ts)

            # insert first data for every hour in between
            utc = Calendar()
            ta = TimeAxis(utc.time(2016, 1, 1) + deltahours(1), deltahours(1), 23)
            data = np.arange(10, 10 + ta.size(), dtype=np.float64)
            ts = TimeSeries(ta, dv.from_numpy(data), point_fx=point_fx.POINT_INSTANT_VALUE)
            # save the first batch
            t_ds.append_ts_data(ts)

            # expected result
            time_points = np.array([1451606400, 1451610000, 1451613600, 1451617200, 1451620800, 1451624400, 1451628000,
                                    1451631600, 1451635200, 1451638800, 1451642400, 1451646000, 1451649600, 1451653200,
                                    1451656800, 1451660400, 1451664000, 1451667600, 1451671200, 1451674800, 1451678400,
                                    1451682000, 1451685600, 1451689200, 1451692800, 0])
            time_points[-1] = 2 * time_points[-2] - time_points[-3]  # last time point calc
            data = np.array([0, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
                             27, 28, 29, 30, 31, 32, 1])
            ta = TimeAxis(UtcTimeVector.from_numpy(time_points))
            ts_exp = TimeSeries(ta, dv.from_numpy(data), point_fx.POINT_INSTANT_VALUE)  # TODO: is this correct policy value for this case

            # now read back the result using a *standard* shyft cf geo repository
            ts_dr = CFDataRepository(epsg_id, test_file)
            # now read back 'temperature' that we know should be there
            rts_map = ts_dr.get_timeseries(['temperature'], UtcPeriod(ts_exp.time(0), ts_exp.time(ts_exp.size()-1)), geo_location_criteria=selection_criteria)

            # and verify that we get exactly back what we wanted.
            self.assertIsNotNone(rts_map)
            self.assertTrue('temperature' in rts_map)
            geo_temperature = rts_map['temperature']
            self.assertEqual(len(geo_temperature), 1)
            self.assertLessEqual(GeoPoint.distance2(geo_temperature[0].mid_point(), GeoPoint(x, y, z)), 1.0)
            # check if time axis is as expected
            self.assertEqual(geo_temperature[0].ts.time_axis, ts_exp.time_axis)
            self.assertTrue(np.allclose(geo_temperature[0].ts.time_axis.time_points, ts_exp.time_axis.time_points))
            self.assertEqual(geo_temperature[0].ts.point_interpretation(), point_fx.POINT_INSTANT_VALUE)
            # check if variable data is as expected
            self.assertTrue(np.allclose(geo_temperature[0].ts.values.to_numpy(), ts_exp.values.to_numpy()))

            # --------------------------------------
            # insert data including nan
            print('\n\n insert data including nan')
            utc = Calendar()
            ta = TimeAxis(utc.time(2016, 1, 1) + deltahours(1), deltahours(1), 23)
            data = np.arange(10, 10 + ta.size(), dtype=np.float64)
            data[4] = np.nan
            data[6] = np.nan  # np.inf, but trouble getting inf trough all version of numpy/netcdf
            data[8] = np.nan  # -np.inf, --"--
            ts = TimeSeries(ta, dv.from_numpy(data), point_fx=point_fx.POINT_INSTANT_VALUE)
            # save the first batch
            t_ds.append_ts_data(ts)

            # expected result
            time_points = np.array([1451606400, 1451610000, 1451613600, 1451617200, 1451620800, 1451624400, 1451628000,
                                    1451631600, 1451635200, 1451638800, 1451642400, 1451646000, 1451649600, 1451653200,
                                    1451656800, 1451660400, 1451664000, 1451667600, 1451671200, 1451674800, 1451678400,
                                    1451682000, 1451685600, 1451689200, 1451692800, 0])
            time_points[-1] = 2 * time_points[-2] - time_points[-3]  # last time point calc

            data = np.array([0, 10, 11, 12, 13, np.nan, 15,
                             # np.inf,
                             np.nan,  # TODO: figure out how to unmask restoring 'used' mask-values
                             17,
                             #-np.inf,
                             np.nan,
                             19, 20, 21, 22, 23, 24, 25, 26,
                             27, 28, 29, 30, 31, 32, 1])
            ta = TimeAxis(UtcTimeVector.from_numpy(time_points))
            ts_exp = TimeSeries(ta, dv.from_numpy(data), point_fx.POINT_INSTANT_VALUE)  # TODO: policy right ?

            # now read back the result using a *standard* shyft cf geo repository
            ts_dr = CFDataRepository(epsg_id, test_file)
            # now read back 'temperature' that we know should be there
            rts_map = ts_dr.get_timeseries(['temperature'], UtcPeriod(ts_exp.time(0), ts_exp.time(ts_exp.size()-1)), geo_location_criteria=selection_criteria)

            # and verify that we get exactly back what we wanted.
            self.assertIsNotNone(rts_map)
            self.assertTrue('temperature' in rts_map)
            geo_temperature = rts_map['temperature']
            self.assertEqual(len(geo_temperature), 1)
            self.assertLessEqual(GeoPoint.distance2(geo_temperature[0].mid_point(), GeoPoint(x, y, z)), 1.0)
            # check if time axis is as expected
            self.assertEqual(geo_temperature[0].ts.time_axis, ts_exp.time_axis)
            self.assertTrue(np.allclose(geo_temperature[0].ts.time_axis.time_points, ts_exp.time_axis.time_points))
            self.assertEqual(geo_temperature[0].ts.point_interpretation(), point_fx.POINT_INSTANT_VALUE)
            # check if variable data is as expected
            self.assertTrue(np.allclose(geo_temperature[0].ts.values.to_numpy(), ts_exp.values.to_numpy(), equal_nan=True))
Пример #10
0
 def test_time_cast(self):
     """Verify int()/float() casts and the .seconds property of api.time."""
     as_int = int(api.time(10.23))
     as_float = float(api.time(10.23))
     self.assertAlmostEqual(as_int, 10)       # int() truncates the fraction
     self.assertAlmostEqual(as_float, 10.23)  # float() keeps full precision
     self.assertAlmostEqual(api.time(1.23).seconds, 1.23)
Пример #11
0
 def test_time_round(self):
     """Verify that round() of an api.time rounds to the nearest whole second."""
     for raw, expected in ((3.2, 3.0), (-3.7, -4.0)):
         self.assertAlmostEqual(round(api.time(raw)), expected)
Пример #12
0
 def test_time_floor(self):
     """Verify that math.floor of an api.time rounds toward negative infinity."""
     for raw, expected in ((3.2, 3.0), (-3.2, -4.0)):
         self.assertAlmostEqual(math.floor(api.time(raw)), expected)
Пример #13
0
 def test_time_compare(self):
     """Verify ==, !=, <=, >=, <, > between api.time and time/number operands.

     Uses the specific unittest comparison asserts (which apply the same
     operators as the original assertTrue expressions) so that a failure
     reports the offending operand values instead of 'False is not true'.
     """
     self.assertEqual(api.time(123), api.time(123))
     self.assertEqual(api.time(123), 123)                   # mixed time/int
     self.assertNotEqual(api.time(123), api.time(123.2))
     self.assertNotEqual(api.time(123), 123.4)              # mixed time/float
     self.assertLessEqual(api.time(123), api.time(123.2))
     self.assertLessEqual(api.time(123), api.time(123))
     self.assertGreaterEqual(api.time(1234), 134)
     self.assertGreaterEqual(api.time(1234), 1234)
     self.assertLess(api.time(123), api.time(123.2))
     self.assertGreater(api.time(1234), 134)
Пример #14
0
 def test_time_construct(self):
     """Verify api.time is constructible from nothing, int, float and ISO 8601 string."""
     cases = (
         (api.time(), 0.0, 'default value should be 0.0'),
         (api.time(3123456), 3123456, 'should be constructible from integer type'),
         (api.time(3.123456), 3.123456, 'should be constructible from float type'),
         (api.time('1970-01-01T00:00:23Z'), 23.0, 'should be constructible from iso 8601 string'),
     )
     for t, expected, why in cases:
         self.assertAlmostEqual(t.seconds, expected, msg=why)
Пример #15
0
    def test_transform_functions_fixed_interval(self):
        """
        test the _transform_raw function.

        NOTE(review): the body below is dead code — the early `return`
        disables everything until the TODO (missing concat file in
        shyft-data) is resolved.
        """
        return
        # TODO: add concat file in shyft-data, then implement the tests
        EPSG, bbox, bpoly = self._epsg_bbox

        # Period start
        t0 = api.YMDhms(2015, 8, 24, 0)
        date_str = "{}{:02}{:02}_{:02}".format(t0.year, t0.month, t0.day,
                                               t0.hour)
        utc = api.Calendar()  # No offset gives Utc

        # forecast netcdf under shyft-data; the filename encodes the
        # reference time built above
        f1 = path.join(
            shyftdata_dir, "repository", "arome_data_repository",
            f"arome_metcoop_red_default2_5km_{date_str}_diff_time_unit.nc")
        ar1 = ConcatDataRepository(epsg=EPSG, filename=f1)
        # synthetic raw input: 4 time steps x 4 points
        np_raw_array = np.array(
            [  # 0  # 1 #  2 #  3
                [1.0, 2.0, 3.0, 4.0], [1.1, 2.1, 3.1, 4.1],
                [1.2, 2.2, 3.2, 4.2], [1.4, 2.5, 3.6, 4.7]
            ],
            dtype=np.float64)
        # shyft name -> (raw values, source netcdf variable name, unit);
        # the unit presumably drives the conversion _transform_raw applies
        # (e.g. K offset for temperature) — TODO confirm once enabled
        raw_values = {
            'wind_speed': (np_raw_array, 'wind_speed', 'm/s'),
            'rel_hum': (np_raw_array, 'relative_humidity_2m', '?'),
            'temperature': (273.15 + np_raw_array, 'air_temperature_2m', 'K'),
            'radiation':
            (3600.0 * np_raw_array,
             'integral_of_surface_downwelling_shortwave_flux_in_air_wrt_time',
             'W s/m2'),
            'prepitation_acc':
            (np_raw_array, 'precipitation_amount_acc', 'Mg/m^2'),
            'prepitation': (np_raw_array, 'precipitation_amount', 'mm')
        }
        # hourly time stamps matching the 4 raw time steps
        raw_time = np.array([0, 3600, 7200, 10800], dtype=np.int64)

        rd = ar1._transform_raw(raw_values, raw_time)
        # accumulated/integrated variables are differenced, so their
        # expected time-axis has one interval less (ta3) than the
        # instantaneous ones (ta4)
        ta3 = api.TimeAxis(api.time(0), api.time(3600), 3)
        ta4 = api.TimeAxis(api.time(0), api.time(3600), 4)
        e_precip_acc = np.array(
            [  # 0  # 1 #  2 #  3
                [100.0, 100.0, 100.0, 100.0],
                [100.0, 100.0, 100.0, 100.0],
                [200.0, 300.0, 400.0, 500.0],
            ],
            dtype=np.float64)
        e_precip = np.array(
            [  # 0  # 1 #  2 #  3
                [1.1, 2.1, 3.1, 4.1], [1.2, 2.2, 3.2, 4.2],
                [1.4, 2.5, 3.6, 4.7]
            ],
            dtype=np.float64)
        e_rad = np.array(
            [  # 0  # 1 #  2 #  3
                [0.1, 0.1, 0.1, 0.1],
                [0.1, 0.1, 0.1, 0.1],
                [0.2, 0.3, 0.4, 0.5],
            ],
            dtype=np.float64)
        # expected output: shyft name -> (transformed values, time-axis)
        e = {
            'wind_speed': (np_raw_array, ta4),
            'rel_hum': (np_raw_array, ta4),
            'temperature': (np_raw_array, ta4),
            'radiation': (e_rad, ta3),
            'prepitation_acc': (e_precip_acc, ta3),
            'prepitation': (e_precip, ta3)
        }

        self.assertIsNotNone(rd)
        # every transformed entry must match the expected (values, axis) pair
        for k, r in rd.items():
            self.assertTrue(k in e)
            self.assertEqual(r[1], e[k][1], "expect correct time-axis")
            self.assertTrue(np.allclose(r[0], e[k][0]),
                            "expect exact correct values")