コード例 #1
0
def set_up_variable_cube(data,
                         name="temperature",
                         units="degC",
                         xo=400000.0,
                         yo=0.0,
                         attributes=None):
    """
    Build a diagnostic-variable cube for regridding tests.

    The cube sits on a 2 km Transverse Mercator grid located in the UK,
    with a descending (inverted) y-axis.
    """
    ny, nx = data.shape[0], data.shape[1]
    # Descending y values: row 0 is the furthest point from the origin.
    y_vals = yo + 2000.0 * (ny - np.arange(ny))
    x_vals = xo + 2000.0 * np.arange(nx)

    y_coord = DimCoord(
        y_vals, "projection_y_coordinate", units="m", coord_system=TMercCS)
    x_coord = DimCoord(
        x_vals, "projection_x_coordinate", units="m", coord_system=TMercCS)

    scalar_time = construct_scalar_time_coords(
        datetime(2015, 11, 23, 4, 30), None, datetime(2015, 11, 22, 22, 30))

    return iris.cube.Cube(
        data,
        long_name=name,
        units=units,
        attributes=attributes,
        dim_coords_and_dims=[(y_coord, 0), (x_coord, 1)],
        aux_coords_and_dims=scalar_time,
    )
コード例 #2
0
    def test_scalar_coords(self):
        """Test additional scalar coordinates"""
        coords_and_dims = construct_scalar_time_coords(
            datetime(2015, 11, 23, 4, 30), None,
            datetime(2015, 11, 22, 22, 30))
        time_coord, frt_coord, fp_coord = [crd for crd, _ in coords_and_dims]

        result = build_spotdata_cube(
            np.ones((2, 4), dtype=np.float32),
            "air_temperature",
            "degC",
            self.altitude,
            self.latitude,
            self.longitude,
            self.wmo_id,
            scalar_coords=[time_coord, frt_coord, fp_coord],
            neighbour_methods=self.neighbour_methods,
        )

        # pylint: disable=unsubscriptable-object
        expected = (("time", time_coord),
                    ("forecast_reference_time", frt_coord),
                    ("forecast_period", fp_coord))
        for coord_name, source_coord in expected:
            self.assertEqual(result.coord(coord_name).points[0],
                             source_coord.points[0])
コード例 #3
0
    def test_basic(self):
        """Test times can be set"""
        pairs = construct_scalar_time_coords(
            datetime(2017, 12, 1, 14, 0), None, datetime(2017, 12, 1, 9, 0))
        time_crd, frt_crd, fp_crd = (pair[0] for pair in pairs)

        for crd in (time_crd, frt_crd, fp_crd):
            self.assertIsInstance(crd, iris.coords.DimCoord)

        self.assertEqual(time_crd.name(), "time")
        self.assertEqual(iris_time_to_datetime(time_crd)[0],
                         datetime(2017, 12, 1, 14, 0))
        self.assertEqual(frt_crd.name(), "forecast_reference_time")
        self.assertEqual(iris_time_to_datetime(frt_crd)[0],
                         datetime(2017, 12, 1, 9, 0))
        self.assertEqual(fp_crd.name(), "forecast_period")
        # Five hours between reference time and validity time, in seconds.
        self.assertEqual(fp_crd.points[0], 5 * 3600)

        # Absolute times are int64 seconds since the epoch; the forecast
        # period is an int32 duration in seconds.
        for crd in (time_crd, frt_crd):
            self.assertEqual(crd.dtype, np.int64)
            self.assertEqual(crd.units, "seconds since 1970-01-01 00:00:00")
        self.assertEqual(fp_crd.units, "seconds")
        self.assertEqual(fp_crd.dtype, np.int32)
コード例 #4
0
ファイル: conftest.py プロジェクト: zfan001/improver
def spot_fixture():
    """Spot temperature cube"""
    altitudes = np.array([15, 82, 0, 4, 15, 269], dtype=np.float32)
    latitudes = np.array(
        [60.75, 60.13, 58.95, 57.37, 58.22, 57.72], dtype=np.float32)
    longitudes = np.array(
        [-0.85, -1.18, -2.9, -7.40, -6.32, -4.90], dtype=np.float32)
    station_ids = np.array(["3002", "3005", "3017", "3023", "3026", "3031"])

    cube = build_spotdata_cube(
        np.arange(6).astype(np.float32),
        "air_temperature",
        "degC",
        altitudes,
        latitudes,
        longitudes,
        station_ids,
    )
    cube.add_aux_coord(
        iris.coords.AuxCoord([50], long_name="percentile", units="%"))
    cube.attributes = {
        "source": "IMPROVER",
        "institution": "Met Office",
        "title": "IMPROVER Post-Processed Multi-Model Blend UK Spot Values",
        "mosg__model_configuration": "uk_det uk_ens",
    }

    # Attach scalar validity time plus a renamed blend_time coordinate;
    # the forecast_period element is discarded.
    (time, _), (blend_time, _), _ = construct_scalar_time_coords(
        time=datetime(2021, 2, 3, 14),
        time_bounds=None,
        frt=datetime(2021, 2, 3, 10))
    blend_time.rename("blend_time")
    cube.add_aux_coord(time)
    cube.add_aux_coord(blend_time)
    return cube
コード例 #5
0
 def test_error_negative_fp(self):
     """Test an error is raised if the calculated forecast period is
     negative"""
     # frt is after the validity time, so the forecast period is negative.
     msg = "Cannot set up cube with negative forecast period"
     with self.assertRaisesRegex(ValueError, msg):
         construct_scalar_time_coords(
             datetime(2017, 12, 1, 14, 0), None,
             datetime(2017, 12, 1, 16, 0))
コード例 #6
0
 def test_error_invalid_time_bounds(self):
     """Test an error is raised if the time point is not between the
     specified bounds"""
     # The validity time (10 Nov) falls outside the 1 Dec bounds window.
     bounds = (datetime(2017, 12, 1, 13, 0), datetime(2017, 12, 1, 14, 0))
     with self.assertRaisesRegex(ValueError, "not within bounds"):
         construct_scalar_time_coords(
             datetime(2017, 11, 10, 4, 0), bounds,
             datetime(2017, 11, 10, 0, 0))
コード例 #7
0
 def test_time_bounds(self):
     """Test creation of time coordinate with bounds"""
     result = construct_scalar_time_coords(
         datetime(2017, 12, 1, 14, 0),
         (datetime(2017, 12, 1, 13, 0), datetime(2017, 12, 1, 14, 0)),
         datetime(2017, 12, 1, 9, 0),
     )
     time_coord, _ = result[0]
     self.assertEqual(iris_time_to_datetime(time_coord)[0],
                      datetime(2017, 12, 1, 14, 0))
     # A one-hour bound period ending at the time point.
     point = time_coord.points[0]
     self.assertEqual(time_coord.bounds[0][0], point - 3600)
     self.assertEqual(time_coord.bounds[0][1], point)
コード例 #8
0
 def test_time_bounds_wrong_order(self):
     """Test time bounds are correctly applied even if supplied in the wrong
     order"""
     # Bounds supplied (later, earlier); they should come out sorted.
     result = construct_scalar_time_coords(
         datetime(2017, 12, 1, 14, 0),
         (datetime(2017, 12, 1, 14, 0), datetime(2017, 12, 1, 13, 0)),
         datetime(2017, 12, 1, 9, 0),
     )
     time_coord, _ = result[0]
     self.assertEqual(iris_time_to_datetime(time_coord)[0],
                      datetime(2017, 12, 1, 14, 0))
     point = time_coord.points[0]
     self.assertEqual(time_coord.bounds[0][0], point - 3600)
     self.assertEqual(time_coord.bounds[0][1], point)
コード例 #9
0
def test_acclen_mismatch_error():
    """Test the process function with mismatched accumulation lengths"""
    rain, snow = setup_cubes(name="thickness_of_{phase}fall_amount")
    # Rebuild the snow cube's time coordinates with different bounds so the
    # two cubes no longer match.
    replacement_coords = construct_scalar_time_coords(
        [cell.point for cell in snow.coord("time").cells()],
        (datetime(2017, 11, 10, 1, 0), datetime(2017, 11, 10, 4, 0)),
        snow.coord("forecast_reference_time").cell(0).point,
    )
    for coord, _ in replacement_coords:
        snow.replace_coord(coord)
    with pytest.raises(
        ValueError, match="Rain and snow cubes do not have the same time coord"
    ):
        SnowFraction()(iris.cube.CubeList([rain, snow]))
コード例 #10
0
ファイル: test_ModalCode.py プロジェクト: nivnac/improver
def wxcode_series_fixture(data, cube_type,
                          offset_reference_times: bool) -> list:
    """Generate a time series of weather code cubes for combination to create
    a period representative code. When offset_reference_times is set, each
    successive cube will have a reference time one hour older.

    Returns a list of weather-code cubes, one per entry in ``data``
    (gridded or spot depending on ``cube_type``).  Note the return
    annotation was previously ``Cube`` although the function returns the
    ``wxcubes`` list.
    """

    time = TARGET_TIME

    ntimes = len(data)
    wxcubes = []

    for i in range(ntimes):
        # Each successive cube is valid one hour earlier, covering the
        # preceding one-hour period.
        wxtime = time - timedelta(hours=i)
        wxbounds = [wxtime - timedelta(hours=1), wxtime]
        if offset_reference_times:
            wxfrt = time - timedelta(hours=18) - timedelta(hours=i)
        else:
            wxfrt = time - timedelta(hours=18)
        # All-ones 2x2 field with one point carrying this step's test code.
        wxdata = np.ones((2, 2), dtype=np.int8)
        wxdata[0, 0] = data[i]

        if cube_type == "gridded":
            wxcubes.append(
                set_up_wxcube(data=wxdata,
                              time=wxtime,
                              time_bounds=wxbounds,
                              frt=wxfrt))
        else:
            # Spot cube: times are attached as scalar coordinates rather
            # than via the gridded cube set-up helper.
            time_coords = construct_scalar_time_coords(wxtime, wxbounds, wxfrt)
            time_coords = [crd for crd, _ in time_coords]
            latitudes = np.array([50, 52, 54, 56])
            longitudes = np.array([-4, -2, 0, 2])
            altitudes = wmo_ids = unique_site_id = np.arange(4)
            unique_site_id_key = "met_office_site_id"
            wxcubes.append(
                build_spotdata_cube(
                    wxdata.flatten(),
                    "weather_code",
                    1,
                    altitudes,
                    latitudes,
                    longitudes,
                    wmo_ids,
                    unique_site_id=unique_site_id,
                    unique_site_id_key=unique_site_id_key,
                    scalar_coords=time_coords,
                ))
    return wxcubes
コード例 #11
0
def blended_spot_median_spot_fixture(spot_template):
    """Spot temperature cube from blend"""
    blended = spot_template.copy()
    blended.attributes = {
        "source": "IMPROVER",
        "institution": "Met Office",
        "title": "IMPROVER Post-Processed Multi-Model Blend UK Spot Values",
        "mosg__model_configuration": "uk_det uk_ens",
        "mosg__model_run": "uk_det:20210203T0900Z:\nuk_ens:20210203T0700Z:",
    }
    # Attach scalar validity time and a renamed blend_time coordinate;
    # the forecast_period element is not used.
    coord_pairs = construct_scalar_time_coords(
        time=datetime(2021, 2, 3, 14),
        time_bounds=None,
        frt=datetime(2021, 2, 3, 10))
    time = coord_pairs[0][0]
    blend_time = coord_pairs[1][0]
    blend_time.rename("blend_time")
    blended.add_aux_coord(time)
    blended.add_aux_coord(blend_time)
    return blended
コード例 #12
0
    def test_scalar_coords(self):
        """Test additional scalar coordinates"""
        pairs = construct_scalar_time_coords(
            datetime(2015, 11, 23, 4, 30), None,
            datetime(2015, 11, 22, 22, 30))
        time_coord, frt_coord, fp_coord = (crd for crd, _ in pairs)

        result = build_spotdata_cube(
            np.ones((2, 4), dtype=np.float32),
            *self.args,
            scalar_coords=[time_coord, frt_coord, fp_coord],
            neighbour_methods=self.neighbour_methods,
        )

        expected = (("time", time_coord),
                    ("forecast_reference_time", frt_coord),
                    ("forecast_period", fp_coord))
        for coord_name, source_coord in expected:
            self.assertEqual(result.coord(coord_name).points[0],
                             source_coord.points[0])
コード例 #13
0
def spot_timezone_fixture(spot_template):
    """Spot data on local time-zones
    (no forecast_period, forecast_reference_time matches spatial dimension)"""
    cube = spot_template.copy()
    cube.attributes = {
        "source": "Met Office Unified Model",
        "institution": "Met Office",
        "title": "Post-Processed MOGREPS-G Model Forecast Global Spot Values",
        "mosg__model_configuration": "gl_ens",
    }
    coord_pairs = construct_scalar_time_coords(
        time=datetime(2021, 2, 3, 14),
        time_bounds=None,
        frt=datetime(2021, 2, 3, 10))
    time_source_coord = coord_pairs[0][0]
    frt_coord = coord_pairs[1][0]
    cube.add_aux_coord(frt_coord)

    # Broadcast the scalar validity time along the site dimension so each
    # site carries its own time value.
    (spatial_index, ) = cube.coord_dims("latitude")
    time_coord = iris.coords.AuxCoord(
        np.full(cube.shape, fill_value=time_source_coord.points),
        standard_name=time_source_coord.standard_name,
        units=time_source_coord.units,
    )
    cube.add_aux_coord(time_coord, spatial_index)

    # Scalar local-timezone coordinate built to the project's declared
    # units/calendar/dtype standards.
    standards = TIME_COORDS["time_in_local_timezone"]
    local_time_units = cf_units.Unit(
        standards.units, calendar=standards.calendar)
    timezone_points = np.array(
        np.round(local_time_units.date2num(datetime(2021, 2, 3, 15))),
        dtype=standards.dtype,
    )
    local_time_coord = iris.coords.AuxCoord(
        timezone_points,
        long_name="time_in_local_timezone",
        units=local_time_units,
    )
    cube.add_aux_coord(local_time_coord)
    return cube
コード例 #14
0
ファイル: conftest.py プロジェクト: tjtg/improver
def truth_spot(truth_grid):
    """Build spot truth cubes for two validity days and merge them."""
    truth_data_spot = truth_grid[0, ...].data.reshape((2, 9))
    truths_spot_list = CubeList()
    for day in range(5, 7):
        # Truths use identical validity and reference times (zero lead time).
        scalar_coords = [
            pair[0] for pair in construct_scalar_time_coords(
                datetime(2017, 11, day, 4, 0), None,
                datetime(2017, 11, day, 4, 0))
        ]
        truths_spot_list.append(
            build_spotdata_cube(
                truth_data_spot,
                name="probability_of_air_temperature_above_threshold",
                units="1",
                altitude=_dummy_point_locations,
                latitude=_dummy_point_locations,
                longitude=_dummy_point_locations,
                wmo_id=_dummy_string_ids,
                additional_dims=[_threshold_coord],
                scalar_coords=scalar_coords,
            )
        )
    return truths_spot_list.merge_cube()
コード例 #15
0
    def setUp(self):
        """
        Set up cubes for use in testing SpotLapseRateAdjust. Inputs are
        envisaged as follows:

        Gridded

         Lapse rate  Orography  Temperatures (not used directly)
          (x DALR)

            A B C      A B C        A   B   C

        a   2 1 1      1 1 1       270 270 270
        b   1 2 1      1 4 1       270 280 270
        c   1 1 2      1 1 1       270 270 270

        Spot
        (note the neighbours are identified with the A-C, a-c indices above)

         Site  Temperature Altitude  Nearest    DZ   MinDZ      DZ
                                     neighbour       neighbour

          0        280        3      Ac         2    Bb         -1
          1        270        4      Bb         0    Bb          0
          2        280        0      Ca        -1    Ca         -1


        """
        # Set up lapse rate cube
        lapse_rate_data = np.ones(9).reshape(3, 3).astype(np.float32) * DALR
        lapse_rate_data[0, 2] = 2 * DALR
        lapse_rate_data[1, 1] = 2 * DALR
        lapse_rate_data[2, 0] = 2 * DALR
        self.lapse_rate_cube = set_up_variable_cube(lapse_rate_data,
                                                    name="lapse_rate",
                                                    units="K m-1",
                                                    spatial_grid="equalarea")
        # Hash of the grid coordinates; attached below so the spot cubes are
        # marked as derived from this particular grid.
        diagnostic_cube_hash = create_coordinate_hash(self.lapse_rate_cube)

        # Set up neighbour and spot diagnostic cubes
        y_coord, x_coord = construct_yx_coords(3, 3, "equalarea")
        y_coord = y_coord.points
        x_coord = x_coord.points

        # neighbours, each group is for a point under two methods, e.g.
        # [ 0.  0.  0.] is the nearest point to the first spot site, whilst
        # [ 1.  1. -1.] is the nearest point with minimum height difference.
        neighbours = np.array([
            [[0.0, 1.0, 2.0], [0.0, 1.0, 2.0], [2.0, 0.0, -1.0]],
            [[1.0, 1.0, 2.0], [1.0, 1.0, 2.0], [-1.0, 0.0, -1.0]],
        ])
        altitudes = np.array([3, 4, 0])
        # pylint: disable=unsubscriptable-object
        latitudes = np.array([y_coord[0], y_coord[1], y_coord[2]])
        longitudes = np.array([x_coord[0], x_coord[1], x_coord[2]])
        wmo_ids = np.arange(3)
        grid_attributes = ["x_index", "y_index", "vertical_displacement"]
        neighbour_methods = ["nearest", "nearest_minimum_dz"]
        self.neighbour_cube = build_spotdata_cube(
            neighbours,
            "grid_neighbours",
            1,
            altitudes,
            latitudes,
            longitudes,
            wmo_ids,
            grid_attributes=grid_attributes,
            neighbour_methods=neighbour_methods,
        )
        self.neighbour_cube.attributes[
            "model_grid_hash"] = diagnostic_cube_hash

        # Reuse the gridded cube's validity and reference times so the spot
        # cubes share the same scalar time coordinates.
        (time, ) = iris_time_to_datetime(self.lapse_rate_cube.coord("time"))
        (frt, ) = iris_time_to_datetime(
            self.lapse_rate_cube.coord("forecast_reference_time"))
        time_bounds = None

        time_coords = construct_scalar_time_coords(time, time_bounds, frt)
        time_coords = [item[0] for item in time_coords]

        # This temperature cube is set up with the spot sites having obtained
        # their temperature values from the nearest grid sites.
        temperatures_nearest = np.array([280, 270, 280])
        self.spot_temperature_nearest = build_spotdata_cube(
            temperatures_nearest,
            "air_temperature",
            "K",
            altitudes,
            latitudes,
            longitudes,
            wmo_ids,
            scalar_coords=time_coords,
        )
        self.spot_temperature_nearest.attributes[
            "model_grid_hash"] = diagnostic_cube_hash

        # This temperature cube is set up with the spot sites having obtained
        # their temperature values from the nearest minimum vertical
        # displacment grid sites. The only difference here is for site 0, which
        # now gets its temperature from Bb (see doc-string above).
        temperatures_mindz = np.array([270, 270, 280])
        self.spot_temperature_mindz = build_spotdata_cube(
            temperatures_mindz,
            "air_temperature",
            "K",
            altitudes,
            latitudes,
            longitudes,
            wmo_ids,
            scalar_coords=time_coords,
        )
        self.spot_temperature_mindz.attributes[
            "model_grid_hash"] = diagnostic_cube_hash
コード例 #16
0
ファイル: test_ApplyEMOS.py プロジェクト: nivnac/improver
    def setUp(self):
        """Set up some "uncalibrated forecast" inputs"""
        attributes = {
            "title": "MOGREPS-UK Forecast",
            "source": "Met Office Unified Model",
            "institution": "Met Office",
        }

        # Three realizations, each a uniform 3x3 field.
        forecast = np.array(
            [
                np.full((3, 3), 10.4),
                np.full((3, 3), 10.8),
                np.full((3, 3), 10.1)
            ],
            dtype=np.float32,
        )
        self.realizations = set_up_variable_cube(forecast,
                                                 units="degC",
                                                 attributes=attributes)

        percentiles = np.array(
            [
                np.full((3, 3), 10.2),
                np.full((3, 3), 10.4),
                np.full((3, 3), 10.6)
            ],
            dtype=np.float32,
        )
        self.percentiles = set_up_percentile_cube(
            percentiles,
            np.array([25, 50, 75], dtype=np.float32),
            units="degC",
            attributes=attributes,
        )

        probabilities = np.array(
            [np.full((3, 3), 1),
             np.full((3, 3), 0.9),
             np.full((3, 3), 0)],
            dtype=np.float32,
        )
        self.probabilities = set_up_probability_cube(
            probabilities,
            np.array([9, 10, 11], dtype=np.float32),
            threshold_units="degC",
            attributes=attributes,
        )

        # Calibration coefficients built from the realizations forecast.
        # NOTE(review): [0, 1, 0, 1] presumably the EMOS coefficient values —
        # confirm against build_coefficients_cubelist.
        self.coefficients = build_coefficients_cubelist(
            self.realizations, [0, 1, 0, 1], CubeList([self.realizations]))

        self.null_percentiles_expected_mean = np.mean(self.percentiles.data)
        self.null_percentiles_expected = np.array([
            np.full((3, 3), 10.265101),
            np.full((3, 3), 10.4),
            np.full((3, 3), 10.534898),
        ])
        self.alternative_percentiles = [25.0, 50.0, 75.0]

        land_sea_data = np.array([[1, 1, 0], [1, 1, 0], [1, 0, 0]],
                                 dtype=np.int32)
        self.land_sea_mask = set_up_variable_cube(land_sea_data,
                                                  name="land_binary_mask",
                                                  units="1")

        # Generate site forecast and additional predictor cubes.
        data = np.tile([1.6, 1.3, 1.4, 1.1], (4, 1))
        altitude = np.array([10, 20, 30, 40])
        latitude = np.linspace(58.0, 59.5, 4)
        longitude = np.linspace(-0.25, 0.5, 4)
        wmo_id = ["03001", "03002", "03003", "03004"]
        time_coords = construct_scalar_time_coords(
            datetime.datetime(2017, 11, 5, 4, 0),
            None,
            datetime.datetime(2017, 11, 5, 0, 0),
        )
        time_coords = [t[0] for t in time_coords]
        realization_coord = [
            iris.coords.DimCoord(np.arange(1, 5), standard_name="realization")
        ]
        self.realizations_spot_cube = build_spotdata_cube(
            data,
            "air_temperature",
            "degC",
            altitude,
            latitude,
            longitude,
            wmo_id,
            scalar_coords=time_coords,
            additional_dims=realization_coord,
        )

        self.realizations_spot_cube.attributes.update(
            MANDATORY_ATTRIBUTE_DEFAULTS)

        # Altitude ancillary cube: copy one realization slice, replace its
        # data with the altitude coordinate points, then strip the
        # coordinates that would clash with its role as a static predictor.
        self.spot_altitude_cube = self.realizations_spot_cube[0].copy(
            self.realizations_spot_cube.coord("altitude").points)
        self.spot_altitude_cube.rename("altitude")
        self.spot_altitude_cube.units = "m"
        for coord in [
                "altitude",
                "forecast_period",
                "forecast_reference_time",
                "realization",
                "time",
        ]:
            self.spot_altitude_cube.remove_coord(coord)

        # Spot coefficients include the altitude cube as an extra predictor,
        # hence the two-element second coefficient — TODO confirm semantics.
        self.spot_coefficients = build_coefficients_cubelist(
            self.realizations_spot_cube,
            [0, [0.9, 0.1], 0, 1],
            CubeList([self.realizations_spot_cube, self.spot_altitude_cube]),
        )
コード例 #17
0
ファイル: helper_functions.py プロジェクト: zfan001/improver
    def setUp(self):
        """Set up temperature and wind speed cubes for testing."""
        super().setUp()
        frt_dt = datetime.datetime(2017, 11, 10, 0, 0)
        time_dt = datetime.datetime(2017, 11, 10, 4, 0)

        base_data = np.array(
            [
                [[0.3, 1.1, 2.6], [4.2, 5.3, 5.9], [7.1, 8.2, 8.8]],
                [[0.7, 2.0, 2.9], [4.3, 5.6, 6.4], [7.0, 7.0, 9.2]],
                [[2.1, 3.0, 3.1], [4.8, 5.0, 6.1], [7.9, 8.1, 8.9]],
            ],
            dtype=np.float32,
        )
        temperature_data = Unit("Celsius").convert(base_data, "Kelvin")
        self.current_temperature_forecast_cube = set_up_variable_cube(
            temperature_data,
            units="Kelvin",
            realizations=[0, 1, 2],
            time=time_dt,
            frt=frt_dt,
            attributes=MANDATORY_ATTRIBUTE_DEFAULTS,
        )

        # Shift both times back five days so the historic data pre-dates
        # the current forecast.
        time_dt = time_dt - datetime.timedelta(days=5)
        frt_dt = frt_dt - datetime.timedelta(days=5)

        # Create historic forecasts and truth
        self.historic_forecasts = _create_historic_forecasts(
            temperature_data, time_dt, frt_dt, realizations=[0, 1, 2])
        self.truth = _create_truth(temperature_data, time_dt)

        # Create a combined list of historic forecasts and truth
        self.combined = self.historic_forecasts + self.truth

        # Create the historic and truth cubes
        self.historic_temperature_forecast_cube = self.historic_forecasts.merge_cube(
        )
        self.temperature_truth_cube = self.truth.merge_cube()

        # Create a cube for testing wind speed.
        self.current_wind_speed_forecast_cube = set_up_variable_cube(
            base_data,
            name="wind_speed",
            units="m s-1",
            realizations=[0, 1, 2],
            attributes=MANDATORY_ATTRIBUTE_DEFAULTS,
        )

        self.historic_wind_speed_forecast_cube = _create_historic_forecasts(
            base_data,
            time_dt,
            frt_dt,
            realizations=[0, 1, 2],
            name="wind_speed",
            units="m s-1",
        ).merge_cube()

        self.wind_speed_truth_cube = _create_truth(base_data,
                                                   time_dt,
                                                   name="wind_speed",
                                                   units="m s-1").merge_cube()

        # Set up another set of cubes which have a halo of zeros round the
        # original data. This data will be masked out in tests using a
        # landsea_mask
        base_data = np.pad(base_data, ((0, 0), (1, 1), (1, 1)),
                           mode="constant")
        temperature_data = Unit("Celsius").convert(base_data, "Kelvin")

        # Create historic forecasts and truth
        self.historic_forecasts_halo = _create_historic_forecasts(
            temperature_data, time_dt, frt_dt, realizations=[0, 1, 2])
        self.truth_halo = _create_truth(temperature_data, time_dt)

        # Create the historic and truth cubes
        self.historic_temperature_forecast_cube_halo = (
            self.historic_forecasts_halo.merge_cube())
        self.temperature_truth_cube_halo = self.truth_halo.merge_cube()

        # Create a cube for testing wind speed.
        self.historic_wind_speed_forecast_cube_halo = _create_historic_forecasts(
            base_data,
            time_dt,
            frt_dt,
            realizations=[0, 1, 2],
            name="wind_speed",
            units="m s-1",
        ).merge_cube()

        self.wind_speed_truth_cube_halo = _create_truth(
            base_data, time_dt, name="wind_speed", units="m s-1").merge_cube()

        # Build a 2-realization x 6-day series of spot forecasts, later
        # split into historic (first 5 days) and current (day 6) cubes.
        data = np.array([1.6, 1.3, 1.4, 1.1])
        altitude = np.array([10, 20, 30, 40])
        latitude = np.linspace(58.0, 59.5, 4)
        longitude = np.linspace(-0.25, 0.5, 4)
        wmo_id = ["03001", "03002", "03003", "03004"]
        forecast_spot_cubes = iris.cube.CubeList()
        for realization in range(1, 3):
            realization_coord = [
                iris.coords.DimCoord(realization, standard_name="realization")
            ]
            for day in range(5, 11):
                time_coords = construct_scalar_time_coords(
                    datetime.datetime(2017, 11, day, 4, 0),
                    None,
                    datetime.datetime(2017, 11, day, 0, 0),
                )
                time_coords = [t[0] for t in time_coords]
                forecast_spot_cubes.append(
                    build_spotdata_cube(
                        data + 0.2 * day,
                        "air_temperature",
                        "degC",
                        altitude,
                        latitude,
                        longitude,
                        wmo_id,
                        scalar_coords=time_coords + realization_coord,
                    ))
        forecast_spot_cube = forecast_spot_cubes.merge_cube()

        self.historic_forecast_spot_cube = forecast_spot_cube[:, :5, :]
        self.historic_forecast_spot_cube.convert_units("Kelvin")
        self.historic_forecast_spot_cube.attributes = MANDATORY_ATTRIBUTE_DEFAULTS

        self.current_forecast_spot_cube = forecast_spot_cube[:, 5, :]
        self.current_forecast_spot_cube.convert_units("Kelvin")
        self.current_forecast_spot_cube.attributes = MANDATORY_ATTRIBUTE_DEFAULTS

        # Spot truth: one realization of the historic forecast, offset by 1.0.
        self.truth_spot_cube = self.historic_forecast_spot_cube[0].copy()
        self.truth_spot_cube.remove_coord("realization")
        self.truth_spot_cube.data = self.truth_spot_cube.data + 1.0
コード例 #18
0
ファイル: test_ModalCode.py プロジェクト: tjtg/improver
def wxcode_series_fixture(
    data,
    cube_type,
    offset_reference_times: bool,
    model_id_attr: bool,
    record_run_attr: bool,
) -> Tuple[bool, bool, bool, CubeList]:
    """Generate a time series of weather code cubes for combination to create
    a period representative code. When offset_reference_times is set, each
    successive cube will have a reference time one hour older.

    Returns the model_id_attr, record_run_attr and offset_reference_times
    flags together with the generated CubeList.  Note the annotation
    previously read ``Tuple[bool, CubeList]`` although four elements are
    returned.
    """

    time = TARGET_TIME

    ntimes = len(data)
    wxcubes = CubeList()

    for i in range(ntimes):
        # Each successive cube is valid one hour earlier, covering the
        # preceding one-hour period.
        wxtime = time - timedelta(hours=i)
        wxbounds = [wxtime - timedelta(hours=1), wxtime]
        if offset_reference_times:
            wxfrt = time - timedelta(hours=18) - timedelta(hours=i)
        else:
            wxfrt = time - timedelta(hours=18)
        # All-ones 2x2 field with one point carrying this step's test code.
        wxdata = np.ones((2, 2), dtype=np.int8)
        wxdata[0, 0] = data[i]

        if cube_type == "gridded":
            wxcubes.append(
                set_up_wxcube(data=wxdata, time=wxtime, time_bounds=wxbounds, frt=wxfrt)
            )
        else:
            # Spot cube: times are attached as scalar coordinates rather
            # than via the gridded cube set-up helper.
            time_coords = construct_scalar_time_coords(wxtime, wxbounds, wxfrt)
            time_coords = [crd for crd, _ in time_coords]
            latitudes = np.array([50, 52, 54, 56])
            longitudes = np.array([-4, -2, 0, 2])
            altitudes = wmo_ids = unique_site_id = np.arange(4)
            unique_site_id_key = "met_office_site_id"
            wxcubes.append(
                build_spotdata_cube(
                    wxdata.flatten(),
                    "weather_code",
                    1,
                    altitudes,
                    latitudes,
                    longitudes,
                    wmo_ids,
                    unique_site_id=unique_site_id,
                    unique_site_id_key=unique_site_id_key,
                    scalar_coords=time_coords,
                )
            )

        # Add a blendtime coordinate as UK weather symbols are constructed
        # from model blended data.
        blend_time = wxcubes[-1].coord("forecast_reference_time").copy()
        blend_time.rename("blend_time")
        wxcubes[-1].add_aux_coord(blend_time)

        # The first cube carries both model identifiers; later cubes carry
        # only "uk_ens".
        if model_id_attr:
            if i == 0:
                wxcubes[-1].attributes.update({MODEL_ID_ATTR: "uk_det uk_ens"})
            else:
                wxcubes[-1].attributes.update({MODEL_ID_ATTR: "uk_ens"})

        if record_run_attr:
            ukv_time = wxfrt - timedelta(hours=1)
            enukx_time = wxfrt - timedelta(hours=3)
            if i == 0:
                wxcubes[-1].attributes.update(
                    {
                        RECORD_RUN_ATTR: f"uk_det:{ukv_time:{TIME_FORMAT}}:\nuk_ens:{enukx_time:{TIME_FORMAT}}:"  # noqa: E501
                    }
                )
            else:
                wxcubes[-1].attributes.update(
                    {RECORD_RUN_ATTR: f"uk_ens:{enukx_time:{TIME_FORMAT}}:"}
                )

    return model_id_attr, record_run_attr, offset_reference_times, wxcubes