Example #1
    def _create_output_cube(
        self, cube: Cube, advected_data: ndarray, timestep: timedelta
    ) -> Cube:
        """
        Create a cube and appropriate metadata to contain the advected forecast

        Args:
            cube:
                Source cube (before advection)
            advected_data:
                Advected data
            timestep:
                Time difference between the advected output and the source

        Returns:
            The output cube
        """
        attributes = generate_mandatory_attributes([cube])
        if "institution" in cube.attributes.keys():
            attributes["source"] = "{} Nowcast".format(attributes["institution"])
        else:
            attributes["source"] = "Nowcast"
        advected_cube = create_new_diagnostic_cube(
            cube.name(), cube.units, cube, attributes, data=advected_data
        )
        amend_attributes(advected_cube, self.attributes_dict)
        set_history_attribute(advected_cube, "Nowcast")

        self._update_time(cube.coord("time").copy(), advected_cube, timestep)
        self._add_forecast_reference_time(cube.coord("time").copy(), advected_cube)
        self._add_forecast_period(advected_cube, timestep)

        return advected_cube
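Every example in this collection leans on the same two helpers: generate_mandatory_attributes derives a consensus set of mandatory attributes (title, source, institution) from the input cubes, and create_new_diagnostic_cube stamps a new cube from a template plus that attribute dict. A minimal runnable sketch of the pattern, assuming the helpers live in improver.metadata.utilities and using a placeholder diagnostic name:

    import numpy as np
    from iris.cube import Cube
    from improver.metadata.utilities import (  # import path assumed
        create_new_diagnostic_cube,
        generate_mandatory_attributes,
    )

    # Stand-in input; real callers pass full IMPROVER diagnostics.
    source_cube = Cube(
        np.zeros((2, 2), dtype=np.float32),
        long_name="rainfall_rate",
        units="mm h-1",
    )
    # Consensus "title"/"source"/"institution" across the inputs, with defaults.
    attributes = generate_mandatory_attributes([source_cube])
    # New cube taking coordinates from the template and metadata from the dict.
    new_cube = create_new_diagnostic_cube(
        "my_diagnostic",  # hypothetical diagnostic name
        "1",
        source_cube,
        attributes,
        data=np.ones((2, 2), dtype=np.float32),
    )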
Example #2
    def create_wet_bulb_temperature_cube(self, temperature: Cube,
                                         relative_humidity: Cube,
                                         pressure: Cube) -> Cube:
        """
        Creates a cube of wet bulb temperature values

        Args:
            temperature:
                Cube of air temperatures.
            relative_humidity:
                Cube of relative humidities.
            pressure:
                Cube of air pressures.

        Returns:
            Cube of wet bulb temperature (K).
        """
        temperature.convert_units("K")
        relative_humidity.convert_units(1)
        pressure.convert_units("Pa")
        wbt_data = self._calculate_wet_bulb_temperature(
            pressure.data, relative_humidity.data, temperature.data)

        attributes = generate_mandatory_attributes(
            [temperature, relative_humidity, pressure])
        wbt = create_new_diagnostic_cube("wet_bulb_temperature",
                                         "K",
                                         temperature,
                                         attributes,
                                         data=wbt_data)
        return wbt
Example #3
    def _create_template_cube(self, cube):
        """
        Create a template cube to store the timezone masks. This cube has only
        one scalar coordinate which is time, denoting when it is valid; this is
        only relevant if using daylight savings. The attribute
        includes_daylight_savings is set to indicate this.

        Args:
            cube (iris.cube.Cube):
                A cube with the desired grid from which coordinates are taken
                for inclusion in the template.
        Returns:
            iris.cube.Cube:
                A template cube in which each timezone mask can be stored.
        """
        time_point = np.array(self.time.timestamp(), dtype=np.int64)
        time_coord = iris.coords.DimCoord(
            time_point,
            "time",
            units=Unit("seconds since 1970-01-01 00:00:00", calendar="gregorian"),
        )

        for crd in cube.coords(dim_coords=False):
            cube.remove_coord(crd)
        cube.add_aux_coord(time_coord)

        attributes = generate_mandatory_attributes([cube])
        attributes["includes_daylight_savings"] = str(self.include_dst)

        return create_new_diagnostic_cube(
            "timezone_mask", 1, cube, attributes, dtype=np.int32
        )
Example #4
    def create_symbol_cube(cubes):
        """
        Create an empty weather symbol cube

        Args:
            cubes (list or iris.cube.CubeList):
                List of input cubes used to generate weather symbols
        Returns:
            iris.cube.Cube:
                A cube with suitable metadata to describe the weather symbols
                that will fill it
        """
        threshold_coord = find_threshold_coordinate(cubes[0])
        template_cube = next(cubes[0].slices_over([threshold_coord])).copy()
        # remove coordinates and bounds that do not apply to weather symbols
        template_cube.remove_coord(threshold_coord)
        for coord in template_cube.coords():
            if coord.name() in ['forecast_period', 'time']:
                coord.bounds = None

        attributes = generate_mandatory_attributes(cubes)
        symbols = create_new_diagnostic_cube(
            "weather_code",
            "1",
            template_cube,
            attributes,
            optional_attributes=weather_code_attributes(),
            dtype=np.int32)
        return symbols
Example #5
    def _calculate_snow_fraction(self) -> Cube:
        """
        Calculates the snow fraction data and interpolates to fill in the missing points.

        Returns:
            Snow fraction cube.
        """
        with np.errstate(divide="ignore", invalid="ignore"):
            snow_fraction = self.snow.data / (self.rain.data + self.snow.data)
        snow_fraction_cube = create_new_diagnostic_cube(
            "snow_fraction",
            "1",
            template_cube=self.rain,
            mandatory_attributes=generate_mandatory_attributes(
                iris.cube.CubeList([self.rain, self.snow]),
                model_id_attr=self.model_id_attr,
            ),
            data=snow_fraction,
        )

        spatial_dims = [
            snow_fraction_cube.coord(axis=n).name() for n in ["y", "x"]
        ]
        snow_fraction_interpolated = iris.cube.CubeList()
        for snow_fraction_slice in snow_fraction_cube.slices(spatial_dims):
            snow_fraction_interpolated.append(
                snow_fraction_slice.copy(
                    interpolate_missing_data(snow_fraction_slice.data,
                                             method="nearest")))
        return snow_fraction_interpolated.merge_cube()
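The errstate guard above silences the 0/0 warnings where rain + snow is zero; those points come out as NaN and are the "missing points" the nearest-neighbour interpolation then fills. A small numpy illustration of where the gaps come from (interpolate_missing_data itself is IMPROVER's helper and is not reproduced here):

    import numpy as np

    rain = np.array([[0.0, 2.0], [1.0, 3.0]], dtype=np.float32)  # mm h-1
    snow = np.array([[0.0, 2.0], [3.0, 1.0]], dtype=np.float32)
    with np.errstate(divide="ignore", invalid="ignore"):
        frac = snow / (rain + snow)
    # frac[0, 0] is nan (0/0); interpolate_missing_data(frac, method="nearest")
    # would then fill it from the nearest valid neighbour.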
Example #6
    def process(self, cubes):
        """
        Calculate the convective ratio from the convective and dynamic components as:
            convective_ratio = convective / (convective + dynamic)

        If convective + dynamic is zero, then the resulting point is masked.

        Args:
            cubes (List[iris.cube.Cube]):
                A list containing the convective and dynamic components as
                iris.cube.Cube, named 'lwe_convective_precipitation_rate' and
                'lwe_stratiform_precipitation_rate'.

        Returns:
            iris.cube.Cube:
                Cube containing the convective ratio.
        """

        self._split_input(cubes)

        attributes = generate_mandatory_attributes([self.convective])
        output_cube = create_new_diagnostic_cube(
            "convective_ratio",
            "1",
            self.convective,
            attributes,
            data=self._convective_ratio(),
        )

        return output_cube
Example #7
def calculate_sleet_probability(prob_of_snow,
                                prob_of_rain):
    """
    This calculates the probability of sleet using the calculation:
    prob(sleet) = 1 - (prob(snow) + prob(rain))

    Args:
      prob_of_snow (iris.cube.Cube):
        Cube of the probability of snow.

      prob_of_rain (iris.cube.Cube):
        Cube of the probability of rain.

    Returns:
      iris.cube.Cube:
        Cube of the probability of sleet.

    Raises:
        ValueError: If the cube contains negative values for the
            probability of sleet.
    """
    ones = np.ones(prob_of_snow.shape, dtype="float32")
    sleet_prob = ones - (prob_of_snow.data + prob_of_rain.data)
    if np.any(sleet_prob < 0.0):
        msg = ("Negative values of sleet probability have been calculated.")
        raise ValueError(msg)

    # Derive the mandatory attributes from both input probability cubes
    mandatory_attributes = generate_mandatory_attributes([
        prob_of_rain, prob_of_snow])
    probability_of_sleet = create_new_diagnostic_cube(
        'probability_of_sleet', '1', prob_of_snow, mandatory_attributes,
        data=sleet_prob)
    return probability_of_sleet
Example #8
    def create_symbol_cube(self, cubes: Union[List[Cube], CubeList]) -> Cube:
        """
        Create an empty weather symbol cube

        Args:
            cubes:
                List of input cubes used to generate weather symbols

        Returns:
            A cube with suitable metadata to describe the weather symbols
            that will fill it. The data are fully masked on creation so that
            any unset points can be readily identified.
        """
        threshold_coord = find_threshold_coordinate(self.template_cube)
        template_cube = next(
            self.template_cube.slices_over([threshold_coord])).copy()
        # remove coordinates and bounds that do not apply to weather symbols
        template_cube.remove_coord(threshold_coord)

        mandatory_attributes = generate_mandatory_attributes(cubes)
        optional_attributes = weather_code_attributes()
        if self.model_id_attr:
            optional_attributes.update(
                update_model_id_attr_attribute(cubes, self.model_id_attr))

        symbols = create_new_diagnostic_cube(
            "weather_code",
            "1",
            template_cube,
            mandatory_attributes,
            optional_attributes=optional_attributes,
            data=np.ma.masked_all_like(template_cube.data).astype(np.int32),
        )
        return symbols
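np.ma.masked_all_like yields an array in which every element is masked, so any point the downstream logic never sets remains conspicuously masked. A quick check of that behaviour:

    import numpy as np

    template = np.zeros((2, 2), dtype=np.float32)
    data = np.ma.masked_all_like(template).astype(np.int32)
    assert data.mask.all()  # every point starts masked, i.e. "unset"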
Example #9
    def _reformat_analysis_cube(self, attribute_changes):
        """
        Add forecast reference time and forecast period coordinates (if they do
        not already exist) and nowcast attributes to analysis cube
        """
        coords = [coord.name() for coord in self.analysis_cube.coords()]
        if "forecast_reference_time" not in coords:
            frt_coord = self.analysis_cube.coord("time").copy()
            frt_coord.rename("forecast_reference_time")
            self.analysis_cube.add_aux_coord(frt_coord)
        if "forecast_period" not in coords:
            self.analysis_cube.add_aux_coord(
                AuxCoord(np.array([0], dtype=np.int32), "forecast_period", "seconds")
            )

        self.analysis_cube.attributes = generate_mandatory_attributes(
            [self.analysis_cube]
        )
        self.analysis_cube.attributes["source"] = "MONOW"
        self.analysis_cube.attributes[
            "title"
        ] = "MONOW Extrapolation Nowcast on UK 2 km Standard Grid"
        set_history_attribute(self.analysis_cube, "Nowcast")
        if attribute_changes is not None:
            amend_attributes(self.analysis_cube, attribute_changes)
Example #10
    def test_metadata(self):
        """Test that the metadata on the resulting cube is as expected"""
        expected_attributes = generate_mandatory_attributes([self.cube])
        result = self.plugin.process(self.cube)
        self.assertEqual(result.name(), self.cube.name() + "_integral")
        self.assertEqual(result.units, "{} m".format(self.cube.units))
        self.assertDictEqual(result.attributes, expected_attributes)
Example #11
def calculate_feels_like_temperature(temperature,
                                     wind_speed,
                                     relative_humidity,
                                     pressure,
                                     model_id_attr=None):
    """
    Calculates the feels like temperature using a combination of
    the wind chill index and Steadman's apparent temperature equation.

    Args:
        temperature (iris.cube.Cube):
            Cube of air temperatures
        wind_speed (iris.cube.Cube):
            Cube of 10m wind speeds
        relative_humidity (iris.cube.Cube):
            Cube of relative humidities
        pressure (iris.cube.Cube):
            Cube of air pressure
        model_id_attr (str):
            Name of the attribute used to identify the source model for
            blending.

    Returns:
        iris.cube.Cube:
            Cube of feels like temperatures in the same units as the input
            temperature cube.
    """
    t_cube = temperature.copy()
    t_cube.convert_units('degC')
    t_celsius = t_cube.data

    w_cube = wind_speed.copy()
    w_cube.convert_units('m s-1')
    p_cube = pressure.copy()
    p_cube.convert_units('Pa')
    rh_cube = relative_humidity.copy()
    rh_cube.convert_units('1')
    apparent_temperature = _calculate_apparent_temperature(
        t_celsius, w_cube.data, rh_cube.data, p_cube.data)

    w_cube.convert_units('km h-1')
    wind_chill = _calculate_wind_chill(t_celsius, w_cube.data)

    feels_like_temperature = _feels_like_temperature(t_celsius,
                                                     apparent_temperature,
                                                     wind_chill)

    attributes = generate_mandatory_attributes(
        [temperature, wind_speed, relative_humidity, pressure],
        model_id_attr=model_id_attr)
    feels_like_temperature_cube = create_new_diagnostic_cube(
        "feels_like_temperature",
        "degC",
        temperature,
        attributes,
        data=feels_like_temperature)
    feels_like_temperature_cube.convert_units(temperature.units)

    return feels_like_temperature_cube
Example #12
    def test_model_id_consensus(self):
        """Test model ID attribute can be specified and inherited"""
        expected_attributes = self.attributes.copy()
        expected_attributes["mosg__model_configuration"] = "uk_det"
        result = generate_mandatory_attributes(
            [self.t_cube, self.p_cube, self.rh_cube],
            model_id_attr="mosg__model_configuration")
        self.assertDictEqual(result, expected_attributes)
Example #13
    def test_missing_attribute(self):
        """Test defaults are triggered if a mandatory attribute is missing
        from one input"""
        expected_attributes = self.attributes.copy()
        expected_attributes["title"] = MANDATORY_ATTRIBUTE_DEFAULTS["title"]
        self.t_cube.attributes.pop("title")
        result = generate_mandatory_attributes(
            [self.t_cube, self.p_cube, self.rh_cube])
        self.assertDictEqual(result, expected_attributes)
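Examples #12, #13 and #17 together pin down the consensus rule: an attribute survives only if every input agrees on it, otherwise the mandatory default is used, and model_id_attr opts an extra attribute into the same treatment. An illustrative reimplementation of the core rule (a sketch, not the IMPROVER source; the default values shown are assumptions):

    # Sketch only; not the IMPROVER source. Default values are assumed.
    MANDATORY_ATTRIBUTE_DEFAULTS = {
        "title": "unknown",
        "source": "IMPROVER",
        "institution": "unknown",
    }

    def consensus_attributes(cubes):
        """Keep an attribute only where all inputs agree; else use the default."""
        result = {}
        for key, default in MANDATORY_ATTRIBUTE_DEFAULTS.items():
            values = {cube.attributes.get(key) for cube in cubes}
            agreed = len(values) == 1 and None not in values
            result[key] = values.pop() if agreed else default
        return result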
Example #14
    def _output_metadata(self) -> Tuple[Cube, Dict]:
        """Returns template cube and mandatory attributes for result"""
        threshold_coord = find_threshold_coordinate(self.cubes[0])
        template = next(self.cubes[0].slices_over(threshold_coord))
        template.remove_coord(threshold_coord)
        attributes = generate_mandatory_attributes(self.cubes)
        return template, attributes
Example #15
    def _apply_error_to_forecast(self, forecast_cube: Cube,
                                 error_percentiles_cube: Cube) -> Cube:
        """Apply the error distributions (as error percentiles) to the forecast cube.
        The result is a series (sub-ensemble) of values for each forecast realization.

        Note:

            Within the RainForests approach we work with an additive error correction
            as opposed to a multiplicative correction used in ECPoint. The advantage of
            using an additive error is that we are also able to calibrate zero-values in
            the input forecast.

        Warning:

            After applying the error distributions to the forecast cube, values outside
            the expected bounds of the forecast parameter can arise. These values occur
            when the input forecast value is between error thresholds and there exists a
            lower bound on the observable value (e.g. 0 in the case of rainfall).

            In this situation, error thresholds below the residual value (min(obs) - fcst)
            must have a probability of exceedance of 1, whereas error thresholds above
            this value can take on any value between [0, 1]. In the subsequent step where
            error percentile values are extracted, the linear interpolation in mapping from
            probabilities to percentiles can give percentile values that lie below the
            residual value; when these are applied to the forecast value, they result in
            forecast values outside the expected bounds of the forecast parameter in the
            resultant sub-ensemble.

            To address this, we remap all values outside the expected bounds to the
            nearest bound (e.g. negative values are mapped to 0 in the case of rainfall).

        Args:
            forecast_cube:
                Cube containing the forecast to be calibrated.
            error_percentiles_cube:
                Cube containing percentile values for the error distributions.

        Returns:
            Cube containing the forecast sub-ensembles.
        """
        # Apply the error_percentiles to the forecast_cube (additive correction)
        forecast_subensembles_data = (forecast_cube.data[:, np.newaxis] +
                                      error_percentiles_cube.data)
        # RAINFALL SPECIFIC IMPLEMENTATION:
        # As described above, we need to address value outside of expected bounds.
        # In the case of rainfall, we map all negative values to 0.
        forecast_subensembles_data = np.maximum(0.0,
                                                forecast_subensembles_data)
        # Return cube containing forecast subensembles
        return create_new_diagnostic_cube(
            name=forecast_cube.name(),
            units=forecast_cube.units,
            template_cube=error_percentiles_cube,
            mandatory_attributes=generate_mandatory_attributes([forecast_cube]),
            optional_attributes=forecast_cube.attributes,
            data=forecast_subensembles_data,
        )
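The additive correction and the bounds handling described above reduce to a broadcast add followed by a clamp. A minimal numeric sketch (values are made up for illustration):

    import numpy as np

    forecast = np.array([0.0, 0.5], dtype=np.float32)  # e.g. rainfall, mm h-1
    error_percentiles = np.array([[-0.2, 0.0, 0.3],    # percentile errors, point 0
                                  [-0.8, 0.1, 0.6]])   # percentile errors, point 1
    subensemble = forecast[:, np.newaxis] + error_percentiles  # additive correction
    subensemble = np.maximum(0.0, subensemble)  # clip to the lower bound (>= 0)
    # Point 1's -0.8 error would have given -0.3 mm h-1; it is remapped to 0.0.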
Example #16
    def process(self, cubes: CubeList, model_id_attr: Optional[str] = None) -> Cube:
        """
        From the supplied CAPE and precipitation-rate cubes, calculate a probability
        of lightning cube.

        Args:
            cubes:
                Cubes of CAPE and Precipitation rate.
            model_id_attr:
                The name of the dataset attribute to be used to identify the source
                model when blending data from different models.

        Returns:
            Cube of lightning data

        Raises:
            ValueError:
                If one of the cubes is not found or doesn't match the other
        """
        cape, precip = self._get_inputs(cubes)

        cape_true = LatitudeDependentThreshold(
            lambda lat: latitude_to_threshold(
                lat, midlatitude=350.0, tropics=500.0),
            threshold_units="J kg-1",
            comparison_operator=">",
        )(cape)

        precip_true = LatitudeDependentThreshold(
            lambda lat: latitude_to_threshold(
                lat, midlatitude=1.0, tropics=4.0),
            threshold_units="mm h-1",
            comparison_operator=">",
        )(precip)

        data = cape_true.data * precip_true.data

        cube = create_new_diagnostic_cube(
            name="probability_of_number_of_lightning_flashes_per_unit_area_above_threshold",
            units="1",
            template_cube=precip,
            data=data.astype(FLOAT_DTYPE),
            mandatory_attributes=generate_mandatory_attributes(
                cubes, model_id_attr=model_id_attr),
        )

        coord = DimCoord(
            np.array([0], dtype=FLOAT_DTYPE),
            units="m-2",
            long_name="number_of_lightning_flashes_per_unit_area",
            var_name="threshold",
            attributes={"spp__relative_to_threshold": "greater_than"},
        )
        cube.add_aux_coord(coord)

        return cube
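Multiplying the two thresholded fields acts as a logical AND on the binary exceedance data; a sketch of that combination step (values are illustrative):

    import numpy as np

    cape_exceeded = np.array([1, 1, 0], dtype=np.float32)    # CAPE above its threshold
    precip_exceeded = np.array([1, 0, 1], dtype=np.float32)  # precip rate above its threshold
    lightning_prob = cape_exceeded * precip_exceeded
    # -> [1., 0., 0.]: non-zero only where both criteria are met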
Example #17
    def test_no_consensus(self):
        """Test default values if input fields do not all agree"""
        self.t_cube.attributes = {
            "source": "Met Office Unified Model Version 1000",
            "institution": "BOM",
            "title": "UKV Model Forecast on 20 km Global Grid",
        }
        result = generate_mandatory_attributes(
            [self.t_cube, self.p_cube, self.rh_cube])
        self.assertDictEqual(result, MANDATORY_ATTRIBUTE_DEFAULTS)
Example #18
    def create_output_cube(self, cube: Cube, local_time: datetime) -> Cube:
        """
        Constructs the output cube

        Args:
            cube:
                Cube of data to extract timezone-offsets from. Must contain a time
                coord spanning all the timezones.
            local_time:
                The "local" time of the output cube as %Y%m%dT%H%MZ. This will form a
                scalar "time_in_local_timezone" coord on the output cube, while the
                "time" coord will be auxiliary to the spatial coords and will show the
                UTC time that matches the local_time at each point.

        Returns:
            The output cube, with a scalar "time_in_local_timezone" coord and
            a spatially varying "time" coord.
        """
        template_cube = next(cube.slices_over("time")).copy()
        template_cube.remove_coord("time")
        template_cube.remove_coord("forecast_period")
        output_cube = create_new_diagnostic_cube(
            template_cube.name(),
            template_cube.units,
            template_cube,
            generate_mandatory_attributes([template_cube]),
            optional_attributes=template_cube.attributes,
            data=self.output_data,
        )

        # Copy cell-methods from template_cube
        for cell_method in template_cube.cell_methods:
            output_cube.add_cell_method(cell_method)

        # Create a local time coordinate to help with plotting data.
        local_time_coord_standards = TIME_COORDS["time_in_local_timezone"]
        local_time_units = cf_units.Unit(
            local_time_coord_standards.units,
            calendar=local_time_coord_standards.calendar,
        )
        timezone_points = np.array(
            np.round(local_time_units.date2num(local_time)),
            dtype=local_time_coord_standards.dtype,
        )
        output_cube.add_aux_coord(
            AuxCoord(
                timezone_points,
                long_name="time_in_local_timezone",
                units=local_time_units,
            )
        )
        output_cube.add_aux_coord(
            AuxCoord(
                self.time_points,
                bounds=self.time_bounds,
                standard_name="time",
                units=self.time_units,
            ),
            [n + output_cube.ndim for n in [-2, -1]],
        )
        return output_cube
Example #19
    def _create_output_cube(self, template, data, points, bounds):
        """
        Populates a template cube with data from the integration

        Args:
            template (iris.cube.Cube):
                Copy of upper or lower bounds cube, based on direction of
                integration
            data (list or numpy.ndarray):
                Integrated data
            points (list or numpy.ndarray):
                Points values for the integrated coordinate. These will not
                match the template cube if any slices were skipped in the
                integration, and therefore are used to slice the template cube
                to match the data array.
            bounds (list or numpy.ndarray):
                Bounds values for the integrated coordinate

        Returns:
            iris.cube.Cube:
                The integrated cube, with dimensions ordered to match the
                input cube.
        """
        # extract required slices from template cube
        template = template.extract(
            iris.Constraint(coord_values={
                self.coord_name_to_integrate: lambda x: x in points
            }))

        # re-promote integrated coord to dimension coord if need be
        aux_coord_names = [coord.name() for coord in template.aux_coords]
        if self.coord_name_to_integrate in aux_coord_names:
            template = iris.util.new_axis(template,
                                          self.coord_name_to_integrate)

        # order dimensions on the template cube so that the integrated
        # coordinate is first (as this is the leading dimension on the
        # data array)
        enforce_coordinate_ordering(template, self.coord_name_to_integrate)

        # generate appropriate metadata for new cube
        attributes = generate_mandatory_attributes([template])
        coord_dtype = template.coord(self.coord_name_to_integrate).dtype
        name, units = self._generate_output_name_and_units()

        # create new cube from template
        integrated_cube = create_new_diagnostic_cube(name,
                                                     units,
                                                     template,
                                                     attributes,
                                                     data=np.array(data))

        integrated_cube.coord(self.coord_name_to_integrate).bounds = np.array(
            bounds).astype(coord_dtype)

        # re-order cube to match dimensions of input cube
        ordered_dimensions = get_dim_coord_names(self.input_cube)
        enforce_coordinate_ordering(integrated_cube, ordered_dimensions)
        return integrated_cube
Example #20
    def setUp(self):
        """Set up the plugin and cubes for testing."""
        super().setUp()
        frt_dt = datetime.datetime(2017, 11, 10, 0, 0)
        time_dt = datetime.datetime(2017, 11, 10, 4, 0)
        data = np.ones((3, 3), dtype=np.float32)
        self.historic_forecast = _create_historic_forecasts(
            data, time_dt, frt_dt,
        ).merge_cube()
        data_with_realizations = np.ones((3, 3, 3), dtype=np.float32)
        self.historic_forecast_with_realizations = _create_historic_forecasts(
            data_with_realizations, time_dt, frt_dt, realizations=[0, 1, 2],
        ).merge_cube()
        self.optimised_coeffs = np.array([0, 1, 2, 3], np.int32)

        self.distribution = "norm"
        self.desired_units = "degreesC"
        self.predictor = "mean"
        self.plugin = Plugin(
            distribution=self.distribution,
            desired_units=self.desired_units,
            predictor=self.predictor,
        )
        self.expected_frt = (
            self.historic_forecast.coord("forecast_reference_time").cell(-1).point
        )
        self.expected_x_coord_points = np.median(
            self.historic_forecast.coord(axis="x").points
        )
        self.historic_forecast.coord(axis="x").guess_bounds()
        self.expected_x_coord_bounds = np.array(
            [
                [
                    np.min(self.historic_forecast.coord(axis="x").bounds),
                    np.max(self.historic_forecast.coord(axis="x").bounds),
                ]
            ]
        )
        self.expected_y_coord_points = np.median(
            self.historic_forecast.coord(axis="y").points
        )
        self.historic_forecast.coord(axis="y").guess_bounds()
        self.expected_y_coord_bounds = np.array(
            [
                [
                    np.min(self.historic_forecast.coord(axis="y").bounds),
                    np.max(self.historic_forecast.coord(axis="y").bounds),
                ]
            ]
        )
        self.attributes = generate_mandatory_attributes([self.historic_forecast])
        self.attributes["diagnostic_standard_name"] = self.historic_forecast.name()
        self.attributes["distribution"] = self.distribution
        self.attributes["title"] = "Ensemble Model Output Statistics coefficients"
Example #21
    def _create_solar_time_cube(
        self,
        solar_time_data: ndarray,
        target_grid: Cube,
        time: datetime,
        new_title: Optional[str],
    ) -> Cube:
        """Create solar time cube for the specified valid time.

        Args:
            solar_time_data:
                Solar time data.
            target_grid:
                Cube containing spatial grid over which the solar time has been
                calculated.
            time:
                Time associated with the local solar time.
            new_title:
                New title for the output cube attributes. If None, this attribute is
                left out since it has no prescribed standard.

        Returns:
            Solar time data as an iris cube.
        """
        X_coord = target_grid.coord(axis="X")
        Y_coord = target_grid.coord(axis="Y")

        time_coord = AuxCoord(
            np.array(time.replace(tzinfo=timezone.utc).timestamp(),
                     dtype=np.int64),
            standard_name="time",
            units=cf_units.Unit(
                "seconds since 1970-01-01 00:00:00 UTC",
                calendar=cf_units.CALENDAR_STANDARD,
            ),
        )

        attrs = generate_mandatory_attributes([target_grid])
        attrs["source"] = "IMPROVER"
        if new_title is not None:
            attrs["title"] = new_title
        else:
            attrs.pop("title", None)

        solar_time_cube = Cube(
            solar_time_data.astype(np.float32),
            long_name=SOLAR_TIME_CF_NAME,
            units="hours",
            dim_coords_and_dims=[(Y_coord, 0), (X_coord, 1)],
            aux_coords_and_dims=[(time_coord, None)],
            attributes=attrs,
        )

        return solar_time_cube
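The time coordinate above is built from epoch seconds; replacing the tzinfo with timezone.utc matters because a naive datetime's timestamp() would otherwise be interpreted in the machine's local timezone. For example:

    from datetime import datetime, timezone
    import numpy as np

    time = datetime(2017, 11, 10, 4, 0)
    point = np.array(time.replace(tzinfo=timezone.utc).timestamp(), dtype=np.int64)
    # point == 1510286400, i.e. seconds since 1970-01-01 00:00:00 UTC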
Example #22
    def process(self, cube):
        """
        Calculate the convective ratio either for the underlying field e.g.
        precipitation rate, or using the differences between adjacent grid
        squares.

        If the differences between adjacent grid squares are used, the
        absolute differences are calculated first, and the difference cubes
        are then thresholded using a high and a low threshold. The
        thresholded difference cubes are summed to put them back onto the
        grid of the original cube. The convective ratio is then calculated
        by applying neighbourhood processing to the resulting cubes and
        dividing the high-threshold cube by the low-threshold cube.

        Args:
            cube (iris.cube.Cube):
                The cube from which the convective ratio will be calculated.

        Returns:
            iris.cube.Cube:
                Cube containing the convective ratio defined as the ratio
                between a cube with a high threshold applied and a cube with a
                low threshold applied.
        """
        cubelist = iris.cube.CubeList([])
        threshold_list = [self.lower_threshold, self.higher_threshold]
        if self.use_adjacent_grid_square_differences:
            for threshold in threshold_list:
                diff_cubelist = self.absolute_differences_between_adjacent_grid_squares(
                    cube)
                thresholded_cubes = self.iterate_over_threshold(
                    diff_cubelist, threshold)
                cubelist.append(
                    self.sum_differences_between_adjacent_grid_squares(
                        cube, thresholded_cubes))
        else:
            for threshold in threshold_list:
                cubelist.extend(self.iterate_over_threshold([cube], threshold))

        convective_ratios = self._calculate_convective_ratio(
            cubelist, threshold_list)

        attributes = generate_mandatory_attributes([cube])
        output_cube = create_new_diagnostic_cube("convective_ratio",
                                                 "1",
                                                 cube,
                                                 attributes,
                                                 data=convective_ratios)

        return output_cube
Example #23
    def process(self, cubes: Union[CubeList, List[Cube]]) -> Cube:
        """
        Derives the probability of a precipitation phase at the surface. If
        the snow-sleet falling-level is supplied, this is the probability of
        snow at (or below) the surface. If the sleet-rain falling-level is
        supplied, this is the probability of rain at (or above) the surface.
        If the hail-rain falling-level is supplied, this is the probability
        of rain from hail at (or above) the surface.

        Args:
            cubes:
                Contains cubes of the altitude of the phase-change level (this
                can be snow->sleet, hail->rain or sleet->rain) and the altitude
                of the orography.

        Returns:
            Cube containing the probability of a specific precipitation phase
            reaching the surface orography. If the falling_level_cube was
            snow->sleet, then this will be the probability of snow at the
            surface. If the falling_level_cube was sleet->rain, then this
            will be the probability of rain from sleet at the surface.
            If the falling_level_cube was hail->rain, then this
            will be the probability of rain from hail at the surface.
            The probabilities are categorical (1 or 0) allowing
            precipitation to be divided uniquely between snow, sleet and
            rain phases.
        """
        self._extract_input_cubes(cubes)
        processed_falling_level = iris.util.squeeze(
            self.get_discriminating_percentile(self.falling_level_cube))

        result_data = np.where(
            self.comparator(self.orography_cube.data,
                            processed_falling_level.data),
            1,
            0,
        ).astype(np.int8)
        mandatory_attributes = generate_mandatory_attributes(
            [self.falling_level_cube])

        cube = create_new_diagnostic_cube(
            f"probability_of_{self.param}_at_surface",
            Unit("1"),
            self.falling_level_cube,
            mandatory_attributes,
            data=result_data,
        )
        return cube
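The surface phase probability reduces to a point-wise comparison of the orography against the phase-change level, cast to categorical 0/1. A sketch of the np.where mechanics, with a hypothetical comparator choice (the plugin selects the comparator according to the phase):

    import numpy as np
    import operator

    orography = np.array([120.0, 350.0, 900.0])      # surface altitude, m
    falling_level = np.array([200.0, 200.0, 200.0])  # phase-change level altitude, m
    comparator = operator.gt                         # hypothetical choice for this sketch
    result = np.where(comparator(orography, falling_level), 1, 0).astype(np.int8)
    # -> [0, 1, 1]: categorical (1 or 0) at each point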
Example #24
    def _define_metadata(forecast_slice):
        """
        Define metadata that is specifically required for reliability table
        cubes, whilst ensuring any mandatory attributes are also populated.

        Args:
            forecast_slice (iris.cube.Cube):
                The source cube from which to get pre-existing metadata of use.
        Returns:
            dict:
                A dictionary of attributes that are appropriate for the
                reliability table cube.
        """
        attributes = generate_mandatory_attributes([forecast_slice])
        attributes["title"] = "Reliability calibration data table"
        return attributes
Example #25
    def _make_updraught_cube(self, data: np.ndarray) -> Cube:
        """Puts the data array into a CF-compliant cube"""
        attributes = {}
        if self.model_id_attr:
            attributes[self.model_id_attr] = self.precip.attributes[
                self.model_id_attr]
        cube = create_new_diagnostic_cube(
            "maximum_vertical_updraught",
            "m s-1",
            self.precip,
            mandatory_attributes=generate_mandatory_attributes(
                [self.precip, self.cape]),
            optional_attributes=attributes,
            data=data,
        )
        return cube
Example #26
    def _create_output_cube(self, orogenh_data, reference_cube):
        """Creates a cube containing orographic enhancement values in SI units.

        Args:
            orogenh_data (numpy.ndarray):
                Orographic enhancement value in mm h-1
            reference_cube (iris.cube.Cube):
                Cube with the correct time and forecast period coordinates on
                the UK standard grid

        Returns:
            iris.cube.Cube:
                Orographic enhancement cube (m s-1)
        """
        # create cube containing high resolution data in mm/h
        x_coord = self.topography.coord(axis="x")
        y_coord = self.topography.coord(axis="y")
        for coord in [x_coord, y_coord]:
            coord.points = coord.points.astype(np.float32)
            if coord.bounds is not None:
                coord.bounds = coord.bounds.astype(np.float32)

        aux_coords = []
        for coord in ["time", "forecast_reference_time", "forecast_period"]:
            aux_coords.append((reference_cube.coord(coord), None))

        attributes = generate_mandatory_attributes([reference_cube])
        attributes["title"] = "unknown"  # remove possible wrong grid info.
        for key in MOSG_GRID_ATTRIBUTES:
            try:
                attributes[key] = self.topography.attributes[key]
            except KeyError:
                pass

        orog_enhance_cube = iris.cube.Cube(
            orogenh_data,
            long_name="orographic_enhancement",
            units="mm h-1",
            attributes=attributes,
            dim_coords_and_dims=[(y_coord, 0), (x_coord, 1)],
            aux_coords_and_dims=aux_coords,
        )
        orog_enhance_cube.convert_units("m s-1")

        return orog_enhance_cube
Example #27
    def create_phase_change_level_cube(self, wbt, phase_change_level):
        """
        Populate output cube with phase change data

        Args:
            wbt (iris.cube.Cube):
                Wet bulb temperature cube on height levels
            phase_change_level (numpy.ndarray):
                Calculated phase change level in metres

        Returns:
            iris.cube.Cube:
                Cube of the altitude of the phase change level (m).
        """
        name = "altitude_of_{}_level".format(self.phase_change_name)
        attributes = generate_mandatory_attributes([wbt])
        template = next(wbt.slices_over(["height"])).copy()
        template.remove_coord("height")
        return create_new_diagnostic_cube(
            name, "m", template, attributes, data=phase_change_level
        )
Example #28
    def _create_daynight_mask(self, cube):
        """
        Create blank daynight mask cube

        Args:
            cube (iris.cube.Cube):
                cube with the times and coordinates required for mask

        Returns:
            iris.cube.Cube:
                Blank daynight mask cube. The resulting cube will have the
                same shape as the time, y and x coordinates; other
                coordinates will be ignored, although they might appear as
                attributes on the cube as it is extracted from the first
                slice.
        """
        slice_coords = [cube.coord(axis="y"), cube.coord(axis="x")]
        if cube.coord("time") in cube.coords(dim_coords=True):
            slice_coords.insert(0, cube.coord("time"))

        template = next(cube.slices(slice_coords))
        demoted_coords = [
            crd
            for crd in cube.coords(dim_coords=True)
            if crd not in template.coords(dim_coords=True)
        ]
        for crd in demoted_coords:
            template.remove_coord(crd)
        attributes = generate_mandatory_attributes([template])
        title_attribute = {"title": "Day-Night mask"}
        data = np.full(template.data.shape, self.night, dtype=np.int32)
        daynight_mask = create_new_diagnostic_cube(
            "day_night_mask",
            1,
            template,
            attributes,
            optional_attributes=title_attribute,
            data=data,
            dtype=np.int32,
        )
        return daynight_mask
Example #29
def calculate_sleet_probability(prob_of_snow: Cube,
                                prob_of_rain: Cube) -> Cube:
    """
    This calculates the probability of sleet using the calculation:
    prob(sleet) = 1 - (prob(snow) + prob(rain))

    Args:
      prob_of_snow:
        Cube of the probability of snow. This can be a fraction (0 <= x <= 1) or
        categorical (0 or 1)
      prob_of_rain:
        Cube of the probability of rain. This can be a fraction (0 <= x <= 1) or
        categorical (0 or 1)

    Returns:
        Cube of the probability of sleet. This will be fractional or categorical,
        matching the highest precision of the inputs.

    Raises:
        ValueError: If the cube contains negative values for the
                    probability of sleet.
    """
    sleet_prob = 1 - (prob_of_snow.data + prob_of_rain.data)
    if np.any(sleet_prob < 0):
        msg = "Negative values of sleet probability have been calculated."
        raise ValueError(msg)

    # Copy all of the attributes from the prob_of_snow cube
    mandatory_attributes = generate_mandatory_attributes(
        [prob_of_rain, prob_of_snow])
    probability_of_sleet = create_new_diagnostic_cube("probability_of_sleet",
                                                      "1",
                                                      prob_of_snow,
                                                      mandatory_attributes,
                                                      data=sleet_prob)
    return probability_of_sleet
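A minimal usage sketch with bare iris cubes; the inputs here are hypothetical, and real IMPROVER cubes would carry full grids and metadata:

    import numpy as np
    from iris.cube import Cube

    # Assumes calculate_sleet_probability (Example #29) is in scope.
    snow = Cube(np.full((2, 2), 0.2, dtype=np.float32),
                long_name="probability_of_snow", units="1")
    rain = Cube(np.full((2, 2), 0.5, dtype=np.float32),
                long_name="probability_of_rain", units="1")
    sleet = calculate_sleet_probability(snow, rain)
    # sleet.data is ~0.3 everywhere: 1 - (0.2 + 0.5)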
Example #30
    def _calculate_freezing_rain_probability(self) -> Cube:
        """Calculate the probability of freezing rain from the probabilities
        of rain and sleet rates or accumulations, and the provided probabilities
        of temperature being below the freezing point of water.

        (probability of rain + probability of sleet) x (probability T < 0C)

        Returns:
            Cube of freezing rain probabilities.
        """
        freezing_rain_prob = (self.rain.data +
                              self.sleet.data) * self.temperature.data
        diagnostic_name = self.sleet.name().replace("sleet", "freezing_rain")
        threshold_name = (self.sleet.coord(
            var_name="threshold").name().replace("sleet", "freezing_rain"))
        mandatory_attributes = generate_mandatory_attributes(
            CubeList([self.rain, self.sleet]))
        optional_attributes = {}
        if self.model_id_attr:
            # Rain and sleet will always be derived from the same model, but temperature
            # may be diagnosed from a different model when creating a nowcast forecast.
            # The output in such a case is fundamentally a nowcast product, so we exclude
            # the temperature diagnostic when determining the model_id_attr.
            optional_attributes = update_model_id_attr_attribute(
                CubeList([self.rain, self.sleet]), self.model_id_attr)
        freezing_rain_cube = create_new_diagnostic_cube(
            diagnostic_name,
            "1",
            template_cube=self.sleet,
            mandatory_attributes=mandatory_attributes,
            optional_attributes=optional_attributes,
            data=freezing_rain_prob,
        )
        freezing_rain_cube.coord(var_name="threshold").rename(threshold_name)
        freezing_rain_cube.coord(threshold_name).var_name = "threshold"
        return freezing_rain_cube
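The combination rule in the docstring is a point-wise product; a worked example with illustrative values:

    import numpy as np

    p_rain = np.array([0.6, 0.1], dtype=np.float32)
    p_sleet = np.array([0.2, 0.1], dtype=np.float32)
    p_temp_below_0c = np.array([0.5, 1.0], dtype=np.float32)
    p_freezing_rain = (p_rain + p_sleet) * p_temp_below_0c
    # -> [0.4, 0.2]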