Example #1
    def check_probability_cube_metadata(self, cube: Cube) -> None:
        """Checks probability-specific metadata"""
        if cube.units != "1":
            self.errors.append(
                f"Expected units of 1 on probability data, got {cube.units}")

        try:
            self.diagnostic = get_diagnostic_cube_name_from_probability_name(
                cube.name())
        except ValueError as cause:
            # if the probability name is not valid, the threshold coordinate
            # name cannot be derived, so stop here
            self.errors.append(str(cause))
            return

        expected_threshold_name = get_threshold_coord_name_from_probability_name(
            cube.name())

        if not cube.coords(expected_threshold_name):
            msg = f"Cube does not have expected threshold coord '{expected_threshold_name}'; "
            try:
                threshold_name = find_threshold_coordinate(cube).name()
            except CoordinateNotFoundError:
                coords = [coord.name() for coord in cube.coords()]
                msg += (
                    f"no coord with var_name='threshold' found in all coords: {coords}"
                )
                self.errors.append(msg)
            else:
                msg += f"threshold coord has incorrect name '{threshold_name}'"
                self.errors.append(msg)
                self.check_threshold_coordinate_properties(
                    cube.name(), cube.coord(threshold_name))
        else:
            threshold_coord = cube.coord(expected_threshold_name)
            self.check_threshold_coordinate_properties(cube.name(),
                                                       threshold_coord)
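
A minimal sketch of the name parsing the two helpers above perform; the regex is illustrative only, not the IMPROVER implementation:

import re

# Hypothetical probability cube name; the diagnostic part recovered here
# ("air_temperature") is what both helpers derive from the cube name.
name = "probability_of_air_temperature_above_threshold"
match = re.fullmatch(r"probability_of_(?P<diag>.+)_(?:above|below)_threshold", name)
print(match.group("diag"))  # air_temperature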
Example #2
    def __init__(
        self,
        input_cube: Cube,
        vel_x: Cube,
        vel_y: Cube,
        orographic_enhancement_cube: Optional[Cube] = None,
        attributes_dict: Optional[Dict] = None,
    ) -> None:
        """
        Initialises the object.
        This includes checking if orographic enhancement is provided and
        removing the orographic enhancement from the input cube ready for
        extrapolation.
        An error is raised if the input cube is precipitation rate but no
        orographic enhancement cube is provided.

        Args:
            input_cube:
                A 2D cube containing data to be advected.
            vel_x:
                Cube containing a 2D array of velocities along the x
                coordinate axis
            vel_y:
                Cube containing a 2D array of velocities along the y
                coordinate axis
            orographic_enhancement_cube:
                Cube containing the orographic enhancement fields. May have
                data for multiple times in the cube. The orographic enhancement
                is removed from the input_cube before advecting, and added
                back on after advection.
            attributes_dict:
                Dictionary containing information for amending the attributes
                of the output cube.
        """
        if not (vel_x and vel_y):
            raise TypeError("Neither x velocity or y velocity can be None")

        self.orographic_enhancement_cube = orographic_enhancement_cube
        if self.orographic_enhancement_cube:
            (input_cube,) = ApplyOrographicEnhancement("subtract")(
                input_cube, self.orographic_enhancement_cube
            )
        elif (
            "precipitation_rate" in input_cube.name()
            or "rainfall_rate" in input_cube.name()
        ):
            msg = (
                "For precipitation or rainfall fields, orographic "
                "enhancement cube must be supplied."
            )
            raise ValueError(msg)
        self.input_cube = input_cube
        self.advection_plugin = AdvectField(
            vel_x, vel_y, attributes_dict=attributes_dict
        )
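
A minimal numpy sketch of the subtract-then-advect bookkeeping described in the docstring, with hypothetical rate values:

import numpy as np

precip = np.array([2.0, 5.0])                  # hypothetical rates, mm h-1
orographic_enhancement = np.array([0.5, 1.5])

# Only the advectable component is moved; the enhancement is added back
# after advection by the wider plugin.
advectable = precip - orographic_enhancement
print(advectable)  # [1.5 3.5]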
Example #3
    def process(self, wind_speed: Cube, wind_dir: Cube) -> Tuple[Cube, Cube]:
        """
        Convert wind speed and direction into u,v components along input cube
        projection axes.

        Args:
            wind_speed:
                Cube containing wind speed values
            wind_dir:
                Cube containing wind direction values relative to true North

        Returns:
            - Cube containing wind speeds in the positive projection
              x-axis direction, with units and projection matching
              wind_speed cube.
            - Cube containing wind speeds in the positive projection
              y-axis direction, with units and projection matching
              wind_speed cube.
        """
        # check cubes contain the correct data (assuming CF standard names)
        if "wind_speed" not in wind_speed.name():
            msg = "{} cube does not contain wind speeds"
            raise ValueError("{} {}".format(wind_speed.name(), msg))

        if "wind" not in wind_dir.name() or "direction" not in wind_dir.name():
            msg = "{} cube does not contain wind directions"
            raise ValueError("{} {}".format(wind_dir.name(), msg))

        # check input cube coordinates match
        ignored_coords = ["wind_from_direction status_flag", "wind_speed status_flag"]
        unmatched_coords = compare_coords(
            [wind_speed, wind_dir], ignored_coords=ignored_coords
        )
        if unmatched_coords != [{}, {}]:
            msg = "Wind speed and direction cubes have unmatched coordinates"
            raise ValueError("{} {}".format(msg, unmatched_coords))

        # calculate angle adjustments for wind direction
        wind_dir_slice = next(
            wind_dir.slices(
                [wind_dir.coord(axis="y").name(), wind_dir.coord(axis="x").name()]
            )
        )
        adj = self.calc_true_north_offset(wind_dir_slice)

        # calculate grid eastward and northward speeds
        ucube, vcube = self.resolve_wind_components(wind_speed, wind_dir, adj)

        # relabel final cubes with CF compliant data names corresponding to
        # positive wind speeds along the x and y axes
        ucube.rename("grid_eastward_wind")
        vcube.rename("grid_northward_wind")

        return ucube, vcube
Example #4
    def _create_output_cube(
        self, cube: Cube, advected_data: ndarray, timestep: timedelta
    ) -> Cube:
        """
        Create a cube and appropriate metadata to contain the advected forecast

        Args:
            cube:
                Source cube (before advection)
            advected_data:
                Advected data
            timestep:
                Time difference between the advected output and the source

        Returns:
            The output cube
        """
        attributes = generate_mandatory_attributes([cube])
        if "institution" in cube.attributes.keys():
            attributes["source"] = "{} Nowcast".format(attributes["institution"])
        else:
            attributes["source"] = "Nowcast"
        advected_cube = create_new_diagnostic_cube(
            cube.name(), cube.units, cube, attributes, data=advected_data
        )
        amend_attributes(advected_cube, self.attributes_dict)
        set_history_attribute(advected_cube, "Nowcast")

        self._update_time(cube.coord("time").copy(), advected_cube, timestep)
        self._add_forecast_reference_time(cube.coord("time").copy(), advected_cube)
        self._add_forecast_period(advected_cube, timestep)

        return advected_cube
Example #5
    def process(self, cube: Cube) -> Tuple[Cube, Cube]:
        """
        Calculate the difference along the x and y axes and return
        the result in separate cubes. The difference along each axis is
        calculated using numpy.diff.

        Args:
            cube:
                Cube from which the differences will be calculated.

        Returns:
            - Cube after the differences have been calculated along the
              x axis.
            - Cube after the differences have been calculated along the
              y axis.
        """
        diffs = []
        for axis in ["x", "y"]:
            coord_name = cube.coord(axis=axis).name()
            diff_cube = self.create_difference_cube(
                cube, coord_name, self.calculate_difference(cube, coord_name)
            )
            self._update_metadata(diff_cube, coord_name, cube.name())
            diffs.append(diff_cube)
        return tuple(diffs)
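
A minimal numpy sketch of the underlying differencing, with a hypothetical 3x3 field; note that the differenced dimension shrinks by one:

import numpy as np

data = np.array([[1, 2, 4],
                 [1, 3, 6],
                 [2, 5, 9]])

x_diff = np.diff(data, axis=1)  # shape (3, 2): differences along columns (x)
y_diff = np.diff(data, axis=0)  # shape (2, 3): differences along rows (y)
print(x_diff)
print(y_diff)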
Example #6
    def resolve_wind_components(
        speed: Cube, angle: Cube, adj: ndarray
    ) -> Tuple[Cube, Cube]:
        """
        Perform trigonometric reprojection onto x and y axes

        Args:
            speed:
                Cube containing wind speed data
            angle:
                Cube containing wind directions as angles from true North
            adj:
                2D array of wind direction angle adjustments in radians, to
                convert zero reference from true North to grid North.
                Broadcast automatically if speed and angle cubes have extra
                dimensions.

        Returns:
            - Cube containing wind vector component in the positive
              x-direction u_speed
            - Cube containing wind vector component in the positive
              y-direction v_speed
        """
        angle.convert_units("radians")
        angle.data += adj

        # output vectors should be pointing "to" not "from"
        if "wind_from_direction" in angle.name():
            angle.data += np.pi
        sin_angle = np.sin(angle.data)
        cos_angle = np.cos(angle.data)
        uspeed = np.multiply(speed.data, sin_angle)
        vspeed = np.multiply(speed.data, cos_angle)
        return speed.copy(data=uspeed), speed.copy(data=vspeed)
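
A worked numpy example of the reprojection, assuming a flat grid (adj = 0) and a hypothetical 10 m s-1 westerly:

import numpy as np

speed = np.array([10.0])                   # m s-1
wind_from = np.deg2rad(np.array([270.0]))  # direction wind blows FROM

angle_to = wind_from + np.pi               # "from" -> "to", as above
u = speed * np.sin(angle_to)               # positive towards grid east
v = speed * np.cos(angle_to)               # positive towards grid north
print(np.round(u, 6), np.round(v, 6))      # [10.] [0.]: eastward flow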
Example #7
    def _adjust_landsea(self, cube: Cube, target_grid: Cube) -> Cube:
        """
        Adjust regridded data using differences between the target landmask
        and that obtained by regridding the source grid landmask, to ensure
        that the "land" or "sea" nature of the points in the regridded cube
        matches that of the target grid.

        Args:
            cube:
                Cube after initial regridding
            target_grid:
                Cube containing landmask data on the target grid

        Returns:
            Adjusted cube
        """
        if self.landmask_name not in self.landmask_source_grid.name():
            msg = "Expected {} in input_landmask cube but found {}".format(
                self.landmask_name, repr(self.landmask_source_grid))
            warnings.warn(msg)

        if self.landmask_name not in target_grid.name():
            msg = "Expected {} in target_grid cube but found {}".format(
                self.landmask_name, repr(target_grid))
            warnings.warn(msg)

        return AdjustLandSeaPoints(vicinity_radius=self.landmask_vicinity)(
            cube, self.landmask_source_grid, target_grid)
Example #8
    def __init__(
        self,
        radius: Optional[float] = None,
        grid_point_radius: Optional[int] = None,
        land_mask_cube: Optional[Cube] = None,
    ) -> None:
        """
        Args:
            radius:
                Radius in metres used to define the vicinity within which to
                search for an occurrence.
            grid_point_radius:
                Alternatively, a number of grid points that defines the vicinity
                radius over which to search for an occurrence.
            land_mask_cube:
                Binary land-sea mask data. True for land-points, False for sea.
                Restricts in-vicinity processing to only include points of a
                like mask value.
        """
        if radius is not None and grid_point_radius is not None:
            raise ValueError(
                "Only one of radius or grid_point_radius should be set")
        self.radius = radius
        self.grid_point_radius = grid_point_radius
        if land_mask_cube:
            if land_mask_cube.name() != "land_binary_mask":
                raise ValueError(
                    f"Expected land_mask_cube to be called land_binary_mask, "
                    f"not {land_mask_cube.name()}")
            self.land_mask = np.where(land_mask_cube.data >= 0.5, True, False)
        else:
            self.land_mask = None
        self.land_mask_cube = land_mask_cube
Example #9
    def _create_output_cube(gradient: ndarray, diff: Cube, cube: Cube,
                            axis: str) -> Cube:
        """
        Create the output gradient cube.

        Args:
            gradient:
                Gradient values used in the data array of the resulting cube.
            diff:
                Cube containing differences along the x or y axis
            cube:
                Cube with correct output dimensions
            axis:
                Short-hand reference for the x or y coordinate, as allowed by
                iris.util.guess_coord_axis.

        Returns:
            A cube of the gradients in the coordinate direction specified.
        """
        grad_cube = create_new_diagnostic_cube(
            "gradient_of_" + cube.name(),
            cube.units / diff.coord(axis=axis).units,
            diff,
            MANDATORY_ATTRIBUTE_DEFAULTS,
            data=gradient,
        )
        return grad_cube
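
A minimal numpy sketch of the quantity held in the gradient cube: differences divided by the coordinate increment, giving units of cube.units / coordinate units. Values are hypothetical:

import numpy as np

values = np.array([280.0, 281.0, 283.0])  # hypothetical field, K
spacing = 2000.0                          # hypothetical x-grid spacing, m

gradient = np.diff(values) / spacing      # K m-1
print(gradient)  # [0.0005 0.001 ]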
Example #10
    def _apply_error_to_forecast(self, forecast_cube: Cube,
                                 error_percentiles_cube: Cube) -> Cube:
        """Apply the error distributions (as error percentiles) to the forecast cube.
        The result is a series (sub-ensemble) of values for each forecast realization.

        Note:

            Within the RainForests approach we work with an additive error correction
            as opposed to a multiplicative correction used in ECPoint. The advantage of
            using an additive error is that we are also able to calibrate zero-values in
            the input forecast.

        Warning:

            After applying the error distributions to the forecast cube, values outside
            the expected bounds of the forecast parameter can arise. These values occur
            when the input forecast value is between error thresholds and there exists a
            lower bound on the observable value (e.g. 0 in the case of rainfall).

            In this situation, error thresholds below the residual value (min(obs) - fcst)
            must have a probability of exceedance of 1, whereas error thresholds above
            this value can take on any value between [0, 1]. In the subsequent step where
            error percentile values are extracted, the linear interpolation in mapping from
            probabilities to percentiles can give percentile values that lie below the
            residual value; when these are applied to the forecast value, they result in
            forecast values outside the expected bounds of the forecast parameter in the
            resultant sub-ensemble.

            To address this, we remap all values outside of the expected bounds to the
            nearest bound (e.g. negative values are mapped to 0 in the case of rainfall).

        Args:
            forecast_cube:
                Cube containing the forecast to be calibrated.
            error_percentiles_cube:
                Cube containing percentile values for the error distributions.

        Returns:
            Cube containing the forecast sub-ensembles.
        """
        # Apply the error_percentiles to the forecast_cube (additive correction)
        forecast_subensembles_data = (forecast_cube.data[:, np.newaxis] +
                                      error_percentiles_cube.data)
        # RAINFALL SPECIFIC IMPLEMENTATION:
        # As described above, we need to address values outside of the expected bounds.
        # In the case of rainfall, we map all negative values to 0.
        forecast_subensembles_data = np.maximum(0.0,
                                                forecast_subensembles_data)
        # Return cube containing forecast subensembles
        return create_new_diagnostic_cube(
            name=forecast_cube.name(),
            units=forecast_cube.units,
            template_cube=error_percentiles_cube,
            mandatory_attributes=generate_mandatory_attributes([forecast_cube]),
            optional_attributes=forecast_cube.attributes,
            data=forecast_subensembles_data,
        )
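
A minimal numpy sketch of the additive correction and the rainfall-specific bounds remapping, with hypothetical values:

import numpy as np

forecast = np.array([0.0, 1.5])                  # two realizations, mm h-1
error_percentiles = np.array([[-0.5, 0.0, 0.4],
                              [-2.0, -1.0, 0.5]])

# Broadcast the percentiles across realizations, then remap negatives
# back to the lower bound.
subensembles = forecast[:, np.newaxis] + error_percentiles
subensembles = np.maximum(0.0, subensembles)
print(subensembles)
# [[0.  0.  0.4]
#  [0.  0.5 2. ]]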
Example #11
    def build_diagnostic_cube(
        neighbour_cube: Cube,
        diagnostic_cube: Cube,
        spot_values: ndarray,
        additional_dims: Optional[List[DimCoord]] = None,
        scalar_coords: Optional[List[AuxCoord]] = None,
        auxiliary_coords: Optional[List[AuxCoord]] = None,
        unique_site_id: Optional[Union[List[str], ndarray]] = None,
        unique_site_id_key: Optional[str] = None,
    ) -> Cube:
        """
        Builds a spot data cube containing the extracted diagnostic values.

        Args:
            neighbour_cube:
                This cube is needed as a source for information about the spot
                sites which needs to be included in the spot diagnostic cube.
            diagnostic_cube:
                The cube is needed to provide the name and units of the
                diagnostic that is being processed.
            spot_values:
                An array containing the diagnostic values extracted for the
                required spot sites.
            additional_dims:
                Optional list containing iris.coord.DimCoords with any leading
                dimensions required before spot data.
            scalar_coords:
                Optional list containing iris.coord.AuxCoords with all scalar coordinates
                relevant for the spot sites.
            auxiliary_coords:
                Optional list containing iris.coords.AuxCoords which are non-scalar.
            unique_site_id:
                Optional list of 8-digit unique site identifiers.
            unique_site_id_key:
                String to name the unique_site_id coordinate. Required if
                unique_site_id is in use.

        Returns:
            A spot data cube containing the extracted diagnostic data.
        """
        spot_diagnostic_cube = build_spotdata_cube(
            spot_values,
            diagnostic_cube.name(),
            diagnostic_cube.units,
            neighbour_cube.coord("altitude").points,
            neighbour_cube.coord(axis="y").points,
            neighbour_cube.coord(axis="x").points,
            neighbour_cube.coord("wmo_id").points,
            unique_site_id=unique_site_id,
            unique_site_id_key=unique_site_id_key,
            scalar_coords=scalar_coords,
            auxiliary_coords=auxiliary_coords,
            additional_dims=additional_dims,
        )
        return spot_diagnostic_cube
Example #12
    def _check_input_cubes(cube1: Cube, cube2: Cube) -> None:
        """Check that input cubes have appropriate and matching dimensions"""
        # check the nature of the input cubes, and warn if they are not
        # both precipitation
        if cube1.name() != cube2.name():
            msg = "Input cubes contain different data types {} and {}"
            raise ValueError(msg.format(cube1.name(), cube2.name()))

        data_name = cube1.name().lower()
        if "rain" not in data_name and "precipitation" not in data_name:
            msg = ("Input data are of non-precipitation type {}.  Plugin "
                   "parameters have not been tested and may not be appropriate"
                   " for this variable.")
            warnings.warn(msg.format(cube1.name()))

        # check cubes have exactly two spatial dimension coordinates and a
        # scalar time coordinate
        check_input_coords(cube1, require_time=True)
        check_input_coords(cube2, require_time=True)

        # check cube dimensions match
        if cube1.coord(axis="x") != cube2.coord(axis="x") or cube1.coord(
                axis="y") != cube2.coord(axis="y"):
            raise InvalidCubeError("Input cubes on unmatched grids")

        # check grids are equal area
        check_if_grid_is_equal_area(cube1)
        check_if_grid_is_equal_area(cube2)
Example #13
def invert_probabilities(cube: Cube) -> Cube:
    """Given a cube with a probability threshold, invert the probabilities
    relative to the existing thresholding inequality. Update the coordinate
    metadata to indicate the new threshold inequality.

    Args:
        cube:
            A probability cube with a threshold coordinate.

    Returns:
        Cube with the probabilities inverted relative to the input thresholding
        inequality.

    Raises:
        ValueError: If no threshold coordinate is found.
    """
    try:
        threshold = cube.coord(var_name="threshold")
    except CoordinateNotFoundError:
        raise ValueError(
            "Cube does not have a threshold coordinate, so the "
            "probabilities cannot be inverted.")

    comparison_operator_lookup = comparison_operator_dict()
    inequality = threshold.attributes["spp__relative_to_threshold"]
    (inverse,) = {
        value.inverse
        for value in comparison_operator_lookup.values()
        if value.spp_string == inequality
    }
    new_inequality = comparison_operator_lookup[inverse].spp_string
    inverted_probabilities = cube.copy(data=(1.0 - cube.data))
    inverted_probabilities.coord(
        threshold).attributes["spp__relative_to_threshold"] = new_inequality

    new_name = (cube.name().replace("above", "below") if "above"
                in cube.name() else cube.name().replace("below", "above"))
    inverted_probabilities.rename(new_name)

    return inverted_probabilities
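
A minimal sketch of the inversion itself, with hypothetical exceedance probabilities; the real function also flips the coordinate's spp__relative_to_threshold attribute:

import numpy as np

above = np.array([0.9, 0.5, 0.1])  # P(X > threshold)
below = 1.0 - above                # P(X <= threshold)

name = "probability_of_rainfall_rate_above_threshold"
print(name.replace("above", "below"))  # ..._below_threshold
print(below)                           # [0.1 0.5 0.9]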
Example #14
    def __init__(
        self,
        radii: Optional[List[Union[float, int]]] = None,
        grid_point_radii: Optional[List[Union[float, int]]] = None,
        land_mask_cube: Optional[Cube] = None,
    ) -> None:
        """
        Args:
            radii:
                A list of radii in metres used to define the vicinities within
                which to search for occurrences.
            grid_point_radii:
                Alternatively, a list of numbers of grid points that define the
                vicinity radii over which to search for occurrences. Only one of
                radii or grid_point_radii should be set.
            land_mask_cube:
                Binary land-sea mask data. True for land-points, False for sea.
                Restricts in-vicinity processing to only include points of a
                like mask value.

        Raises:
            ValueError: If both radii and grid point radii are set.
            ValueError: If neither radii nor grid point radii are set.
            ValueError: If a provided vicinity radius is negative.
            ValueError: Land mask not named land_binary_mask.
        """
        if radii and grid_point_radii:
            raise ValueError(
                "Vicinity processing requires that only one of radii or "
                "grid_point_radii should be set")
        if not radii and not grid_point_radii:
            raise ValueError(
                "Vicinity processing requires that one of radii or "
                "grid_point_radii should be set to a non-zero value")
        if (radii and any(np.array(radii) < 0)) or (
                grid_point_radii and any(np.array(grid_point_radii) < 0)):
            raise ValueError(
                "Vicinity processing requires only positive vicinity radii")

        self.radii = radii if radii else grid_point_radii
        self.native_grid_point_radius = not radii

        if land_mask_cube:
            if land_mask_cube.name() != "land_binary_mask":
                raise ValueError(
                    f"Expected land_mask_cube to be called land_binary_mask, "
                    f"not {land_mask_cube.name()}")
            self.land_mask = np.where(land_mask_cube.data >= 0.5, True, False)
        else:
            self.land_mask = None
        self.land_mask_cube = land_mask_cube
Example #15
    def build_diagnostic_cube(
        neighbour_cube: Cube,
        diagnostic_cube: Cube,
        spot_values: ndarray,
        additional_dims: Optional[List[DimCoord]] = None,
        scalar_coords: Optional[List[AuxCoord]] = None,
    ) -> Cube:
        """
        Builds a spot data cube containing the extracted diagnostic values.

        Args:
            neighbour_cube:
                This cube is needed as a source for information about the spot
                sites which needs to be included in the spot diagnostic cube.
            diagnostic_cube:
                The cube is needed to provide the name and units of the
                diagnostic that is being processed.
            spot_values:
                An array containing the diagnostic values extracted for the
                required spot sites.
            additional_dims:
                Optional list containing iris.coord.DimCoords with any leading
                dimensions required before spot data.
            scalar_coords:
                Optional list containing iris.coord.AuxCoords with all scalar coordinates
                relevant for the spot sites.

        Returns:
            A spot data cube containing the extracted diagnostic data.
        """

        spot_diagnostic_cube = build_spotdata_cube(
            spot_values,
            diagnostic_cube.name(),
            diagnostic_cube.units,
            neighbour_cube.coord("altitude").points,
            neighbour_cube.coord(axis="y").points,
            neighbour_cube.coord(axis="x").points,
            neighbour_cube.coord("wmo_id").points,
            scalar_coords=scalar_coords,
            additional_dims=additional_dims,
        )
        return spot_diagnostic_cube
Example #16
def _check_metadata(cube: Cube) -> None:
    """
    Checks cube metadata that needs to be correct to guarantee data integrity

    Args:
        cube:
            Cube to be checked

    Raises:
        ValueError: if time coordinates do not have the required datatypes
            and units; needed because values may be wrong
        ValueError: if numerical datatypes are other than 32-bit (except
            where specified); needed because values may be wrong
        ValueError: if cube dataset has unknown units; because this may cause
            misinterpretation on "load"
    """
    check_mandatory_standards(cube)
    if cf_units.Unit(cube.units).is_unknown():
        raise ValueError("{} has unknown units".format(cube.name()))
Example #17
    def check_cell_methods(self, cube: Cube) -> None:
        """Checks cell methods are permitted and correct"""
        if any([substr in cube.name() for substr in PRECIP_ACCUM_NAMES]):
            msg = f"Expected sum over time cell method for {cube.name()}"
            if not cube.cell_methods:
                self.errors.append(msg)
            else:
                found_cm = False
                for cm in cube.cell_methods:
                    if (
                        cm.method == PRECIP_ACCUM_CM.method
                        and cm.coord_names == PRECIP_ACCUM_CM.coord_names
                    ):
                        found_cm = True
                if not found_cm:
                    self.errors.append(msg)

        for cm in cube.cell_methods:
            if cm.method in COMPLIANT_CM_METHODS:
                self.methods += f" {cm.method} over {cm.coord_names[0]}"
                if self.field_type == self.PROB:
                    if not cm.comments or cm.comments[0] != f"of {self.diagnostic}":
                        self.errors.append(
                            f"Cell method {cm} on probability data should have comment "
                            f"'of {self.diagnostic}'"
                        )
                # check point and bounds on method coordinate
                if "time" in cm.coord_names:
                    if cube.coord("time").bounds is None:
                        self.errors.append(f"Cube of{self.methods} has no time bounds")

            elif cm in NONCOMP_CMS or cm.method in NONCOMP_CM_METHODS:
                self.errors.append(f"Non-standard cell method {cm}")
            else:
                # flag method which might be invalid, but we can't be sure
                self.warnings.append(
                    f"Unexpected cell method {cm}. Please check the standard to "
                    "ensure this is valid"
                )
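
A minimal sketch of why the accumulation check compares fields rather than whole objects, assuming iris is available; the PRECIP_ACCUM_CM value below is a hypothetical stand-in for the module constant:

from iris.coords import CellMethod

PRECIP_ACCUM_CM = CellMethod(method="sum", coords="time")  # assumed value
found = CellMethod(method="sum", coords="time", intervals="1 hour")

print(found == PRECIP_ACCUM_CM)  # False: intervals differ
print(found.method == PRECIP_ACCUM_CM.method
      and found.coord_names == PRECIP_ACCUM_CM.coord_names)  # True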
Example #18
def find_threshold_coordinate(cube: Cube) -> Coord:
    """Find threshold coordinate in cube.

    Compatible with both the old (cube.coord("threshold")) and new
    (cube.coord.var_name == "threshold") IMPROVER metadata standards.

    Args:
        cube:
            Cube containing thresholded probability data

    Returns:
        Threshold coordinate

    Raises:
        TypeError: If cube is not of type iris.cube.Cube.
        CoordinateNotFoundError: If no threshold coordinate is found.
    """
    if not isinstance(cube, iris.cube.Cube):
        msg = (
            "Expecting data to be an instance of "
            "iris.cube.Cube but is {0}.".format(type(cube))
        )
        raise TypeError(msg)

    threshold_coord = None
    try:
        threshold_coord = cube.coord("threshold")
    except CoordinateNotFoundError:
        for coord in cube.coords():
            if coord.var_name == "threshold":
                threshold_coord = coord
                break

    if threshold_coord is None:
        msg = "No threshold coord found on {0:s} data".format(cube.name())
        raise CoordinateNotFoundError(msg)

    return threshold_coord
Example #19
def find_percentile_coordinate(cube: Cube) -> Coord:
    """Find percentile coord in cube.

    Args:
        cube:
            Cube containing one or more percentiles.

    Returns:
        Percentile coordinate.

    Raises:
        TypeError: If cube is not of type iris.cube.Cube.
        CoordinateNotFoundError: If no percentile coordinate is found in cube.
        ValueError: If there is more than one percentile coord in the cube.
    """
    if not isinstance(cube, iris.cube.Cube):
        msg = (
            "Expecting data to be an instance of "
            "iris.cube.Cube but is {0}.".format(type(cube))
        )
        raise TypeError(msg)
    standard_name = cube.name()
    perc_coord = None
    perc_found = 0
    for coord in cube.coords():
        if "percentile" in coord.name():
            perc_found += 1
            perc_coord = coord

    if perc_found == 0:
        msg = "No percentile coord found on {0:s} data".format(standard_name)
        raise CoordinateNotFoundError(msg)

    if perc_found > 1:
        msg = "Too many percentile coords found on {0:s} data".format(standard_name)
        raise ValueError(msg)

    return perc_coord
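
A pure-Python sketch of the search loop above, using coordinate names only:

coord_names = ["realization", "percentile", "latitude", "longitude"]

matches = [name for name in coord_names if "percentile" in name]
if not matches:
    raise ValueError("No percentile coord found")
if len(matches) > 1:
    raise ValueError("Too many percentile coords found")
print(matches[0])  # percentile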
Example #20
    def __init__(self, distance: float, land_mask_cube: Optional[Cube] = None) -> None:
        """
        Initialise the class.

        Args:
            distance:
                Distance in metres used to define the vicinity within which to
                search for an occurrence.
            land_mask_cube:
                Binary land-sea mask data. True for land-points, False for sea.
                Restricts in-vicinity processing to only include points of a
                like mask value.
        """
        self.distance = distance
        if land_mask_cube:
            if land_mask_cube.name() != "land_binary_mask":
                raise ValueError(
                    f"Expected land_mask_cube to be called land_binary_mask, "
                    f"not {land_mask_cube.name()}")
            self.land_mask = np.where(land_mask_cube.data >= 0.5, True, False)
        else:
            self.land_mask = None
        self.land_mask_cube = land_mask_cube
Example #21
    def _validate_snow_fraction(snow_fraction: Cube) -> None:
        """Ensures that the input snow-fraction field has appropriate name
        (snow_fraction), units (1) and data (between 0 and 1 inclusive).

        Args:
            snow_fraction:
                The snow-fraction cube to be validated.

        Raises:
            ValueError:
                If any of the above are not True.
            NotImplementedError:
                If the input data are masked.
        """
        if snow_fraction.name() != "snow_fraction":
            raise ValueError(
                f"Expected cube named 'snow_fraction', not {snow_fraction.name()}"
            )
        if f"{snow_fraction.units}" != "1":
            raise ValueError(f"Expected cube with units '1', not {snow_fraction.units}")
        if np.ma.is_masked(snow_fraction.data):
            raise NotImplementedError("SignificantPhaseMask cannot handle masked data")
        if np.any((snow_fraction.data < 0) | (snow_fraction.data > 1)):
            raise ValueError(
                f"Expected cube data to be in range 0 <= x <= 1. "
                f"Found max={snow_fraction.data.max()}; min={snow_fraction.data.min()}"
            )
Example #22
    def process(self, spot_data_cube: Cube, neighbour_cube: Cube,
                gridded_lapse_rate_cube: Cube) -> Cube:
        """
        Extract lapse rates from the appropriate grid points and apply them to
        the spot extracted temperatures.

        The calculation is::

         lapse_rate_adjusted_temperatures = temperatures + lapse_rate *
         vertical_displacement

        Args:
            spot_data_cube:
                A spot data cube of temperatures for the spot data sites,
                extracted from the gridded temperature field. These
                temperatures will have been extracted using the same
                neighbour_cube and neighbour_selection_method that are being
                used here.
            neighbour_cube:
                The neighbour_cube that contains the grid coordinates at which
                lapse rates should be extracted and the vertical displacement
                between those grid points on the model orography and the spot
                data sites actual altitudes. This cube is only updated when
                a new site is added.
            gridded_lapse_rate_cube:
                A cube of temperature lapse rates on the same grid as that from
                which the spot data temperatures were extracted.

        Returns:
            A copy of the input spot_data_cube with the data modified by
            the lapse rates to give a better representation of the site's
            temperatures.

        Raises:
            ValueError:
                If the input spot_data_cube contains probability data.
            ValueError:
                If the lapse rate cube was provided but the diagnostic being
                processed is not air temperature or feels like temperature.
            ValueError:
                If the lapse rate cube provided does not have the name
                "air_temperature_lapse_rate".
            CoordinateNotFoundError:
                If the lapse rate cube does not contain a single valued height
                coordinate.
            ValueError:
                If the height of the temperature data does not match that of
                the data used to calculate the lapse rates.
        """

        if is_probability(spot_data_cube):
            msg = (
                "Input cube has a probability coordinate which cannot be lapse "
                "rate adjusted. Input data should be in percentile or "
                "deterministic space only.")
            raise ValueError(msg)

        # Check that we are dealing with temperature data.
        if spot_data_cube.name() not in [
                "air_temperature", "feels_like_temperature"
        ]:
            msg = (
                "The diagnostic being processed is not air temperature "
                "or feels like temperature and therefore cannot be adjusted.")
            raise ValueError(msg)

        if gridded_lapse_rate_cube.name() != "air_temperature_lapse_rate":
            msg = ("A cube has been provided as a lapse rate cube but does "
                   "not have the expected name air_temperature_lapse_rate: "
                   "{}".format(gridded_lapse_rate_cube.name()))
            raise ValueError(msg)

        try:
            lapse_rate_height_coord = gridded_lapse_rate_cube.coord("height")
        except CoordinateNotFoundError:
            msg = ("Lapse rate cube does not contain a single valued height "
                   "coordinate. This is required to ensure it is applied to "
                   "equivalent temperature data.")
            raise CoordinateNotFoundError(msg)

        # Check the height of the temperature data matches that used to
        # calculate the lapse rates. If so, adjust temperatures using the lapse
        # rate values.
        if not spot_data_cube.coord("height") == lapse_rate_height_coord:
            raise ValueError(
                "A lapse rate cube was provided, but the height of the "
                "temperature data does not match that of the data used "
                "to calculate the lapse rates. As such the temperatures "
                "were not adjusted with the lapse rates.")

        # Check the cubes are compatible.
        check_grid_match(
            [neighbour_cube, spot_data_cube, gridded_lapse_rate_cube])

        # Extract the lapse rates that correspond to the spot sites.
        spot_lapse_rate = SpotExtraction(
            neighbour_selection_method=self.neighbour_selection_method)(
                neighbour_cube, gridded_lapse_rate_cube)

        # Extract vertical displacements between the model orography and sites.
        method_constraint = iris.Constraint(
            neighbour_selection_method_name=self.neighbour_selection_method)
        data_constraint = iris.Constraint(
            grid_attributes_key="vertical_displacement")
        vertical_displacement = neighbour_cube.extract(method_constraint
                                                       & data_constraint)

        # Apply lapse rate adjustment to the temperature at each site.
        new_spot_lapse_rate = iris.util.broadcast_to_shape(
            spot_lapse_rate.data, spot_data_cube.shape, [-1])
        new_temperatures = (
            spot_data_cube.data +
            (new_spot_lapse_rate * vertical_displacement.data)).astype(
                np.float32)
        new_spot_cube = spot_data_cube.copy(data=new_temperatures)
        return new_spot_cube
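
A worked numpy example of the adjustment formula from the docstring, with hypothetical site values:

import numpy as np

temperature = np.array([280.0, 285.0], dtype=np.float32)             # K
lapse_rate = np.array([-0.0065, -0.0065], dtype=np.float32)          # K m-1
vertical_displacement = np.array([100.0, -50.0], dtype=np.float32)   # m

# lapse_rate_adjusted_temperatures = temperatures + lapse_rate * displacement
adjusted = (temperature + lapse_rate * vertical_displacement).astype(np.float32)
print(adjusted)  # approximately [279.35 285.325]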
Example #23
    def _regrid_to_target(
        self,
        cube: Cube,
        target_grid: Cube,
        regridded_title: Optional[str],
        regrid_mode: str,
    ) -> Cube:
        """
        Regrid cube to target_grid, inherit grid attributes and update title

        Args:
            cube:
                Cube to be regridded
            target_grid:
                Data on the target grid. If regridding with mask, this cube
                should contain land-sea mask data to be used in adjusting land
                and sea points after regridding.
            regridded_title:
                New value for the "title" attribute to be used after
                regridding. If not set, a default value is used.
            regrid_mode:
                "bilinear","nearest","nearest-with-mask",
                "nearest-2","nearest-with-mask-2","bilinear-2","bilinear-with-mask-2"

        Returns:
            Regridded cube with updated attributes.
        """
        if regrid_mode in (
                "nearest-with-mask",
                "nearest-with-mask-2",
                "bilinear-with-mask-2",
        ):
            if self.landmask_name not in self.landmask_source_grid.name():
                msg = "Expected {} in input_landmask cube but found {}".format(
                    self.landmask_name, repr(self.landmask_source_grid))
                warnings.warn(msg)

            if self.landmask_name not in target_grid.name():
                msg = "Expected {} in target_grid cube but found {}".format(
                    self.landmask_name, repr(target_grid))
                warnings.warn(msg)

        # basic categories: (1) Iris-based, (2) new nearest-based, (3) new bilinear-based
        if regrid_mode in ("bilinear", "nearest", "nearest-with-mask"):
            if "nearest" in regrid_mode:
                regridder = Nearest(extrapolation_mode=self.extrapolation_mode)
            else:
                regridder = Linear(extrapolation_mode=self.extrapolation_mode)
            cube = cube.regrid(target_grid, regridder)

            # Iris regridding is used, then land and sea points are adjusted
            # if a land-sea mask is in use
            if self.REGRID_REQUIRES_LANDMASK[regrid_mode]:
                cube = AdjustLandSeaPoints(
                    vicinity_radius=self.landmask_vicinity,
                    extrapolation_mode=self.extrapolation_mode,
                )(cube, self.landmask_source_grid, target_grid)

        # new version of nearest/bilinear option with/without land-sea mask
        elif regrid_mode in (
                "nearest-2",
                "nearest-with-mask-2",
                "bilinear-2",
                "bilinear-with-mask-2",
        ):
            cube = RegridWithLandSeaMask(
                regrid_mode=regrid_mode,
                vicinity_radius=self.landmask_vicinity)(
                    cube, self.landmask_source_grid, target_grid)

        # identify grid-describing attributes on source cube that need updating
        required_grid_attributes = [
            attr for attr in cube.attributes if attr in MOSG_GRID_ATTRIBUTES
        ]

        # update attributes if available on target grid, otherwise remove
        for key in required_grid_attributes:
            if key in target_grid.attributes:
                cube.attributes[key] = target_grid.attributes[key]
            else:
                cube.attributes.pop(key)

        cube.attributes["title"] = (MANDATORY_ATTRIBUTE_DEFAULTS["title"]
                                    if regridded_title is None else
                                    regridded_title)

        return cube
Example #24
    def process(self, input_cube: Cube) -> Cube:
        """Convert each point to a truth value based on provided threshold
        values. The truth value may or may not be fuzzy depending upon if
        fuzzy_bounds are supplied.  If the plugin has a "threshold_units"
        member, this is used to convert both thresholds and fuzzy bounds into
        the units of the input cube.

        Args:
            input_cube:
                Cube to threshold. The code is dimension-agnostic.

        Returns:
            Cube after a threshold has been applied. The data within this
            cube will contain values between 0 and 1 to indicate whether
            a given threshold has been exceeded or not.

                The cube meta-data will contain:
                * Input_cube name prepended with
                probability_of_X_above(or below)_threshold (where X is
                the diagnostic under consideration)
                * Threshold dimension coordinate with same units as input_cube
                * Threshold attribute ("greater_than",
                "greater_than_or_equal_to", "less_than", or
                "less_than_or_equal_to" depending on the operator)
                * Cube units set to 1.

        Raises:
            ValueError: if a np.nan value is detected within the input cube.
        """
        if np.isnan(input_cube.data).any():
            raise ValueError("Error: NaN detected in input cube data")

        self.threshold_coord_name = input_cube.name()

        thresholded_cubes = iris.cube.CubeList()
        for threshold, bounds in zip(self.thresholds, self.fuzzy_bounds):
            cube = input_cube.copy()
            if self.threshold_units is not None:
                cube.convert_units(self.threshold_units)
            # if upper and lower bounds are equal, set a deterministic 0/1
            # probability based on exceedance of the threshold
            if bounds[0] == bounds[1]:
                truth_value = self.comparison_operator["function"](cube.data,
                                                                   threshold)
            # otherwise, scale exceedance probabilities linearly between 0/1
            # at the min/max fuzzy bounds and 0.5 at the threshold value
            else:
                truth_value = np.where(
                    cube.data < threshold,
                    rescale(
                        cube.data,
                        data_range=(bounds[0], threshold),
                        scale_range=(0.0, 0.5),
                        clip=True,
                    ),
                    rescale(
                        cube.data,
                        data_range=(threshold, bounds[1]),
                        scale_range=(0.5, 1.0),
                        clip=True,
                    ),
                )
                # if requirement is for probabilities less_than or
                # less_than_or_equal_to the threshold (rather than
                # greater_than or greater_than_or_equal_to), invert
                # the exceedance probability
                if "less_than" in self.comparison_operator["spp_string"]:
                    truth_value = 1.0 - truth_value

            truth_value = truth_value.astype(FLOAT_DTYPE)

            if np.ma.is_masked(cube.data):
                # update unmasked points only
                cube.data[~input_cube.data.mask] = truth_value[
                    ~input_cube.data.mask
                ]
            else:
                cube.data = truth_value

            self._add_threshold_coord(cube, threshold)
            cube.coord(var_name="threshold").convert_units(input_cube.units)

            for func in self.each_threshold_func:
                cube = func(cube)

            thresholded_cubes.append(cube)

        (cube, ) = thresholded_cubes.merge()
        # Re-cast to 32bit now that any unit conversion has already taken place.
        cube.coord(var_name="threshold").points = cube.coord(
            var_name="threshold").points.astype(FLOAT_DTYPE)

        self._update_metadata(cube)
        enforce_coordinate_ordering(cube, ["realization", "percentile"])

        return cube
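
A minimal numpy sketch of the fuzzy rescaling between the bounds; np.interp reproduces the two clipped linear segments (0 at the lower bound, 0.5 at the threshold, 1 at the upper bound), with hypothetical values:

import numpy as np

threshold, bounds = 1.0, (0.5, 2.0)
data = np.array([0.0, 0.5, 0.75, 1.0, 1.5, 2.0, 5.0])

truth_value = np.interp(data, [bounds[0], threshold, bounds[1]], [0.0, 0.5, 1.0])
print(truth_value)  # [0.   0.   0.25 0.5  0.75 1.   1.  ]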
Example #25
def calculate_uv_index(
    uv_upward: Cube,
    uv_downward: Cube,
    scale_factor: float = 3.6,
    model_id_attr: Optional[str] = None,
) -> Cube:
    """
    A plugin to calculate the uv index using radiation flux in UV downward
    at surface, radiation flux UV upward at surface and a scaling factor.
    The scaling factor is configurable by the user.

    Args:
        uv_upward:
            A cube of the radiation flux in UV upward at surface. This is a
            UM diagnostic produced by the UM radiation scheme.
            This band covers 200-320 nm and uses six absorption coefficients
            for ozone and one Rayleigh scattering coefficient (W m-2).
        uv_downward:
            A cube of the radiation flux in UV downward at surface.
            This is a UM diagnostic produced by the UM radiation scheme;
            see above or the paper referenced for more details (W m-2).
        scale_factor:
            The uv scale factor. Default is 3.6. This factor has
            been empirically derived and should not be
            changed except if there are scientific reasons to
            do so. For more information see section 2.1.1 of the paper
            referenced below (no units)
        model_id_attr:
            Name of the attribute used to identify the source model for
            blending.

    Returns:
        A cube of the calculated UV index.

    Raises:
        ValueError: If uv_upward is not named correctly.
        ValueError: If uv_downward is not named correctly.
        ValueError: If units do not match.

    References:
        Turner, E. C., Manners, J., Morcrette, C. J., O'Hagan, J. B.,
        & Smedley, A. R. D. (2017): Toward a New UV Index Diagnostic
        in the Met Office's Forecast Model. Journal of Advances in
        Modeling Earth Systems 9, 2654-2671.

    """
    if uv_upward.name() != "surface_upwelling_ultraviolet_flux_in_air":
        msg = ("The radiation flux in UV upward has the wrong name, "
               "it should be "
               "surface_upwelling_ultraviolet_flux_in_air "
               "but is {}".format(uv_upward.name()))
        raise ValueError(msg)
    if uv_downward.name() != "surface_downwelling_ultraviolet_flux_in_air":
        msg = ("The radiation flux in UV downward has the wrong name, "
               "it should be "
               "surface_downwelling_ultraviolet_flux_in_air "
               "but is {}".format(uv_downward.name()))
        raise ValueError(msg)
    if uv_upward.units != uv_downward.units:
        msg = "The input uv files do not have the same units."
        raise ValueError(msg)

    uv_data = (uv_upward.data + uv_downward.data) * scale_factor
    attributes = generate_mandatory_attributes([uv_upward, uv_downward],
                                               model_id_attr=model_id_attr)
    uv_index = create_new_diagnostic_cube("ultraviolet_index",
                                          "1",
                                          uv_upward,
                                          attributes,
                                          data=uv_data)

    return uv_index
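
A worked example of the scaling with hypothetical flux values; the resulting index is dimensionless:

import numpy as np

uv_up = np.array([0.5])    # W m-2
uv_down = np.array([2.0])  # W m-2
uv_index = (uv_up + uv_down) * 3.6
print(uv_index)  # [9.]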
Example #26
    def process(
        self,
        initial_cube: Cube,
        ucube: Cube,
        vcube: Cube,
        orographic_enhancement: Cube,
        attributes_dict: Optional[Dict] = None,
    ) -> List[Cube]:
        """
        Extrapolate the initial precipitation field using the velocities
        provided to the required forecast lead times

        Args:
            initial_cube:
                Cube of precipitation at initial time
            ucube:
                x-advection velocities
            vcube:
                y-advection velocities
            orographic_enhancement:
                Cube containing orographic enhancement fields at all required
                lead times
            attributes_dict:
                Dictionary containing information for amending the attributes
                of the output cube.

        Returns:
            List of extrapolated iris.cube.Cube instances at the required
            lead times (including T+0 / analysis time)
        """
        # ensure input cube is suitable for advection
        if "rate" not in initial_cube.name():
            msg = "{} is not a precipitation rate cube"
            raise ValueError(msg.format(initial_cube.name()))
        check_if_grid_is_equal_area(initial_cube)

        self.analysis_cube = initial_cube.copy()
        self.required_units = initial_cube.units
        self.orogenh = orographic_enhancement

        # get unmasked precipitation rate array with orographic enhancement
        # subtracted to input into advection
        precip_rate = self._get_advectable_precip_rate()

        # calculate displacement in grid squares per time step
        displacement = self._generate_displacement_array(ucube, vcube)

        # PySteps prints a message on import to stdout - trap this
        # This should be removed for PySteps v1.1.0 which has a configuration setting
        # for this
        # Import here to minimise dependencies
        with redirect_stdout():
            from pysteps.extrapolation.semilagrangian import extrapolate
        # call pysteps extrapolation method; using interp_order=0 which is
        # nearest neighbour
        all_forecasts = extrapolate(
            precip_rate,
            displacement,
            self.num_timesteps,
            allow_nonfinite_values=True,
            interp_order=0,
        )

        # repackage data as IMPROVER masked cubes
        forecast_cubes = self._generate_forecast_cubes(all_forecasts,
                                                       attributes_dict)

        return forecast_cubes
Example #27
    def process(self, input_cube: Cube) -> Cube:
        """Convert each point to a truth value based on provided threshold
        function. If the plugin has a "threshold_units"
        member, this is used to convert a copy of the input_cube into
        the units specified.

        Args:
            input_cube:
                Cube to threshold. Must have a latitude coordinate.

        Returns:
            Cube after a threshold has been applied. The data within this
            cube will contain values between 0 and 1 to indicate whether
            a given threshold has been exceeded or not.

                The cube meta-data will contain:
                * Input_cube name prepended with
                probability_of_X_above(or below)_threshold (where X is
                the diagnostic under consideration)
                * Threshold dimension coordinate with same units as input_cube
                * Threshold attribute ("greater_than",
                "greater_than_or_equal_to", "less_than", or
                "less_than_or_equal_to" depending on the operator)
                * Cube units set to 1.

        Raises:
            ValueError: if a np.nan value is detected within the input cube.
        """
        if np.isnan(input_cube.data).any():
            raise ValueError("Error: NaN detected in input cube data")

        self.threshold_coord_name = input_cube.name()

        cube = input_cube.copy()
        if self.threshold_units is not None:
            cube.convert_units(self.threshold_units)

        cube.coord("latitude").convert_units("degrees")
        threshold_variant = cube.coord("latitude").points
        threshold_over_latitude = np.array(
            self.threshold_function(threshold_variant))

        # Add a scalar axis for the longitude axis so that numpy's array-
        # broadcasting knows what we want to do
        truth_value = self.comparison_operator["function"](
            cube.data,
            np.expand_dims(threshold_over_latitude, 1),
        )

        truth_value = truth_value.astype(FLOAT_DTYPE)

        if np.ma.is_masked(cube.data):
            # update unmasked points only
            cube.data[~input_cube.data.mask] = truth_value[
                ~input_cube.data.mask
            ]
        else:
            cube.data = truth_value

        self._add_latitude_threshold_coord(cube, threshold_over_latitude)
        cube.coord(var_name="threshold").convert_units(input_cube.units)

        self._update_metadata(cube)
        enforce_coordinate_ordering(cube, ["realization", "percentile"])

        return cube
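
A minimal numpy sketch of the latitude-dependent thresholding, using a hypothetical threshold function; expanding the thresholds over the longitude axis lets numpy broadcast one value per row:

import numpy as np

latitudes = np.array([0.0, 30.0, 60.0])
data = np.full((3, 4), 270.0)            # (latitude, longitude) grid

def threshold_function(lat):             # hypothetical function
    return 280.0 - 0.5 * np.abs(lat)

thresholds = np.expand_dims(threshold_function(latitudes), 1)  # shape (3, 1)
truth_value = (data > thresholds).astype(np.float32)
print(truth_value)  # row 0 all 0.0; rows 1 and 2 all 1.0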
Example #28
    def run(self, cube: Cube) -> None:
        """Populates self-consistent interpreted parameters, or raises collated errors
        describing (as far as possible) how the metadata are a) not self-consistent,
        and / or b) not consistent with the Met Office IMPROVER standard.

        Although every effort has been made to return as much information as possible,
        collated errors may not be complete if the issue is fundamental. The developer
        is advised to rerun this tool after each fix, until no further problems are
        raised.
        """

        # 1) Interpret diagnostic and type-specific metadata, including cell methods
        if cube.name() in ANCILLARIES:
            self.field_type = self.ANCIL
            self.diagnostic = cube.name()
            if cube.cell_methods:
                self.errors.append(f"Unexpected cell methods {cube.cell_methods}")

        elif cube.name() in SPECIAL_CASES:
            self.field_type = self.diagnostic = cube.name()
            if cube.name() == "weather_code":
                for cm in cube.cell_methods:
                    if cm != WXCODE_MODE_CM or cube.name() not in WXCODE_NAMES:
                        self.errors.append(
                            f"Unexpected cell methods {cube.cell_methods}"
                        )
            elif cube.name() == "wind_from_direction":
                if cube.cell_methods:
                    expected = CellMethod(method="mean", coords="realization")
                    if len(cube.cell_methods) > 1 or cube.cell_methods[0] != expected:
                        self.errors.append(
                            f"Unexpected cell methods {cube.cell_methods}"
                        )
            else:
                self.unhandled = True
                return

        else:
            if "probability" in cube.name() and "threshold" in cube.name():
                self.field_type = self.PROB
                self.check_probability_cube_metadata(cube)
            else:
                self.diagnostic = cube.name()
                try:
                    perc_coord = find_percentile_coordinate(cube)
                except CoordinateNotFoundError:
                    coords = get_coord_names(cube)
                    if any(
                        [cube.coord(coord).var_name == "threshold" for coord in coords]
                    ):
                        self.field_type = self.PROB
                        self.check_probability_cube_metadata(cube)
                    else:
                        self.field_type = self.DIAG
                else:
                    self.field_type = self.PERC
                    if perc_coord.name() != PERC_COORD:
                        self.errors.append(
                            f"Percentile coordinate should have name {PERC_COORD}, "
                            f"has {perc_coord.name()}"
                        )

                    if perc_coord.units != "%":
                        self.errors.append(
                            "Percentile coordinate should have units of %, "
                            f"has {perc_coord.units}"
                        )

            self.check_cell_methods(cube)

        # 2) Interpret model and blend information from cube attributes
        self.check_attributes(cube.attributes)

        # 3) Check whether expected coordinates are present
        coords = get_coord_names(cube)
        if "spot_index" in coords:
            self.check_spot_data(cube, coords)

        if self.field_type == self.ANCIL:
            # there is no definitive standard for time coordinates on static ancillaries
            pass
        elif cube.coords("time_in_local_timezone"):
            # For data on local timezones, the time coordinate will match the horizontal
            # dimensions and there will be no forecast period.
            expected_coords = set(LOCAL_TIME_COORDS + UNBLENDED_TIME_COORDS)
            expected_coords.discard("forecast_period")
            self._check_coords_present(coords, expected_coords)
            self._check_coords_are_horizontal(cube, ["time"])
        elif self.blended:
            self._check_coords_present(coords, BLENDED_TIME_COORDS)
        else:
            self._check_coords_present(coords, UNBLENDED_TIME_COORDS)

        # 4) Check points are equal to upper bounds for bounded time coordinates
        for coord in ["time", "forecast_period"]:
            if coord in get_coord_names(cube):
                self._check_coord_bounds(cube, coord)

        # 5) Check datatypes on data and coordinates
        try:
            check_mandatory_standards(cube)
        except ValueError as cause:
            self.errors.append(str(cause))

        # 6) Check multiple realizations only exist for ensemble models
        if self.field_type == self.DIAG:
            try:
                realization_coord = cube.coord("realization")
            except CoordinateNotFoundError:
                pass
            else:
                model_id = cube.attributes.get(self.model_id_attr, "ens")
                if "ens" not in model_id and len(realization_coord.points) > 1:
                    self.errors.append(
                        f"Deterministic model should not have {len(realization_coord.points)} "
                        "realizations"
                    )

        # 7) Raise collated errors if present
        if self.errors:
            raise ValueError("\n".join(self.errors))
Example #29
def calculate_uv_index(
    uv_downward: Cube,
    scale_factor: float = 3.6,
    model_id_attr: Optional[str] = None,
) -> Cube:
    """
    A plugin to calculate the uv index using radiation flux in UV downward
    at the surface and a scaling factor.
    The scaling factor is configurable by the user.

    Args:
        uv_downward:
            A cube of the radiation flux in UV downward at surface.
            This is a UM diagnostic produced by the UM radiation scheme;
            see the paper referenced below for more details (W m-2).
        scale_factor:
            The uv scale factor. Default is 3.6 (m2 W-1). This factor has
            been empirically derived and should not be
            changed except if there are scientific reasons to
            do so. For more information see section 2.1.1 of the paper
            referenced below.
        model_id_attr:
            Name of the attribute used to identify the source model for
            blending.

    Returns:
        A cube of the calculated UV index.

    Raises:
        ValueError: If uv_downward is not named correctly.
        ValueError: If uv_downward contains values that are negative or
            not a number.

    References:
        Turner, E. C., Manners, J., Morcrette, C. J., O'Hagan, J. B.,
        & Smedley, A. R. D. (2017): Toward a New UV Index Diagnostic
        in the Met Office's Forecast Model. Journal of Advances in
        Modeling Earth Systems 9, 2654-2671.

    """

    if uv_downward.name() != "surface_downwelling_ultraviolet_flux_in_air":
        msg = ("The radiation flux in UV downward has the wrong name, "
               "it should be "
               "surface_downwelling_ultraviolet_flux_in_air "
               "but is {}".format(uv_downward.name()))
        raise ValueError(msg)

    if np.any(uv_downward.data < 0) or np.isnan(uv_downward.data).any():
        msg = ("The radiation flux in UV downward contains data "
               "that is negative or NaN. Data should be >= 0.")
        raise ValueError(msg)

    uv_downward.convert_units("W m-2")
    uv_data = uv_downward.data * scale_factor
    attributes = generate_mandatory_attributes([uv_downward],
                                               model_id_attr=model_id_attr)
    uv_index = create_new_diagnostic_cube("ultraviolet_index",
                                          "1",
                                          uv_downward,
                                          attributes,
                                          data=uv_data)

    return uv_index