Example #1
    def _create_output_cube(self, cube, advected_data, timestep):
        """
        Create a cube and appropriate metadata to contain the advected forecast

        Args:
            cube (iris.cube.Cube):
                Source cube (before advection)
            advected_data (numpy.ndarray):
                Advected data
            timestep (datetime.timedelta):
                Time difference between the advected output and the source

        Returns:
            iris.cube.Cube
        """
        attributes = generate_mandatory_attributes([cube])
        if "institution" in cube.attributes.keys():
            attributes["source"] = "{} Nowcast".format(attributes["institution"])
        else:
            attributes["source"] = "Nowcast"
        advected_cube = create_new_diagnostic_cube(
            cube.name(), cube.units, cube, attributes, data=advected_data
        )
        amend_attributes(advected_cube, self.attributes_dict)
        set_history_attribute(advected_cube, "Nowcast")

        self._update_time(cube.coord("time").copy(), advected_cube, timestep)
        self._add_forecast_reference_time(cube.coord("time").copy(), advected_cube)
        self._add_forecast_period(advected_cube, timestep)

        return advected_cube

    def _update_blended_metadata(self, blended_cube, attributes_dict):
        """
        Update metadata after blending:
        - For cycle and model blending, set a single forecast reference time
        and period using self.cycletime_point or the latest cube contributing
        to the blend
        - Remove scalar coordinates that were previously associated with the
        blend dimension
        - Update attributes as specified via process arguments
        - Set any missing mandatory arguments to their default values
        Modifies cube in place.

        Args:
            blended_cube (iris.cube.Cube)
            attributes_dict (dict or None)
        """
        if self.blend_coord in ["forecast_reference_time", "model_id"]:
            self._set_forecast_reference_time_and_period(blended_cube)
        for coord in self.crds_to_remove:
            blended_cube.remove_coord(coord)
        if attributes_dict is not None:
            amend_attributes(blended_cube, attributes_dict)
        for attr in MANDATORY_ATTRIBUTES:
            if attr not in blended_cube.attributes:
                blended_cube.attributes[attr] = MANDATORY_ATTRIBUTE_DEFAULTS[attr]
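
All of these examples channel attribute changes through amend_attributes. As a point of reference, here is a minimal sketch of its documented contract (keys are attribute names; values are either the required value or "remove"), assuming improver and iris are installed:

import numpy as np
from iris.cube import Cube
from improver.metadata.amend import amend_attributes

cube = Cube(np.zeros((2, 2), dtype=np.float32), long_name="example_field")
cube.attributes = {"source": "old source", "um_version": "11.2"}
amend_attributes(cube, {"source": "new source", "um_version": "remove"})
# cube.attributes is now {"source": "new source"}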
Example #3
    def process(self, cube, target_grid=None, new_name=None, new_units=None,
                regridded_title=None, coords_to_remove=None,
                attributes_dict=None, fix_float64=False):
        """
        Perform regridding and metadata adjustments

        Args:
            cube (iris.cube.Cube):
                Input cube to be standardised
            target_grid (iris.cube.Cube or None):
                Cube on the required grid. For "nearest-with-mask" regridding,
                this cube should contain a binary land-sea mask
                ("land_binary_mask"). If target_grid is None, no regridding is
                performed.
            new_name (str or None):
                Optional rename for output cube
            new_units (str or None):
                Optional unit conversion for output cube
            regridded_title (str or None):
                New title attribute to be applied after regridding. If not set,
                the title attribute is set to a default value if the field is
                regridded, as "title" may contain grid information.
            coords_to_remove (list of str or None):
                Optional list of scalar coordinates to remove from output cube
            attributes_dict (dict or None):
                Optional dictionary of required attribute updates. Keys are
                attribute names, and values are the required value or "remove".
            fix_float64 (bool):
                Flag to de-escalate float64 precision

        Returns:
            iris.cube.Cube
        """
        # regridding
        if target_grid:
            # if regridding using a land-sea mask, check this covers the source
            # grid in the required coordinates
            if self.REGRID_REQUIRES_LANDMASK[self.regrid_mode]:
                if not grid_contains_cutout(self.landmask_source_grid, cube):
                    raise ValueError(
                        "Source landmask does not match input grid")
            cube = self._regrid_to_target(cube, target_grid, regridded_title)

        # standard metadata updates
        cube = self._collapse_scalar_dimensions(cube)
        self._standardise_time_coordinates(cube)

        # optional metadata updates
        if new_name:
            cube.rename(new_name)
        if new_units:
            cube.convert_units(new_units)
        if coords_to_remove:
            self._remove_scalar_coords(cube, coords_to_remove)
        if attributes_dict:
            amend_attributes(cube, attributes_dict)

        check_cube_not_float64(cube, fix=fix_float64)

        return cube

    def _reformat_analysis_cube(self, attribute_changes):
        """
        Add forecast reference time and forecast period coordinates (if they do
        not already exist) and nowcast attributes to analysis cube
        """
        coords = [coord.name() for coord in self.analysis_cube.coords()]
        if "forecast_reference_time" not in coords:
            frt_coord = self.analysis_cube.coord("time").copy()
            frt_coord.rename("forecast_reference_time")
            self.analysis_cube.add_aux_coord(frt_coord)
        if "forecast_period" not in coords:
            self.analysis_cube.add_aux_coord(
                AuxCoord(np.array([0], dtype=np.int32), "forecast_period", units="seconds")
            )

        self.analysis_cube.attributes = generate_mandatory_attributes(
            [self.analysis_cube]
        )
        self.analysis_cube.attributes["source"] = "MONOW"
        self.analysis_cube.attributes[
            "title"
        ] = "MONOW Extrapolation Nowcast on UK 2 km Standard Grid"
        set_history_attribute(self.analysis_cube, "Nowcast")
        if attribute_changes is not None:
            amend_attributes(self.analysis_cube, attribute_changes)
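
The REGRID_REQUIRES_LANDMASK lookup used in process above maps each regrid mode to whether a land-sea mask is required. A plausible shape for it, inferred from the docstring's "nearest-with-mask" mode (the other mode names are assumptions):

# Mode names other than "nearest-with-mask" are illustrative assumptions
REGRID_REQUIRES_LANDMASK = {
    "bilinear": False,
    "nearest": False,
    "nearest-with-mask": True,
}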
Example #5
    def test_basic(self):
        """Test function adds, removes and modifies attributes as expected"""
        expected_attributes = {
            "source": "IMPROVER unit tests",
            "mosg__model_configuration": "other_model",
        }
        amend_attributes(self.cube, self.metadata_dict)
        self.assertDictEqual(self.cube.attributes, expected_attributes)
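
The fixture behind this test is not shown; a hypothetical setUp consistent with the expected attributes could look like the following (all starting attribute values are invented for illustration, and the numpy and iris Cube imports are assumed):

    def setUp(self):
        """Hypothetical fixture for the amend_attributes test above"""
        self.cube = Cube(
            np.zeros((2, 2), dtype=np.float32), long_name="air_temperature"
        )
        self.cube.attributes = {
            "history": "some history",              # removed via "remove"
            "mosg__model_configuration": "uk_det",  # modified
        }
        self.metadata_dict = {
            "source": "IMPROVER unit tests",        # added
            "mosg__model_configuration": "other_model",
            "history": "remove",
        }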
Example #6
def update_blended_metadata(
    cube: Cube,
    blend_coord: str,
    coords_to_remove: Optional[List[str]] = None,
    cycletime: Optional[str] = None,
    attributes_dict: Optional[Dict[str, str]] = None,
    model_id_attr: Optional[str] = None,
) -> None:
    """
    Update metadata as required after blending:
    - For cycle and model blending, set a single forecast reference time
      and period using the current cycletime
    - For model blending, add an attribute detailing the contributing models
    - Remove scalar coordinates that were previously associated with the
      blend dimension
    - Update attributes as specified via process arguments
    - Set any missing mandatory attributes to their default values

    Modifies cube in place.

    Args:
        cube:
            Blended cube
        blend_coord:
            Name of coordinate over which blending has been performed
        coords_to_remove:
            Names of scalar coordinates to be removed from the blended cube
        cycletime:
            Current cycletime in YYYYMMDDTHHmmZ format
        attributes_dict:
            Optional user-defined attributes to add to the cube
        model_id_attr:
            Name of the attribute for use in model blending, to record the
            names of contributing models on the blended output
    """
    if blend_coord in ["forecast_reference_time", MODEL_BLEND_COORD]:
        _set_blended_time_coords(cube, cycletime)

    if blend_coord == MODEL_BLEND_COORD:
        (contributing_models,) = cube.coord(MODEL_NAME_COORD).points
        # iris concatenates string coordinates as a "|"-separated string
        cube.attributes[model_id_attr] = " ".join(
            sorted(contributing_models.split("|"))
        )

    if coords_to_remove is not None:
        for coord in coords_to_remove:
            cube.remove_coord(coord)

    if attributes_dict is not None:
        amend_attributes(cube, attributes_dict)

    for attr in MANDATORY_ATTRIBUTES:
        if attr not in cube.attributes:
            cube.attributes[attr] = MANDATORY_ATTRIBUTE_DEFAULTS[attr]
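
The model-attribute handling above is easy to illustrate standalone: iris merges string-valued coordinate points into a single "|"-separated string, which is split and re-joined alphabetically with spaces (model names hypothetical):

points = "uk_ens|uk_det"
contributing_models = " ".join(sorted(points.split("|")))
# contributing_models == "uk_det uk_ens"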
Example #7
    def process(
        self,
        cube: Cube,
        new_name: Optional[str] = None,
        new_units: Optional[str] = None,
        coords_to_remove: Optional[List[str]] = None,
        attributes_dict: Optional[Dict[str, Any]] = None,
    ) -> Cube:
        """
        Perform compulsory and user-configurable metadata adjustments.  The
        compulsory adjustments are:

        - to collapse any scalar dimensions apart from realization (which is expected
          always to be a dimension);
        - to cast the cube data and coordinates into suitable datatypes;
        - to convert time-related metadata into the required units; and
        - to remove the ("point": "time") cell method.

        Args:
            cube:
                Input cube to be standardised
            new_name:
                Optional rename for output cube
            new_units:
                Optional unit conversion for output cube
            coords_to_remove:
                Optional list of scalar coordinates to remove from output cube
            attributes_dict:
                Optional dictionary of required attribute updates. Keys are
                attribute names, and values are the required value or "remove".

        Returns:
            The processed cube
        """
        cube = self._collapse_scalar_dimensions(cube)

        if new_name:
            cube.rename(new_name)
        if new_units:
            cube.convert_units(new_units)
        if coords_to_remove:
            self._remove_scalar_coords(cube, coords_to_remove)
        if attributes_dict:
            amend_attributes(cube, attributes_dict)
        self._discard_redundant_cell_methods(cube)

        # this must be done after unit conversion because, if the input is an
        # integer field, unit conversion outputs the new data as float64
        self._standardise_dtypes_and_units(cube)

        return cube
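
A hypothetical call pattern for this process method (the plugin name StandardiseMetadata and all argument values below are assumptions for illustration):

plugin = StandardiseMetadata()
result = plugin.process(
    input_cube,
    new_name="air_temperature",
    new_units="K",
    coords_to_remove=["height"],
    attributes_dict={"title": "Post-Processed Forecast", "um_version": "remove"},
)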
Example #8
    def process(
        self,
        cube,
        new_name=None,
        new_units=None,
        coords_to_remove=None,
        attributes_dict=None,
    ):
        """
        Perform compulsory and user-configurable metadata adjustments.  The
        compulsory adjustments are to collapse any scalar dimensions apart from
        realization (which is expected always to be a dimension); to cast the cube
        data and coordinates into suitable datatypes; and to convert time-related
        metadata into the required units.

        Args:
            cube (iris.cube.Cube):
                Input cube to be standardised
            new_name (str or None):
                Optional rename for output cube
            new_units (str or None):
                Optional unit conversion for output cube
            coords_to_remove (list of str or None):
                Optional list of scalar coordinates to remove from output cube
            attributes_dict (dict or None):
                Optional dictionary of required attribute updates. Keys are
                attribute names, and values are the required value or "remove".

        Returns:
            iris.cube.Cube
        """
        cube = self._collapse_scalar_dimensions(cube)

        if new_name:
            cube.rename(new_name)
        if new_units:
            cube.convert_units(new_units)
        if coords_to_remove:
            self._remove_scalar_coords(cube, coords_to_remove)
        if attributes_dict:
            amend_attributes(cube, attributes_dict)

        # this must be done after unit conversion because, if the input is an
        # integer field, unit conversion outputs the new data as float64
        self._standardise_dtypes_and_units(cube)

        return cube
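
The closing comment is worth demonstrating: converting the units of an integer field makes the conversion output float64, which is why dtype standardisation runs afterwards. A small sketch (hypothetical cube):

import numpy as np
from iris.cube import Cube

cube = Cube(np.array([273], dtype=np.int32), units="K")
cube.convert_units("celsius")
# per the comment above, cube.data is now float64 and must be re-cast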
Example #9
    def _update_metadata_only(self, cube, attributes_dict, cycletime):
        """
        If blend_coord has only one value (for example cycle blending with
        only one cycle available), or is not present (case where only
        one model has been provided for a model blend), update attributes
        and time coordinates and return.
        """
        result = cube.copy()
        if attributes_dict is not None:
            amend_attributes(result, attributes_dict)

        (result,) = rebadge_forecasts_as_latest_cycle([result], cycletime)
        if self.blend_coord in ["forecast_reference_time", "model_id"]:
            for coord in ["forecast_period", "forecast_reference_time"]:
                msg = f"{coord} will be removed in future and should not be used"
                result.coord(coord).attributes.update({"deprecation_message": msg})

            if cycletime is not None:
                add_blend_time(result, cycletime)
            else:
                msg = "Current cycle time is required for cycle and model blending"
                raise ValueError(msg)

        return result
    def process(
        self,
        cubelist,
        cycletime=None,
        model_id_attr=None,
        spatial_weights=False,
        fuzzy_length=20000,
        attributes_dict=None,
    ):
        """
        Merge a cubelist, calculate appropriate blend weights and compute the
        weighted mean. Returns a single cube collapsed over the dimension
        given by self.blend_coord.

        Args:
            cubelist (iris.cube.CubeList):
                List of cubes to be merged and blended
            cycletime (str or None):
                Forecast reference time to use for output cubes, in the format
                YYYYMMDDTHHMMZ.  If not set, the latest of the input cube
                forecast reference times is used.
            model_id_attr (str or None):
                Name of the attribute by which to identify the source model and
                construct "model" coordinates for blending.
            spatial_weights (bool):
                If true, calculate spatial weights.
            fuzzy_length (float):
                Distance (in metres) over which to smooth spatial weights.
                Default is 20 km.
            attributes_dict (dict or None):
                Changes to cube attributes to be applied after blending

        Warns:
            UserWarning: If blending masked data without spatial weights.
                         This has not been fully tested.
        """
        # Prepare cubes for weighted blending, including creating model_id and
        # model_configuration coordinates for multi-model blending. The merged
        # cube has a monotonically ascending blend coordinate. Plugin raises an
        # error if blend_coord is not present on all input cubes.
        merger = MergeCubesForWeightedBlending(
            self.blend_coord,
            weighting_coord=self.weighting_coord,
            model_id_attr=model_id_attr,
        )
        cube = merger(cubelist, cycletime=cycletime)

        # if blend_coord has only one value (for example cycle blending with
        # only one cycle available), or is not present (case where only
        # one model has been provided for a model blend), update attributes
        # and ensure that the forecast reference time on the returned cube
        # is set to the current IMPROVER processing cycle.
        coord_names = [coord.name() for coord in cube.coords()]
        if (self.blend_coord not in coord_names
                or len(cube.coord(self.blend_coord).points) == 1):
            result = cube.copy()
            if attributes_dict is not None:
                amend_attributes(result, attributes_dict)
            (result,) = rebadge_forecasts_as_latest_cycle([result], cycletime)

        # otherwise, calculate weights and blend across specified dimension
        else:
            # set up special treatment for model blending
            if "model" in self.blend_coord:
                self.blend_coord = "model_id"

            # calculate blend weights
            weights = self._calculate_blending_weights(cube)
            if spatial_weights:
                weights = self._update_spatial_weights(cube, weights,
                                                       fuzzy_length)
            elif np.ma.is_masked(cube.data):
                # Raise warning if blending masked arrays using non-spatial weights.
                warnings.warn(
                    "Blending masked data without spatial weights has not been"
                    " fully tested.")

            # blend across specified dimension
            BlendingPlugin = WeightedBlendAcrossWholeDimension(
                self.blend_coord)
            result = BlendingPlugin(
                cube,
                weights=weights,
                cycletime=cycletime,
                attributes_dict=attributes_dict,
            )

        return result
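
The cycletime strings documented above (YYYYMMDDTHHMMZ) can be parsed with standard datetime tooling; a sketch with the format string inferred from the docstring:

from datetime import datetime

cycletime = "20200101T0600Z"
cycle_point = datetime.strptime(cycletime, "%Y%m%dT%H%MZ")
# cycle_point == datetime(2020, 1, 1, 6, 0)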
Example #11
    def process(self, cube, timestep):
        """
        Extrapolates input cube data and updates validity time.  The input
        cube should have precisely two non-scalar dimension coordinates
        (spatial x/y), and is expected to be in a projection such that grid
        spacing is the same (or very close) at all points within the spatial
        domain.  The input cube should also have a "time" coordinate.

        Args:
            cube (iris.cube.Cube):
                The 2D cube containing data to be advected
            timestep (datetime.timedelta):
                Advection time step

        Returns:
            iris.cube.Cube:
                New cube with updated time and extrapolated data.  New data
                are filled with np.nan and masked where source data were
                out of bounds (ie where data could not be advected from outside
                the cube domain).

        """
        # check that the input cube has precisely two non-scalar dimension
        # coordinates (spatial x/y) and a scalar time coordinate
        check_input_coords(cube, require_time=True)

        # check spatial coordinates match those of plugin velocities
        if (cube.coord(axis="x") != self.x_coord
                or cube.coord(axis="y") != self.y_coord):
            raise InvalidCubeError("Input data grid does not match advection "
                                   "velocities")

        # derive velocities in "grid squares per second"
        def grid_spacing(coord):
            """Calculate grid spacing along a given spatial axis"""
            new_coord = coord.copy()
            new_coord.convert_units('m')
            return np.float32(np.diff(new_coord.points)[0])

        grid_vel_x = self.vel_x.data / grid_spacing(cube.coord(axis="x"))
        grid_vel_y = self.vel_y.data / grid_spacing(cube.coord(axis="y"))

        # raise a warning if data contains unmasked NaNs
        nan_count = np.count_nonzero(~np.isfinite(cube.data))
        if nan_count > 0:
            warnings.warn("input data contains unmasked NaNs")

        # perform advection and create output cube
        advected_data = self._advect_field(cube.data, grid_vel_x, grid_vel_y,
                                           timestep.total_seconds())
        advected_cube = cube.copy(data=advected_data)

        # increment output cube time and add a "forecast_period" coordinate
        (original_datetime,) = cube.coord("time").units.num2date(
            cube.coord("time").points)
        new_datetime = original_datetime + timestep

        new_time = (cube.coord("time").units).date2num(new_datetime)

        advected_cube.coord("time").points = new_time
        advected_cube.coord("time").convert_units(
            "seconds since 1970-01-01 00:00:00")
        advected_cube.coord("time").points = (np.around(
            advected_cube.coord("time").points).astype(np.int64))

        try:
            advected_cube.coord("forecast_reference_time").convert_units(
                "seconds since 1970-01-01 00:00:00")
        except CoordinateNotFoundError:
            frt_coord = cube.coord("time").copy()
            frt_coord.rename("forecast_reference_time")
            advected_cube.add_aux_coord(frt_coord)
            advected_cube.coord("forecast_reference_time").convert_units(
                "seconds since 1970-01-01 00:00:00")

        frt_points = np.around(
            advected_cube.coord("forecast_reference_time").points).astype(
                np.int64)
        advected_cube.coord("forecast_reference_time").points = frt_points

        forecast_period_seconds = np.int32(timestep.total_seconds())
        forecast_period_coord = AuxCoord(forecast_period_seconds,
                                         standard_name="forecast_period",
                                         units="s")
        try:
            advected_cube.remove_coord("forecast_period")
        except CoordinateNotFoundError:
            pass
        advected_cube.add_aux_coord(forecast_period_coord)

        # Modify the source attribute to describe the advected field as a
        # Nowcast
        if "institution" in advected_cube.attributes.keys():
            advected_cube.attributes["source"] = ("{} Nowcast".format(
                advected_cube.attributes["institution"]))
        else:
            advected_cube.attributes["source"] = "Nowcast"
        amend_attributes(advected_cube, self.attributes_dict)
        set_history_attribute(advected_cube, "Nowcast")
        return advected_cube
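
The time-update arithmetic in the middle of this method stands alone well. A sketch assuming cf_units (the units object carried by iris time coordinates):

from datetime import timedelta
from cf_units import Unit

time_unit = Unit("seconds since 1970-01-01 00:00:00", calendar="gregorian")
(original_datetime,) = time_unit.num2date([3600])
new_point = time_unit.date2num(original_datetime + timedelta(minutes=15))
# new_point == 4500.0, i.e. the validity time moved forward 15 minutes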
Example #12
def process(temperature: cli.inputcube,
            orography: cli.inputcube = None,
            land_sea_mask: cli.inputcube = None,
            *,
            max_height_diff: float = 35,
            nbhood_radius: int = 7,
            max_lapse_rate: float = -3 * DALR,
            min_lapse_rate: float = DALR,
            dry_adiabatic: bool = False):
    """Calculate temperature lapse rates in units of K m-1 over orography grid.

    Args:
        temperature (iris.cube.Cube):
            Air temperature data. This is required even when returning DALR,
            as this defines the grid on which lapse rates are required.
        orography (iris.cube.Cube):
            Orography data.
        land_sea_mask (iris.cube.Cube):
            Binary land-sea mask data. True for land-points, False for sea.
        max_height_diff (float):
            Maximum allowable height difference between the central point and
            points in the neighbourhood over which the lapse rate will be
            calculated.
        nbhood_radius (int):
            Radius of neighbourhood in grid points around each point. The
            neighbourhood is a square array with side length
            2*nbhood_radius + 1. The default value of 7 is from the reference
            paper (see plugin documentation).
        max_lapse_rate (float):
            Maximum lapse rate allowed, in K m-1.
        min_lapse_rate (float):
            Minimum lapse rate allowed, in K m-1.
        dry_adiabatic (bool):
            If True, returns a cube containing the dry adiabatic lapse rate
            rather than calculating the true lapse rate.

    Returns:
        iris.cube.Cube:
            Lapse rate (K m-1)

    Raises:
        ValueError: If minimum lapse rate is greater than maximum.
        ValueError: If maximum height difference is less than zero.
        ValueError: If neighbourhood radius is less than zero.
        RuntimeError: If calculating the true lapse rate and orography or
                      land mask arguments are not given.
    """
    import numpy as np
    from improver.lapse_rate import LapseRate
    from improver.metadata.amend import amend_attributes

    attributes_dict = {
        "title": "remove",
        "source": "remove",
        "history": "remove",
        "um_version": "remove"
    }

    if dry_adiabatic:
        result = temperature.copy(data=np.full_like(temperature.data, DALR))
        result.rename('air_temperature_lapse_rate')
        result.units = U_DALR
        amend_attributes(result, attributes_dict)
        return result

    if min_lapse_rate > max_lapse_rate:
        msg = 'Minimum lapse rate specified is greater than the maximum.'
        raise ValueError(msg)

    if max_height_diff < 0:
        msg = 'Maximum height difference specified is less than zero.'
        raise ValueError(msg)

    if nbhood_radius < 0:
        msg = 'Neighbourhood radius specified is less than zero.'
        raise ValueError(msg)

    if orography is None or land_sea_mask is None:
        msg = 'Missing orography and/or land mask arguments.'
        raise RuntimeError(msg)

    result = LapseRate(max_height_diff=max_height_diff,
                       nbhood_radius=nbhood_radius,
                       max_lapse_rate=max_lapse_rate,
                       min_lapse_rate=min_lapse_rate).process(
                           temperature, orography, land_sea_mask)
    amend_attributes(result, attributes_dict)
    return result
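
DALR is improver's dry adiabatic lapse rate constant; because it is negative (temperature falls with height), the defaults in this CLI satisfy the min/max check. A sign-convention sketch, with the constant's value assumed:

DALR = -0.0098                # K m-1, assumed value of improver.constants.DALR
max_lapse_rate = -3 * DALR    # +0.0294 K m-1: strong inversions allowed
min_lapse_rate = DALR         # -0.0098 K m-1: no steeper than dry adiabatic
assert min_lapse_rate <= max_lapse_rate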
Example #13
def process(neighbour_cube,
            diagnostic_cube,
            lapse_rate_cube=None,
            apply_lapse_rate_correction=False,
            land_constraint=False,
            minimum_dz=False,
            extract_percentiles=None,
            ecc_bounds_warning=False,
            attributes_dict=None,
            suppress_warnings=False):
    """Module to run spot data extraction.

    Extract diagnostic data from gridded fields for spot data sites. It is
    possible to apply a temperature lapse rate adjustment to temperature data
    that helps to account for differences between the spot site's real altitude
    and that of the grid point from which the temperature data is extracted.

    Args:
        neighbour_cube (iris.cube.Cube):
            Cube of spot-data neighbours and the spot site information.
        diagnostic_cube (iris.cube.Cube):
            Cube containing the diagnostic data to be extracted.
        lapse_rate_cube (iris.cube.Cube):
            Cube containing temperature lapse rates. If this cube is provided
            and a screen temperature cube is being processed, the lapse rates
            will be used to adjust the temperature to better represent each
            spot's site-altitude.
        apply_lapse_rate_correction (bool):
            If True, and a lapse rate cube has been provided, extracted
            screen temperature will be adjusted to better match the altitude
            of the spot site for which they have been extracted.
            Default is False.
        land_constraint (bool):
            If True, the neighbour cube will be interrogated for grid point
            neighbours that were identified using a land constraint. This means
            that the grid points should be land points except for sites where
            none were found within the search radius when the neighbour cube
            was created. May be used with minimum_dz.
            Default is False.
        minimum_dz (bool):
            If True, the neighbour cube will be interrogated for grid point
            neighbours that were identified using the minimum height
            difference constraint. These are grid points that were found to be
            the closest in altitude to the spot site within the search radius
            defined when the neighbour cube was created. May be used with
            land_constraint.
            Default is False.
        extract_percentiles (list or int):
            If set to a percentile value or a list of percentile values,
            data corresponding to those percentiles will be returned. For
            example [25, 50, 75] will result in the 25th, 50th and 75th
            percentiles being returned from a cube of probabilities,
            percentiles or realizations.
            Note that for percentiles inputs, the desired percentile(s) must
            exist in the input cube.
            Default is None.
        ecc_bounds_warning (bool):
            If True, where calculated percentiles are outside the ECC bounds
            range, raises a warning rather than an exception.
            Default is False.
        attributes_dict (dict):
            If provided, this dictionary can be used to modify the attributes
            of the returned cube.
            Default is None.
        suppress_warnings (bool):
            Suppress warning output. This option should only be used if it
            is known that warnings will be generated but they are not required.
            Default is False.

    Returns:
        iris.cube.Cube:
           The processed cube.

    Raises:
        ValueError:
            If the percentile diagnostic cube does not contain the requested
            percentile value.
        ValueError:
            If the lapse rate cube was provided but the diagnostic being
            processed is not air temperature.
        ValueError:
            If the lapse rate cube provided does not have the name
            "air_temperature_lapse_rate"
        ValueError:
            If the lapse rate cube does not contain a single valued height
            coordinate.

    Warns:
        warning:
           If diagnostic cube is not a known probabilistic type.
        warning:
            If a lapse rate cube was provided, but the height of the
            temperature data does not match that of the data used to
            calculate the lapse rates.
        warning:
            If a lapse rate cube was not provided, but the option to apply
            the lapse rate correction was enabled.

    """
    neighbour_selection_method = NeighbourSelection(
        land_constraint=land_constraint,
        minimum_dz=minimum_dz).neighbour_finding_method_name()
    plugin = SpotExtraction(
        neighbour_selection_method=neighbour_selection_method)
    result = plugin.process(neighbour_cube, diagnostic_cube)

    # If a probability or percentile diagnostic cube is provided, extract
    # the given percentile if available. This is done after the spot-extraction
    # to minimise processing time; usually there are far fewer spot sites than
    # grid points.
    if extract_percentiles is not None:
        try:
            perc_coordinate = find_percentile_coordinate(result)
        except CoordinateNotFoundError:
            if 'probability_of_' in result.name():
                result = GeneratePercentilesFromProbabilities(
                    ecc_bounds_warning=ecc_bounds_warning).process(
                        result, percentiles=extract_percentiles)
                result = iris.util.squeeze(result)
            elif result.coords('realization', dim_coords=True):
                fast_percentile_method = not np.ma.isMaskedArray(result.data)
                result = PercentileConverter(
                    'realization',
                    percentiles=extract_percentiles,
                    fast_percentile_method=fast_percentile_method).process(
                        result)
            else:
                msg = ('Diagnostic cube is not a known probabilistic type. '
                       'The {} percentile could not be extracted. Extracting '
                       'data from the cube including any leading '
                       'dimensions.'.format(extract_percentiles))
                if not suppress_warnings:
                    warnings.warn(msg)
        else:
            constraint = [
                '{}={}'.format(perc_coordinate.name(), extract_percentiles)
            ]
            perc_result = extract_subcube(result, constraint)
            if perc_result is not None:
                result = perc_result
            else:
                msg = ('The percentile diagnostic cube does not contain the '
                       'requested percentile value. Requested {}, available '
                       '{}'.format(extract_percentiles,
                                   perc_coordinate.points))
                raise ValueError(msg)
    # Check whether a lapse rate cube has been provided and we are dealing with
    # temperature data and the lapse-rate option is enabled.
    if apply_lapse_rate_correction and lapse_rate_cube:
        if result.name() != "air_temperature":
            msg = ("A lapse rate cube was provided, but the diagnostic being "
                   "processed is not air temperature and cannot be adjusted.")
            raise ValueError(msg)

        if lapse_rate_cube.name() != 'air_temperature_lapse_rate':
            msg = ("A cube has been provided as a lapse rate cube but does "
                   "not have the expected name air_temperature_lapse_rate: "
                   "{}".format(lapse_rate_cube.name()))
            raise ValueError(msg)

        try:
            lapse_rate_height_coord = lapse_rate_cube.coord("height")
        except (ValueError, CoordinateNotFoundError):
            msg = ("Lapse rate cube does not contain a single valued height "
                   "coordinate. This is required to ensure it is applied to "
                   "equivalent temperature data.")
            raise ValueError(msg)

        # Check the height of the temperature data matches that used to
        # calculate the lapse rates. If so, adjust temperatures using the lapse
        # rate values.
        if diagnostic_cube.coord("height") == lapse_rate_height_coord:
            plugin = SpotLapseRateAdjust(
                neighbour_selection_method=neighbour_selection_method)
            result = plugin.process(result, neighbour_cube, lapse_rate_cube)
        elif not suppress_warnings:
            warnings.warn(
                "A lapse rate cube was provided, but the height of the "
                "temperature data does not match that of the data used "
                "to calculate the lapse rates. As such the temperatures "
                "were not adjusted with the lapse rates.")

    elif apply_lapse_rate_correction and not lapse_rate_cube:
        if not suppress_warnings:
            warnings.warn(
                "A lapse rate cube was not provided, but the option to "
                "apply the lapse rate correction was enabled. No lapse rate "
                "correction could be applied.")

    # Modify final attributes as described by provided JSON file.
    if attributes_dict:
        amend_attributes(result, attributes_dict)
    # Remove the internal model_grid_hash attribute if present.
    result.attributes.pop('model_grid_hash', None)
    return result
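
The constraint string built for percentile extraction above is a plain format expression; for example (coordinate name and values hypothetical):

extract_percentiles = [25, 50, 75]
constraint = ["{}={}".format("percentile", extract_percentiles)]
# constraint == ["percentile=[25, 50, 75]"], as passed to extract_subcube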
Example #14
    def process(self, cube1, cube2, boxsize=30):
        """
        Extracts data from input cubes, performs dimensionless advection
        displacement calculation, and creates new cubes with advection
        velocities in metres per second.  Each input cube should have precisely
        two non-scalar dimension coordinates (spatial x/y), and are expected to
        be in a projection such that grid spacing is the same (or very close)
        at all points within the spatial domain.  Each input cube must also
        have a scalar "time" coordinate.

        Args:
            cube1 (iris.cube.Cube):
                2D cube from (earlier) time 1
            cube2 (iris.cube.Cube):
                2D cube from (later) time 2
            boxsize (int):
                The side length of the square box over which to solve the
                optical flow constraint.  This should be greater than the
                data smoothing radius.

        Returns:
            (tuple): tuple containing:
                **ucube** (iris.cube.Cube):
                    2D cube of advection velocities in the x-direction
                **vcube** (iris.cube.Cube):
                    2D cube of advection velocities in the y-direction
        """
        # clear existing parameters
        self.data_smoothing_radius = None
        self.boxsize = None

        # fail if the two input cubes contain different diagnostics, and warn
        # if the data are not precipitation
        if cube1.name() != cube2.name():
            msg = 'Input cubes contain different data types {} and {}'
            raise ValueError(msg.format(cube1.name(), cube2.name()))

        data_name = cube1.name().lower()
        if "rain" not in data_name and "precipitation" not in data_name:
            msg = ('Input data are of non-precipitation type {}.  Plugin '
                   'parameters have not been tested and may not be appropriate'
                   ' for this variable.')
            warnings.warn(msg.format(cube1.name()))

        # check cubes have exactly two spatial dimension coordinates and a
        # scalar time coordinate
        check_input_coords(cube1, require_time=True)
        check_input_coords(cube2, require_time=True)

        # check cube dimensions match
        if (cube1.coord(axis="x") != cube2.coord(axis="x")
                or cube1.coord(axis="y") != cube2.coord(axis="y")):
            raise InvalidCubeError("Input cubes on unmatched grids")

        # check grids are equal area
        check_if_grid_is_equal_area(cube1)
        check_if_grid_is_equal_area(cube2)

        # convert units to mm/hr as this avoids the need to manipulate tiny
        # decimals
        try:
            cube1 = cube1.copy()
            cube2 = cube2.copy()
            cube1.convert_units('mm/hr')
            cube2.convert_units('mm/hr')
        except ValueError as err:
            msg = ('Input data are in units that cannot be converted to mm/hr '
                   'which are the required units for use with optical flow.')
            raise ValueError(msg) from err

        # check time difference is positive
        time1 = (cube1.coord("time").units).num2date(
            cube1.coord("time").points[0])
        time2 = (cube2.coord("time").units).num2date(
            cube2.coord("time").points[0])
        cube_time_diff = time2 - time1
        if cube_time_diff.total_seconds() <= 0:
            msg = "Expected positive time difference cube2 - cube1: got {} s"
            raise InvalidCubeError(msg.format(cube_time_diff.total_seconds()))

        # if time difference is greater than 15 minutes, increase data smoothing
        # radius so that larger advection displacements can be resolved
        if cube_time_diff.total_seconds() > 900:
            data_smoothing_radius_km = self.data_smoothing_radius_km * (
                cube_time_diff.total_seconds() / 900.)
        else:
            data_smoothing_radius_km = self.data_smoothing_radius_km

        # calculate smoothing radius in grid square units
        new_coord = cube1.coord(axis='x').copy()
        new_coord.convert_units('km')
        grid_length_km = np.float32(np.diff(new_coord.points)[0])
        data_smoothing_radius = int(data_smoothing_radius_km / grid_length_km)

        # Fail verbosely if data smoothing radius is too small and will
        # trigger silent failures downstream
        if data_smoothing_radius < 3:
            msg = ("Input data smoothing radius {} too small (minimum 3 "
                   "grid squares)")
            raise ValueError(msg.format(data_smoothing_radius))

        # Fail if self.boxsize is less than data smoothing radius
        self.boxsize = boxsize
        if self.boxsize < data_smoothing_radius:
            msg = ("Box size {} too small (should not be less than data "
                   "smoothing radius {})")
            raise ValueError(msg.format(self.boxsize, data_smoothing_radius))

        # extract 2-dimensional data arrays
        data1 = next(
            cube1.slices([cube1.coord(axis='y'),
                          cube1.coord(axis='x')])).data
        data2 = next(
            cube2.slices([cube2.coord(axis='y'),
                          cube2.coord(axis='x')])).data

        # fill any mask with 0 values so fill_values are not spread into the
        # domain when smoothing the fields.
        if np.ma.is_masked(data1):
            data1 = data1.filled(0)
        if np.ma.is_masked(data2):
            data2 = data2.filled(0)

        # if input arrays have no non-zero values, set velocities to zero here
        # and raise a warning
        if (np.allclose(data1, np.zeros(data1.shape))
                or np.allclose(data2, np.zeros(data2.shape))):
            msg = ("No non-zero data in input fields: setting optical flow "
                   "velocities to zero")
            warnings.warn(msg)
            ucomp = np.zeros(data1.shape, dtype=np.float32)
            vcomp = np.zeros(data2.shape, dtype=np.float32)
        else:
            # calculate dimensionless displacement between the two input fields
            ucomp, vcomp = self.process_dimensionless(data1, data2, 1, 0,
                                                      data_smoothing_radius)
            # convert displacements to velocities in metres per second
            for vel in [ucomp, vcomp]:
                vel *= np.float32(1000. * grid_length_km)
                vel /= cube_time_diff.total_seconds()

        # create velocity output cubes based on metadata from later input cube
        x_coord = cube2.coord(axis="x")
        y_coord = cube2.coord(axis="y")
        t_coord = cube2.coord("time")

        ucube = iris.cube.Cube(ucomp,
                               long_name="precipitation_advection_x_velocity",
                               units="m s-1",
                               dim_coords_and_dims=[(y_coord, 0),
                                                    (x_coord, 1)])
        ucube.add_aux_coord(t_coord)
        amend_attributes(ucube, self.attributes_dict)

        vcube = iris.cube.Cube(vcomp,
                               long_name="precipitation_advection_y_velocity",
                               units="m s-1",
                               dim_coords_and_dims=[(y_coord, 0),
                                                    (x_coord, 1)])
        vcube.add_aux_coord(t_coord)
        amend_attributes(vcube, self.attributes_dict)
        return ucube, vcube
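
The final conversion from dimensionless displacements to velocities deserves a worked number (all values hypothetical): a displacement of two grid squares on a 2 km grid over a 15-minute step gives roughly 4.4 m s-1.

displacement = 2.0      # grid squares between the two input times
grid_length_km = 2.0    # grid spacing in km
time_seconds = 900.0    # 15 minutes between inputs
velocity = displacement * 1000.0 * grid_length_km / time_seconds
# velocity ≈ 4.44 m s-1, matching vel *= 1000 * grid_length_km; vel /= dt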
Example #15
def process(temperature_cube,
            orography_cube,
            land_sea_mask_cube,
            max_height_diff=35,
            nbhood_radius=7,
            max_lapse_rate=3 * DALR,
            min_lapse_rate=DALR,
            return_dalr=False):
    """Calculate temperature lapse rates in units of K m-1 over orography grid.

    Args:
        temperature_cube (iris.cube.Cube):
            A cube of air temperature to be processed (K).
        orography_cube (iris.cube.Cube):
            A cube containing orography data (metres).
        land_sea_mask_cube (iris.cube.Cube):
            A cube containing a binary land-sea mask.
            True for land-points.
            False for sea.
        max_height_diff (float):
            Maximum allowable height difference between the central point and
            points in the neighbourhood over which the lapse rate will be
            calculated.
            Default is 35.
        nbhood_radius (int):
            Radius of neighbourhood around each point. The neighbourhood
            will be a square array with side length 2*nbhood_radius + 1.
            The default value of 7 is from the reference paper.
        max_lapse_rate (float):
            Maximum lapse rate allowed.
            Default is 3*improver.constants.DALR.
        min_lapse_rate (float):
            Minimum lapse rate allowed.
            Default is improver.constants.DALR.
        return_dalr (bool):
            If True, returns a cube containing the dry adiabatic lapse rate
            rather than calculating the true lapse rate.

    Returns:
        iris.cube.Cube:
            Cube containing lapse rate (K m-1)

    Raises:
        ValueError:
            If minimum lapse rate is greater than maximum.
        ValueError:
            If maximum height difference is less than zero.
        ValueError:
            If neighbourhood radius is less than zero.

    """
    if min_lapse_rate > max_lapse_rate:
        msg = 'Minimum lapse rate specified is greater than the maximum.'
        raise ValueError(msg)

    if max_height_diff < 0:
        msg = 'Maximum height difference specified is less than zero.'
        raise ValueError(msg)

    if nbhood_radius < 0:
        msg = 'Neighbourhood radius specified is less than zero.'
        raise ValueError(msg)

    if return_dalr:
        result = temperature_cube.copy(
            data=np.full_like(temperature_cube.data, U_DALR.points[0]))
        result.rename('air_temperature_lapse_rate')
        result.units = U_DALR.units
    else:
        result = LapseRate(max_height_diff=max_height_diff,
                           nbhood_radius=nbhood_radius,
                           max_lapse_rate=max_lapse_rate,
                           min_lapse_rate=min_lapse_rate).process(
                               temperature_cube, orography_cube,
                               land_sea_mask_cube)
    attributes_dict = {
        "title": "delete",
        "source": "delete",
        "history": "delete",
        "um_version": "delete"
    }
    amend_attributes(result, attributes_dict)
    return result
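
The three range checks are duplicated between this CLI and Example #12; a hedged sketch of factoring them into a shared helper (function name hypothetical):

def validate_lapse_rate_options(min_lapse_rate, max_lapse_rate,
                                max_height_diff, nbhood_radius):
    """Raise ValueError if any lapse rate option is out of range (sketch)"""
    if min_lapse_rate > max_lapse_rate:
        raise ValueError(
            'Minimum lapse rate specified is greater than the maximum.')
    if max_height_diff < 0:
        raise ValueError(
            'Maximum height difference specified is less than zero.')
    if nbhood_radius < 0:
        raise ValueError(
            'Neighbourhood radius specified is less than zero.')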