Example #1
    def _remove_zero_weighted_slices(self, cube: Cube,
                                     weights: Cube) -> Tuple[Cube, Cube]:
        """Removes any cube and weights slices where the 1D weighting factor
        is zero

        Args:
            cube:
                The data cube to be blended
            weights:
                1D cube of weights varying along self.blend_coord

        Returns:
            - Data cube without zero-weighted slices
            - Weights without zeroes
        """
        slice_out_vals = []
        for wslice in weights.slices_over(self.blend_coord):
            if np.sum(wslice.data) == 0:
                slice_out_vals.append(wslice.coord(self.blend_coord).points[0])

        if not slice_out_vals:
            return cube, weights

        constraint = iris.Constraint(
            coord_values={self.blend_coord: lambda x: x not in slice_out_vals})
        cube = cube.extract(constraint)
        weights = weights.extract(constraint)
        return cube, weights
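A minimal, self-contained sketch of the same "exclude by coordinate value" pattern used above (the cube, data and the model_id coordinate name are illustrative, not from the source):

import iris
import numpy as np
from iris.coords import DimCoord
from iris.cube import Cube

model_id = DimCoord([0, 1, 2], long_name="model_id")
weights = Cube(np.array([0.5, 0.0, 0.5]), long_name="weights",
               dim_coords_and_dims=[(model_id, 0)])

# Collect coordinate values whose weights sum to zero, then exclude them.
zero_vals = [w.coord("model_id").points[0]
             for w in weights.slices_over("model_id")
             if np.sum(w.data) == 0]
constraint = iris.Constraint(
    coord_values={"model_id": lambda x: x not in zero_vals})
print(weights.extract(constraint).coord("model_id").points)  # [0 2]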
Example #2
def slice_mask_cube_by_domain(
        cube_in: Cube, cube_in_mask: Cube,
        output_domain: Tuple[float, float, float, float]) -> Tuple[Cube, Cube]:
    """
    Slice the data and mask cubes to the given output domain, padded by
    twice the coarser of the two grid spacings.

    Args:
        cube_in:
            Input data cube to be sliced.
        cube_in_mask:
            Input mask cube to be sliced.
        output_domain:
            Domain boundaries as (lat_max, lon_max, lat_min, lon_min).

    Returns:
        - Data cube after slicing.
        - Mask cube after slicing.
    """
    lat_max, lon_max, lat_min, lon_min = output_domain
    lat_d_1, lon_d_1 = calculate_input_grid_spacing(cube_in)
    lat_d_2, lon_d_2 = calculate_input_grid_spacing(cube_in_mask)
    # Pad the domain by twice the coarser of the two grid spacings.
    lat_d = max(lat_d_1, lat_d_2)
    lon_d = max(lon_d_1, lon_d_2)

    domain = (
        iris.Constraint(
            latitude=lambda val: lat_min - 2.0 * lat_d < val < lat_max + 2.0 * lat_d)
        & iris.Constraint(
            longitude=lambda val: lon_min - 2.0 * lon_d < val < lon_max + 2.0 * lon_d)
    )

    cube_in = cube_in.extract(domain)
    cube_in_mask = cube_in_mask.extract(domain)

    return cube_in, cube_in_mask
Example #3
def apply_extraction(
    cube: Cube,
    constraint: Constraint,
    units: Optional[Dict] = None,
    use_original_units: bool = True,
) -> Cube:
    """
    Using a set of constraints, extract a subcube from the provided cube if it
    is available.

    Args:
        cube:
            The cube from which a subcube is to be extracted.
        constraint:
            The constraint or ConstraintCombination that will be used to
            extract a subcube from the input cube.
        units:
            A dictionary of units for the constraints. Supplied if any
            coordinate constraints are provided in different units from those
            of the input cube (eg precip in mm/h for cube threshold in m/s).
        use_original_units:
            Boolean to state whether the coordinates used in the extraction
            should be converted back to their original units. The default is
            True, indicating that the units should be converted back to the
            original units.

    Returns:
        A single cube matching the input constraints, or None if no subcube
        is found within cube that matches the constraints.
    """
    if units is None:
        output_cube = cube.extract(constraint)
    else:
        original_units = {}
        for coord in units.keys():
            original_units[coord] = cube.coord(coord).units
            cube.coord(coord).convert_units(units[coord])
        output_cube = cube.extract(constraint)
        if use_original_units:
            for coord in original_units:
                cube.coord(coord).convert_units(original_units[coord])
                try:
                    output_cube.coord(coord).convert_units(
                        original_units[coord])
                except AttributeError:
                    # an empty output cube (None) is handled by the CLI
                    pass

    return output_cube
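A hedged usage sketch of the units handling, assuming apply_extraction above is importable (the cube and threshold values are invented):

import iris
import numpy as np
from iris.coords import DimCoord
from iris.cube import Cube

# Threshold stored in m s-1; 2.7778e-7 m s-1 is approximately 1 mm h-1.
threshold = DimCoord(np.array([2.7778e-7]), long_name="threshold",
                     units="m s-1")
cube = Cube(np.zeros((1, 2)), long_name="probability_of_precipitation",
            dim_coords_and_dims=[(threshold, 0)])

# Match 1.0 in mm h-1 with a tolerant comparison; the coordinate is
# converted for the extraction and restored to m s-1 afterwards.
constraint = iris.Constraint(
    coord_values={"threshold": lambda cell: np.isclose(cell.point, 1.0)})
result = apply_extraction(cube, constraint, units={"threshold": "mm h-1"})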
Example #4
def slice_cube_by_domain(
        cube_in: Cube, output_domain: Tuple[float, float, float,
                                            float]) -> Cube:
    """
    Slice the data cube to the given output domain, padded by twice the
    grid spacing.

    Args:
        cube_in:
            Input data cube to be sliced.
        output_domain:
            Domain boundaries as (lat_max, lon_max, lat_min, lon_min).

    Returns:
        Data cube after slicing.
    """
    lat_max, lon_max, lat_min, lon_min = output_domain
    lat_d, lon_d = calculate_input_grid_spacing(cube_in)

    # Pad the domain by twice the grid spacing.
    domain = (
        iris.Constraint(
            latitude=lambda val: lat_min - 2.0 * lat_d < val < lat_max + 2.0 * lat_d)
        & iris.Constraint(
            longitude=lambda val: lon_min - 2.0 * lon_d < val < lon_max + 2.0 * lon_d)
    )

    cube_in = cube_in.extract(domain)

    return cube_in
Example #5
    def extract_percentile_data(cube: Cube, req_percentile: float,
                                standard_name: str) -> Tuple[Cube, Coord]:
        """Extract percentile data from cube.

        Args:
            cube:
                Cube containing one or more percentiles.
            req_percentile:
                Required percentile value.
            standard_name:
                Standard name of the data.

        Returns:
            - Cube containing the required percentile data
            - Percentile coordinate.
        """
        if not isinstance(cube, iris.cube.Cube):
            msg = ("Expecting {0:s} data to be an instance of "
                   "iris.cube.Cube but is"
                   " {1}.".format(standard_name, type(cube)))
            raise TypeError(msg)
        perc_coord = find_percentile_coordinate(cube)
        if cube.standard_name != standard_name:
            msg = ("Warning mismatching name for data expecting"
                   " {0:s} but found {1:s}".format(standard_name,
                                                   cube.standard_name))
            warnings.warn(msg)
        constraint = iris.Constraint(
            coord_values={perc_coord.name(): req_percentile})
        result = cube.extract(constraint)
        if result is None:
            msg = "Could not find required percentile " "{0:3.1f} in cube".format(
                req_percentile)
            raise ValueError(msg)
        return result, perc_coord
Example #6
    def _find_central_point(self, cube: Cube) -> Cube:
        """
        Find the cube that contains the central point, otherwise, raise
        an exception.

        Args:
            cube:
                Cube containing input for blending.

        Returns:
            Cube containing central point.

        Raises:
            ValueError: Central point is not available within the input cube.
        """
        # Convert central point into the units of the cube, so that a
        # central point can be extracted.
        central_point = Unit(self.parameter_units).convert(
            self.central_point,
            cube.coord(self.coord).units)
        constr = iris.Constraint(
            coord_values={
                self.coord: lambda cell: cell.point == central_point
            })
        central_point_cube = cube.extract(constr)
        if central_point_cube is None:
            msg = ("The central point {} in units of {} not available "
                   "within input cube coordinate points: {}.".format(
                       self.central_point,
                       self.parameter_units,
                       cube.coord(self.coord).points,
                   ))
            raise ValueError(msg)
        return central_point_cube
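The convert-then-match pattern above, in isolation (a sketch; the forecast_period coordinate name and the values are illustrative):

import iris
from cf_units import Unit

# Convert the requested point (2 hours) into the cube coordinate's units
# (here assumed to be seconds) before building an equality constraint.
central_point = Unit("hours").convert(2.0, "seconds")  # 7200.0
constr = iris.Constraint(
    coord_values={"forecast_period": lambda cell: cell.point == central_point})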
Example #7
    def apply_ice(self, prob_lightning_cube: Cube, ice_cube: Cube) -> Cube:
        """
        Modify Nowcast of lightning probability with ice data from a radar
        composite (VII; Vertically Integrated Ice)

        Args:
            prob_lightning_cube:
                First-guess lightning probability.
                The forecast_period coord is modified in-place to "minutes".
            ice_cube:
                Analysis of vertically integrated ice (VII) from radar
                thresholded at self.ice_thresholds.
                Units of threshold coord modified in-place to kg m^-2

        Returns:
            Output cube containing updated nowcast lightning probability.
            This cube will have the same dimensions and meta-data as
            prob_lightning_cube.
            The influence of the data in ice_cube reduces linearly to zero
            as forecast_period increases to 2H30M.

        Raises:
            iris.exceptions.ConstraintMismatchError:
                If ice_cube does not contain the expected thresholds.
        """
        prob_lightning_cube.coord("forecast_period").convert_units("minutes")
        # check prob-ice threshold units are as expected
        ice_threshold_coord = find_threshold_coordinate(ice_cube)
        ice_threshold_coord.convert_units("kg m^-2")
        new_cube_list = iris.cube.CubeList([])
        err_string = "No matching prob(Ice) cube for threshold {}"
        for cube_slice in prob_lightning_cube.slices_over("time"):
            fcmins = cube_slice.coord("forecast_period").points[0]
            for threshold, prob_max in zip(self.ice_thresholds,
                                           self.ice_scaling):
                ice_slice = ice_cube.extract(
                    iris.Constraint(coord_values={
                        ice_threshold_coord:
                        lambda t: isclose(t.point, threshold)
                    }))
                if not isinstance(ice_slice, iris.cube.Cube):
                    raise ConstraintMismatchError(err_string.format(threshold))
                # Linearly reduce impact of ice as fcmins increases to 2H30M.
                ice_scaling = [0.0, (prob_max * (1.0 - (fcmins / 150.0)))]
                if ice_scaling[1] > 0:
                    cube_slice.data = np.maximum(
                        rescale(
                            ice_slice.data,
                            data_range=(0.0, 1.0),
                            scale_range=ice_scaling,
                            clip=True,
                        ),
                        cube_slice.data,
                    )
            new_cube_list.append(cube_slice)

        new_cube = new_cube_list.merge_cube()
        new_cube = check_cube_coordinates(prob_lightning_cube, new_cube)
        return new_cube
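A minimal sketch of the tolerant threshold matching used above (coordinate name and values are illustrative):

import iris
import numpy as np
from math import isclose
from iris.coords import DimCoord
from iris.cube import Cube

thresholds = DimCoord(np.array([0.5, 1.0, 2.0], dtype=np.float32),
                      long_name="vertical_integral_of_ice", units="kg m-2")
vii = Cube(np.zeros(3), long_name="probability_of_vii",
           dim_coords_and_dims=[(thresholds, 0)])

# Exact float equality can miss float32-stored thresholds; isclose does not.
ice_slice = vii.extract(iris.Constraint(coord_values={
    "vertical_integral_of_ice": lambda t: isclose(t.point, 1.0)}))
print(ice_slice.coord("vertical_integral_of_ice").points)  # [1.]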
Example #8
    def wind_dir_decider(self, where_low_r: ndarray, wdir_cube: Cube) -> None:
        """If the wind direction is so widely scattered that the r value
           is nearly zero then this indicates that the average wind direction
           is essentially meaningless.
           We therefore substitute this meaningless average wind
           direction value for the wind direction calculated from a larger
           sample by smoothing across a neighbourhood of points before
           rerunning the main technique.
           This is invoked rarely (1 : 100 000)

        Args:
            where_low_r:
                Array of boolean values. True where original wind direction
                estimate has low confidence. These points are replaced
                according to self.backup_method
            wdir_cube:
                Contains array of wind direction data (realization, y, x)

        Uses:
            self.wdir_slice_mean:
                Containing average wind direction angle (in degrees).
            self.wdir_complex:
                3D array - wind direction angles from ensembles (in complex).
            self.r_vals_slice.data:
                2D array - Radius taken from average complex wind direction
                angle.
            self.r_thresh:
                Any r value below threshold is regarded as meaningless.
            self.realization_axis:
                Axis to collapse over.
            self.n_realizations:
                Number of realizations available in the plugin. Used to set the
                neighbourhood radius as this is used to adjust the radius again
                in the neighbourhooding plugin.

        Defines:
            self.wdir_slice_mean.data:
                2D array - Wind direction degrees where ambiguous values have
                been replaced with data from first ensemble realization.
        """
        if self.backup_method == "neighbourhood":
            # Performs smoothing over a 6km square neighbourhood.
            # Then calculates the mean wind direction.
            child_class = WindDirection(backup_method="first_realization")
            child_class.wdir_complex = self.nbhood(
                wdir_cube.copy(data=self.wdir_complex)).data
            child_class.realization_axis = self.realization_axis
            child_class.wdir_slice_mean = self.wdir_slice_mean.copy()
            child_class.calc_wind_dir_mean()
            improved_values = child_class.wdir_slice_mean.data
        else:
            # Takes realization zero (control member).
            improved_values = wdir_cube.extract(
                iris.Constraint(realization=0)).data

        # Where the r-value is low, replace the average wind direction with
        # the improved values.
        self.wdir_slice_mean.data = np.where(where_low_r, improved_values,
                                             self.wdir_slice_mean.data)
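A sketch of the underlying idea, not the plugin's code: directions are averaged as unit vectors in the complex plane, and the magnitude r of the mean measures confidence:

import numpy as np

angles_deg = np.array([350.0, 10.0, 0.0])
z = np.exp(1j * np.deg2rad(angles_deg))   # unit vectors
mean = z.mean()
r = np.abs(mean)                          # near 0 => scattered, meaningless
mean_dir = np.rad2deg(np.angle(mean)) % 360.0
print(round(mean_dir, 1), round(r, 2))    # 0.0 0.99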
Example #9
 def test_scalar_cube_coord_nomatch(self):
     # Ensure that extract does not return a scalar cube whose scalar
     # coordinate does not match the constraint.
     constraint = iris.Constraint(scalar_coord=1)
     cube = Cube(1, long_name='a1')
     coord = iris.coords.AuxCoord(0, long_name='scalar_coord')
     cube.add_aux_coord(coord, None)
     res = cube.extract(constraint)
     self.assertIs(res, None)
Example #10
    def process(self, spot_data_cube: Cube, neighbour_cube: Cube,
                gridded_lapse_rate_cube: Cube) -> Cube:
        """
        Extract lapse rates from the appropriate grid points and apply them to
        the spot extracted temperatures.

        The calculation is::

         lapse_rate_adjusted_temperatures = temperatures + lapse_rate *
         vertical_displacement

        Args:
            spot_data_cube:
                A spot data cube of temperatures for the spot data sites,
                extracted from the gridded temperature field. These
                temperatures will have been extracted using the same
                neighbour_cube and neighbour_selection_method that are being
                used here.
            neighbour_cube:
                The neighbour_cube that contains the grid coordinates at which
                lapse rates should be extracted and the vertical displacement
                between those grid points on the model orography and the spot
                data sites' actual altitudes. This cube is only updated when
                a new site is added.
            gridded_lapse_rate_cube:
                A cube of temperature lapse rates on the same grid as that from
                which the spot data temperatures were extracted.

        Returns:
            A copy of the input spot_data_cube with the data modified by
            the lapse rates to give a better representation of the site's
            temperatures.
        """
        # Check the cubes are compatible.
        check_grid_match(
            [neighbour_cube, spot_data_cube, gridded_lapse_rate_cube])

        # Extract the lapse rates that correspond to the spot sites.
        spot_lapse_rate = SpotExtraction(
            neighbour_selection_method=self.neighbour_selection_method)(
                neighbour_cube, gridded_lapse_rate_cube)

        # Extract vertical displacements between the model orography and sites.
        method_constraint = iris.Constraint(
            neighbour_selection_method_name=self.neighbour_selection_method)
        data_constraint = iris.Constraint(
            grid_attributes_key="vertical_displacement")
        vertical_displacement = neighbour_cube.extract(method_constraint
                                                       & data_constraint)

        # Apply lapse rate adjustment to the temperature at each site.
        new_temperatures = (
            spot_data_cube.data +
            (spot_lapse_rate.data * vertical_displacement.data)).astype(
                np.float32)
        new_spot_cube = spot_data_cube.copy(data=new_temperatures)
        return new_spot_cube
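A one-line numeric illustration of the calculation above (all values invented):

import numpy as np

# 281.0 K at the grid point, lapse rate -0.0065 K m-1, site 40 m below the
# model orography (vertical displacement -40 m):
print(np.float32(281.0) + np.float32(-0.0065) * np.float32(-40.0))  # ~281.26 K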
Example #11
 def test_scalar_cube_coord_match(self):
     # Ensure that extract is able to extract a scalar cube according to
     # a constrained scalar coordinate.
     constraint = iris.Constraint(scalar_coord=0)
     cube = Cube(1, long_name='a1')
     coord = iris.coords.AuxCoord(0, long_name='scalar_coord')
     cube.add_aux_coord(coord, None)
     res = cube.extract(constraint)
     self.assertIs(res, cube)
Example #12
def filter_non_matching_cubes(historic_forecast: Cube,
                              truth: Cube) -> Tuple[Cube, Cube]:
    """
    Provide filtering for the historic forecast and truth to make sure
    that these contain matching validity times. This ensures that any
    mismatch between the historic forecasts and truth is dealt with.

    Args:
        historic_forecast:
            Cube of historic forecasts that potentially contains
            a mismatch compared to the truth.
        truth:
            Cube of truth that potentially contains a mismatch
            compared to the historic forecasts.

    Returns:
        - Cube of historic forecasts where any mismatches with
          the truth cube have been removed.
        - Cube of truths where any mismatches with
          the historic_forecasts cube have been removed.

    Raises:
        ValueError: The filtering has found no matches in validity time
            between the historic forecasts and the truths.
    """
    matching_historic_forecasts = iris.cube.CubeList([])
    matching_truths = iris.cube.CubeList([])
    for hf_slice in historic_forecast.slices_over("time"):
        if hf_slice.coord("time").has_bounds():
            point = iris_time_to_datetime(hf_slice.coord("time"),
                                          point_or_bound="point")
            (bounds, ) = iris_time_to_datetime(hf_slice.coord("time"),
                                               point_or_bound="bound")
            coord_values = {
                "time": lambda cell: point[0] == cell.point
                and bounds[0] == cell.bound[0]
                and bounds[1] == cell.bound[1]
            }
        else:
            coord_values = {
                "time":
                iris_time_to_datetime(hf_slice.coord("time"),
                                      point_or_bound="point")
            }

        constr = iris.Constraint(coord_values=coord_values)
        truth_slice = truth.extract(constr)

        if truth_slice:
            matching_historic_forecasts.append(hf_slice)
            matching_truths.append(truth_slice)
    if not matching_historic_forecasts and not matching_truths:
        msg = ("The filtering has found no matches in validity time "
               "between the historic forecasts and the truths.")
        raise ValueError(msg)
    return (matching_historic_forecasts.merge_cube(),
            matching_truths.merge_cube())
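A minimal sketch of extracting a slice by exact validity time (cube and times invented; modern iris compares time cells directly against datetime objects):

import iris
import numpy as np
from datetime import datetime
from cf_units import Unit
from iris.coords import DimCoord
from iris.cube import Cube

time = DimCoord(np.array([0.0, 6.0]), standard_name="time",
                units=Unit("hours since 1970-01-01 00:00:00",
                           calendar="standard"))
truth = Cube(np.zeros(2), long_name="truth",
             dim_coords_and_dims=[(time, 0)])

constr = iris.Constraint(time=datetime(1970, 1, 1, 6))
print(truth.extract(constr))  # the 06:00 slice, or None if absent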
Example #13
def extract_nearest_time_point(
    cube: Cube, dt: datetime, time_name: str = "time", allowed_dt_difference: int = 0
) -> Cube:
    """Find the nearest time point to the time point provided.

    Args:
        cube:
            Cube that will be extracted from using the supplied datetime.
        dt:
            Datetime representation of a time that will be used within the
            extraction from the cube supplied.
        time_name:
            Name of the "time" coordinate that will be extracted. This must be
            "time" or "forecast_reference_time".
        allowed_dt_difference:
            The maximum allowed difference, in seconds, between the datetime
            provided and the time points available within the cube. If this
            limit is exceeded, an error is raised.
            Default is 0.

    Returns:
        Cube following extraction to return the cube that is nearest
        to the time point supplied.

    Raises:
        ValueError: The requested datetime is not available within the
            allowed difference.
    """
    if time_name not in ["time", "forecast_reference_time"]:
        msg = (
            "{} is not a valid time_name. "
            "The time_name must be either "
            "'time' or 'forecast_reference_time'.".format(time_name)
        )
        raise ValueError(msg)

    time_point = datetime_to_iris_time(dt)
    time_point_index = cube.coord(time_name).nearest_neighbour_index(time_point)
    (nearest_dt,) = iris_time_to_datetime(
        cube.coord(time_name).copy()[time_point_index]
    )
    if abs((dt - nearest_dt).total_seconds()) > allowed_dt_difference:
        msg = (
            "The datetime {} is not available within the input "
            "cube within the allowed difference {} seconds. "
            "The nearest datetime available was {}".format(
                dt, allowed_dt_difference, nearest_dt
            )
        )
        raise ValueError(msg)
    constr = iris.Constraint(coord_values={time_name: nearest_dt})
    cube = cube.extract(constr)
    return cube
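Hypothetical usage of the function above, assuming `cube` carries an hourly "time" coordinate spanning the requested date:

from datetime import datetime

nearest = extract_nearest_time_point(
    cube, datetime(2017, 2, 17, 6, 20), allowed_dt_difference=3600)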
Example #14
    def different_projection(self, method, ancillary_data, additional_data,
                             expected, **kwargs):
        """Test that the plugin copes with non-lat/lon grids."""

        src_crs = ccrs.PlateCarree()
        trg_crs = ccrs.LambertConformal(central_longitude=50,
                                        central_latitude=10)
        trg_crs_iris = coord_systems.LambertConformal(central_lon=50,
                                                      central_lat=10)
        lons = self.cube.coord('longitude').points
        lats = self.cube.coord('latitude').points
        x, y = [], []
        for lon, lat in zip(lons, lats):
            x_trg, y_trg = trg_crs.transform_point(lon, lat, src_crs)
            x.append(x_trg)
            y.append(y_trg)

        new_x = AuxCoord(x,
                         standard_name='projection_x_coordinate',
                         units='m',
                         coord_system=trg_crs_iris)
        new_y = AuxCoord(y,
                         standard_name='projection_y_coordinate',
                         units='m',
                         coord_system=trg_crs_iris)

        cube = Cube(self.cube.data,
                    long_name="air_temperature",
                    dim_coords_and_dims=[(self.cube.coord('time'), 0)],
                    aux_coords_and_dims=[(new_y, 1), (new_x, 2)],
                    units="K")

        plugin = Plugin(method)
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = cube.extract(self.time_extract)
        result = plugin.process(cube, self.sites, self.neighbour_list,
                                ancillary_data, additional_data, **kwargs)

        self.assertEqual(cube.coord_system(), trg_crs_iris)
        self.assertAlmostEqual(result.data, expected)
        self.assertEqual(result.coord(axis='y').name(), 'latitude')
        self.assertEqual(result.coord(axis='x').name(), 'longitude')
        self.assertAlmostEqual(result.coord(axis='y').points, 4.74)
        self.assertAlmostEqual(result.coord(axis='x').points, 9.47)
Example #15
    def extract_coordinates(self, neighbour_cube: Cube) -> Cube:
        """
        Extract the desired set of grid coordinates that correspond to spot
        sites from the neighbour cube.

        Args:
            neighbour_cube:
                A cube containing information about the spot data sites and
                their grid point neighbours.

        Returns:
            A cube containing only the x and y grid coordinates for the
            grid point neighbours given the chosen neighbour selection
            method. The neighbour cube contains the indices stored as
            floating point values, so they are converted to integers
            in this cube.

        Raises:
            ValueError: If the expected neighbour_selection_method is not
                found in the neighbour cube.
        """
        method = iris.Constraint(
            neighbour_selection_method_name=self.neighbour_selection_method
        )
        index_constraint = iris.Constraint(grid_attributes_key=["x_index", "y_index"])
        coordinate_cube = neighbour_cube.extract(method & index_constraint)
        if coordinate_cube:
            coordinate_cube.data = np.rint(coordinate_cube.data).astype(int)
            return coordinate_cube

        available_methods = neighbour_cube.coord(
            "neighbour_selection_method_name"
        ).points
        raise ValueError(
            'The requested neighbour_selection_method "{}" is not available in'
            " this neighbour_cube. Available methods are: {}.".format(
                self.neighbour_selection_method, available_methods
            )
        )
Example #16
    def process(self, spot_data_cube: Cube, neighbour_cube: Cube,
                gridded_lapse_rate_cube: Cube) -> Cube:
        """
        Extract lapse rates from the appropriate grid points and apply them to
        the spot extracted temperatures.

        The calculation is::

         lapse_rate_adjusted_temperatures = temperatures + lapse_rate *
         vertical_displacement

        Args:
            spot_data_cube:
                A spot data cube of temperatures for the spot data sites,
                extracted from the gridded temperature field. These
                temperatures will have been extracted using the same
                neighbour_cube and neighbour_selection_method that are being
                used here.
            neighbour_cube:
                The neighbour_cube that contains the grid coordinates at which
                lapse rates should be extracted and the vertical displacement
                between those grid points on the model orography and the spot
                data sites' actual altitudes. This cube is only updated when
                a new site is added.
            gridded_lapse_rate_cube:
                A cube of temperature lapse rates on the same grid as that from
                which the spot data temperatures were extracted.

        Returns:
            A copy of the input spot_data_cube with the data modified by
            the lapse rates to give a better representation of the site's
            temperatures.

        Raises:
            ValueError:
                If the lapse rate cube was provided but the diagnostic being
                processed is not air temperature.
            ValueError:
                If the lapse rate cube provided does not have the name
                "air_temperature_lapse_rate"
            ValueError:
                If the lapse rate cube does not contain a single valued height
                coordinate.

        Warns:
            warning:
                If a lapse rate cube was provided, but the height of the
                temperature does not match that of the data used.
        """

        if is_probability(spot_data_cube):
            msg = (
                "Input cube has a probability coordinate which cannot be lapse "
                "rate adjusted. Input data should be in percentile or "
                "deterministic space only.")
            raise ValueError(msg)

        # Check that we are dealing with temperature data.
        if spot_data_cube.name() not in [
                "air_temperature", "feels_like_temperature"
        ]:
            msg = (
                "The diagnostic being processed is not air temperature "
                "or feels like temperature and therefore cannot be adjusted.")
            raise ValueError(msg)

        if gridded_lapse_rate_cube.name() != "air_temperature_lapse_rate":
            msg = ("A cube has been provided as a lapse rate cube but does "
                   "not have the expected name air_temperature_lapse_rate: "
                   "{}".format(gridded_lapse_rate_cube.name()))
            raise ValueError(msg)

        try:
            lapse_rate_height_coord = gridded_lapse_rate_cube.coord("height")
        except CoordinateNotFoundError:
            msg = ("Lapse rate cube does not contain a single valued height "
                   "coordinate. This is required to ensure it is applied to "
                   "equivalent temperature data.")
            raise CoordinateNotFoundError(msg)

        # Check the height of the temperature data matches that used to
        # calculate the lapse rates. If so, adjust temperatures using the lapse
        # rate values.
        if spot_data_cube.coord("height") != lapse_rate_height_coord:
            raise ValueError(
                "A lapse rate cube was provided, but the height of the "
                "temperature data does not match that of the data used "
                "to calculate the lapse rates. As such the temperatures "
                "were not adjusted with the lapse rates.")

        # Check the cubes are compatible.
        check_grid_match(
            [neighbour_cube, spot_data_cube, gridded_lapse_rate_cube])

        # Extract the lapse rates that correspond to the spot sites.
        spot_lapse_rate = SpotExtraction(
            neighbour_selection_method=self.neighbour_selection_method)(
                neighbour_cube, gridded_lapse_rate_cube)

        # Extract vertical displacements between the model orography and sites.
        method_constraint = iris.Constraint(
            neighbour_selection_method_name=self.neighbour_selection_method)
        data_constraint = iris.Constraint(
            grid_attributes_key="vertical_displacement")
        vertical_displacement = neighbour_cube.extract(method_constraint
                                                       & data_constraint)

        # Apply lapse rate adjustment to the temperature at each site.
        new_spot_lapse_rate = iris.util.broadcast_to_shape(
            spot_lapse_rate.data, spot_data_cube.shape, [-1])
        new_temperatures = (
            spot_data_cube.data +
            (new_spot_lapse_rate * vertical_displacement.data)).astype(
                np.float32)
        new_spot_cube = spot_data_cube.copy(data=new_temperatures)
        return new_spot_cube
Example #17
    def _create_output_cube(
        self,
        template: Cube,
        data: Union[List[float], ndarray],
        points: Union[List[float], ndarray],
        bounds: Union[List[float], ndarray],
    ) -> Cube:
        """
        Populates a template cube with data from the integration

        Args:
            template:
                Copy of upper or lower bounds cube, based on direction of
                integration
            data:
                Integrated data
            points:
                Points values for the integrated coordinate. These will not
                match the template cube if any slices were skipped in the
                integration, and therefore are used to slice the template cube
                to match the data array.
            bounds:
                Bounds values for the integrated coordinate

        Returns:
            Cube with data from integration
        """
        # extract required slices from template cube
        template = template.extract(
            iris.Constraint(coord_values={
                self.coord_name_to_integrate: lambda x: x in points
            }))

        # re-promote integrated coord to dimension coord if need be
        aux_coord_names = [coord.name() for coord in template.aux_coords]
        if self.coord_name_to_integrate in aux_coord_names:
            template = iris.util.new_axis(template,
                                          self.coord_name_to_integrate)

        # order dimensions on the template cube so that the integrated
        # coordinate is first (as this is the leading dimension on the
        # data array)
        enforce_coordinate_ordering(template, self.coord_name_to_integrate)

        # generate appropriate metadata for new cube
        attributes = generate_mandatory_attributes([template])
        coord_dtype = template.coord(self.coord_name_to_integrate).dtype
        name, units = self._generate_output_name_and_units()

        # create new cube from template
        integrated_cube = create_new_diagnostic_cube(name,
                                                     units,
                                                     template,
                                                     attributes,
                                                     data=np.array(data))

        integrated_cube.coord(self.coord_name_to_integrate).bounds = np.array(
            bounds).astype(coord_dtype)

        # re-order cube to match dimensions of input cube
        ordered_dimensions = get_dim_coord_names(self.input_cube)
        enforce_coordinate_ordering(integrated_cube, ordered_dimensions)
        return integrated_cube
Example #18
    def _modify_first_guess(
        self,
        cube: Cube,
        first_guess_lightning_cube: Cube,
        lightning_rate_cube: Cube,
        prob_precip_cube: Cube,
        prob_vii_cube: Optional[Cube] = None,
    ) -> Cube:
        """
        Modify first-guess lightning probability with nowcast data.

        Args:
            cube:
                Provides the meta-data for the Nowcast lightning probability
                output cube.
            first_guess_lightning_cube:
                First-guess lightning probability.
                Must have same x & y dimensions as cube.
                Time dimension should overlap that of cube (closest slice in
                time is used with a maximum time mismatch of 2 hours).
                This is included to allow this cube to come from a different
                modelling system, such as the UM.
            lightning_rate_cube:
                Nowcast lightning rate.
                Must have same dimensions as cube.
            prob_precip_cube:
                Nowcast precipitation probability (threshold > 0.5, 7, 35
                mm hr-1).
                Must have same other dimensions as cube.
            prob_vii_cube:
                Radar-derived vertically integrated ice content (VII).
                Must have same x and y dimensions as cube.
                Time should be a scalar coordinate.
                Must have a threshold coordinate with points matching
                self.vii_thresholds.
                May be None, or anything else that evaluates to False, if no
                VII data is available.

        Returns:
            Output cube containing Nowcast lightning probability.

        Raises:
            iris.exceptions.ConstraintMismatchError:
                If lightning_rate_cube or first_guess_lightning_cube do not
                contain the expected times.
        """
        new_cube_list = iris.cube.CubeList([])
        # Loop over required forecast validity times
        for cube_slice in cube.slices_over("time"):
            this_time = iris_time_to_datetime(
                cube_slice.coord("time").copy())[0]
            lightning_rate_slice = lightning_rate_cube.extract(
                iris.Constraint(time=this_time))
            err_string = "No matching {} cube for {}"
            if not isinstance(lightning_rate_slice, iris.cube.Cube):
                raise ConstraintMismatchError(
                    err_string.format("lightning", this_time))
            first_guess_slice = extract_nearest_time_point(
                first_guess_lightning_cube,
                this_time,
                allowed_dt_difference=7201)
            first_guess_slice = cube_slice.copy(data=first_guess_slice.data)
            first_guess_slice.coord("forecast_period").convert_units("minutes")
            fcmins = first_guess_slice.coord("forecast_period").points[0]

            # Increase prob(lightning) to Risk 2 (pl_dict[2]) when
            #   lightning nearby (lrt_lev2)
            # (and leave unchanged when condition is not met):
            first_guess_slice.data = np.where(
                (lightning_rate_slice.data >= self.lrt_lev2)
                & (first_guess_slice.data < self.pl_dict[2]),
                self.pl_dict[2],
                first_guess_slice.data,
            )

            # Increase prob(lightning) to Risk 1 (pl_dict[1]) when within
            #   lightning storm (lrt_lev1):
            # (and leave unchanged when condition is not met):
            lratethresh = self.lrt_lev1(fcmins)
            first_guess_slice.data = np.where(
                (lightning_rate_slice.data >= lratethresh)
                & (first_guess_slice.data < self.pl_dict[1]),
                self.pl_dict[1],
                first_guess_slice.data,
            )

            new_cube_list.append(first_guess_slice)

        new_prob_lightning_cube = new_cube_list.merge_cube()
        new_prob_lightning_cube = check_cube_coordinates(
            cube, new_prob_lightning_cube)

        # Apply precipitation adjustments.
        new_prob_lightning_cube = self.apply_precip(new_prob_lightning_cube,
                                                    prob_precip_cube)

        # If we have VII data, increase prob(lightning) accordingly.
        if prob_vii_cube:
            new_prob_lightning_cube = self.apply_ice(new_prob_lightning_cube,
                                                     prob_vii_cube)
        return new_prob_lightning_cube
Example #19
 def test_scalar_cube_exists(self):
     # Ensure that extract is able to extract a scalar cube.
     constraint = iris.Constraint(name='a1')
     cube = Cube(1, long_name='a1')
     res = cube.extract(constraint)
     self.assertIs(res, cube)
Example #20
    def apply_precip(self, prob_lightning_cube: Cube,
                     prob_precip_cube: Cube) -> Cube:
        """
        Modify Nowcast of lightning probability with precipitation rate
        probabilities at thresholds of 0.5, 7 and 35 mm/h.

        Args:
            prob_lightning_cube:
                First-guess lightning probability.

            prob_precip_cube:
                Nowcast precipitation probability
                (threshold > 0.5, 7., 35. mm hr-1)
                Units of threshold coord modified in-place to mm hr-1

        Returns:
            Output cube containing updated nowcast lightning probability.
            This cube will have the same dimensions and meta-data as
            prob_lightning_cube.

        Raises:
            iris.exceptions.ConstraintMismatchError:
                If prob_precip_cube does not contain the expected thresholds.
        """
        new_cube_list = iris.cube.CubeList([])
        # check prob-precip threshold units are as expected
        precip_threshold_coord = find_threshold_coordinate(prob_precip_cube)
        precip_threshold_coord.convert_units("mm hr-1")
        # extract precipitation probabilities at required thresholds
        for cube_slice in prob_lightning_cube.slices_over("time"):
            this_time = iris_time_to_datetime(
                cube_slice.coord("time").copy())[0]
            this_precip = prob_precip_cube.extract(
                iris.Constraint(time=this_time)
                & iris.Constraint(coord_values={
                    precip_threshold_coord:
                    lambda t: isclose(t.point, 0.5)
                }))
            high_precip = prob_precip_cube.extract(
                iris.Constraint(time=this_time)
                & iris.Constraint(coord_values={
                    precip_threshold_coord:
                    lambda t: isclose(t.point, 7.0)
                }))
            torr_precip = prob_precip_cube.extract(
                iris.Constraint(time=this_time)
                & iris.Constraint(coord_values={
                    precip_threshold_coord:
                    lambda t: isclose(t.point, 35.0)
                }))
            err_string = "No matching {} cube for {}"
            if not isinstance(this_precip, iris.cube.Cube):
                raise ConstraintMismatchError(
                    err_string.format("any precip", this_time))
            if not isinstance(high_precip, iris.cube.Cube):
                raise ConstraintMismatchError(
                    err_string.format("high precip", this_time))
            if not isinstance(torr_precip, iris.cube.Cube):
                raise ConstraintMismatchError(
                    err_string.format("intense precip", this_time))
            # Increase prob(lightning) to Risk 2 (pl_dict[2]) when
            #   prob(precip > 7mm/hr) > phighthresh
            cube_slice.data = np.where(
                (high_precip.data >= self.phighthresh)
                & (cube_slice.data < self.pl_dict[2]),
                self.pl_dict[2],
                cube_slice.data,
            )
            # Increase prob(lightning) to Risk 1 (pl_dict[1]) when
            #   prob(precip > 35mm/hr) > ptorrthresh
            cube_slice.data = np.where(
                (torr_precip.data >= self.ptorrthresh)
                & (cube_slice.data < self.pl_dict[1]),
                self.pl_dict[1],
                cube_slice.data,
            )

            # Decrease prob(lightning) where prob(precip > 0.5 mm hr-1) is low.
            cube_slice.data = apply_double_scaling(this_precip, cube_slice,
                                                   self.precipthr,
                                                   self.ltngthr)

            new_cube_list.append(cube_slice)

        new_cube = new_cube_list.merge_cube()
        new_cube = check_cube_coordinates(prob_lightning_cube, new_cube)
        return new_cube
Example #21
 def test_1d_cube_exists(self):
     # Ensure that extract is able to extract from a 1d cube.
     constraint = iris.Constraint(name='a1')
     cube = Cube([1], long_name='a1')
     res = cube.extract(constraint)
     self.assertIs(res, cube)
Example #22
    def process(self, input_cube: Cube) -> Cube:
        """
        Calculates a field of texture to use in differentiating solid and
        more scattered features.

        Args:
            input_cube:
                Input data in cube format containing the field for which the
                texture is to be assessed.

        Returns:
            A cube containing either the mean across realization of the
            thresholded ratios to give the field texture, if a realization
            coordinate is present, or the thresholded ratios directly, if
            no realization coordinate is present.
        """

        values = np.unique(input_cube.data)
        non_binary = (values != 0) & (values != 1)
        if non_binary.any():
            raise ValueError("Incorrect input. Cube should hold binary data only")

        # Create new cube name for _calculate_ratio method.
        cube_name = find_threshold_coordinate(input_cube).name()
        # Extract threshold from input data to work with, taking into account floating
        # point comparisons.
        cube = input_cube.extract(
            iris.Constraint(
                coord_values={
                    cube_name: lambda cell: np.isclose(
                        cell.point, self.diagnostic_threshold
                    )
                }
            )
        )
        try:
            cube.remove_coord(cube_name)
        except AttributeError:
            msg = "Threshold {} is not present on coordinate with values {} {}"
            raise ValueError(
                msg.format(
                    self.diagnostic_threshold,
                    input_cube.coord(cube_name).points,
                    input_cube.coord(cube_name).units,
                )
            )
        ratios = iris.cube.CubeList()

        try:
            cslices = cube.slices_over("realization")
        except CoordinateNotFoundError:
            cslices = [cube]

        for cslice in cslices:
            ratios.append(self._calculate_ratio(cslice, cube_name, self.nbhood_radius))

        ratios = ratios.merge_cube()
        thresholded = BasicThreshold(self.textural_threshold).process(ratios)

        # Squeeze scalar threshold coordinate.
        try:
            field_texture = iris.util.squeeze(collapse_realizations(thresholded))
        except CoordinateNotFoundError:
            field_texture = iris.util.squeeze(thresholded)

        return field_texture
Example #23
 def test_scalar_cube_noexists(self):
     # Ensure that extract does not return a non-matching scalar cube.
     constraint = iris.Constraint(name='a2')
     cube = Cube(1, long_name='a1')
     res = cube.extract(constraint)
     self.assertIs(res, None)
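Taken together, the scalar-cube tests above reduce to one behaviour: a matching constraint returns the cube itself, and a non-matching one returns None. For instance:

import iris
from iris.cube import Cube

cube = Cube(1, long_name="a1")
print(cube.extract(iris.Constraint(name="a1")) is cube)  # True
print(cube.extract(iris.Constraint(name="a2")))          # None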
Example #24
    def setUp(self):
        """Create a cube containing a regular lat-lon grid.

        Data is formatted to increase linearly in x/y dimensions,
        e.g.
              0 1 2 3
              1 2 3 4
              2 3 4 5
              3 4 5 6
        """

        data = np.arange(0, 20, 1)
        for i in range(1, 20):
            data = np.append(data, np.arange(i, 20 + i))

        data.resize(1, 20, 20)
        latitudes = np.linspace(-90, 90, 20)
        longitudes = np.linspace(-180, 180, 20)
        latitude = DimCoord(latitudes,
                            standard_name='latitude',
                            units='degrees',
                            coord_system=GeogCS(6371229.0))
        longitude = DimCoord(longitudes,
                             standard_name='longitude',
                             units='degrees',
                             coord_system=GeogCS(6371229.0))

        # Use time of 2017-02-17 06:00:00
        time = DimCoord([1487311200],
                        standard_name='time',
                        units=cf_units.Unit(
                            'seconds since 1970-01-01 00:00:00',
                            calendar='gregorian'))

        time_dt = dt(2017, 2, 17, 6, 0)
        time_extract = Constraint(time=PartialDateTime(
            time_dt.year, time_dt.month, time_dt.day, time_dt.hour))
        forecast_ref_time = time[0].copy()
        forecast_ref_time.rename('forecast_reference_time')

        height = AuxCoord([1.5], standard_name='height', units='m')

        cube = Cube(data,
                    standard_name="air_temperature",
                    dim_coords_and_dims=[(time, 0), (latitude, 1),
                                         (longitude, 2)],
                    units="K")
        cube.add_aux_coord(forecast_ref_time)
        cube.add_aux_coord(height)
        cube.attributes['institution'] = 'Met Office'

        orography = Cube(np.ones((20, 20)),
                         long_name="surface_altitude",
                         dim_coords_and_dims=[(latitude, 0), (longitude, 1)],
                         units="m")

        # Western half of grid at altitude 0, eastern half at 10.
        # Note that the pressure_on_height_levels data is left unchanged,
        # so it is as if there is a sharp front running up the grid with
        # differing pressures on either side at equivalent heights above
        # the surface (e.g. east 1000hPa at 0m AMSL, west 1000hPa at 10m AMSL).
        # So there is higher pressure in the west.
        orography.data[0:10] = 0
        orography.data[10:] = 10
        ancillary_data = {}
        ancillary_data.update({'orography': orography})

        # Create additional vertical data used to calculate temperature lapse
        # rates from model levels.

        t_level0 = np.ones((1, 20, 20)) * 20.
        t_level1 = np.ones((1, 20, 20)) * 10.
        t_level2 = np.ones((1, 20, 20)) * 0.
        t_data = np.vstack((t_level0, t_level1, t_level2))
        t_data.resize((1, 3, 20, 20))

        p_level0 = np.ones((1, 20, 20)) * 1000.
        p_level1 = np.ones((1, 20, 20)) * 900.
        p_level2 = np.ones((1, 20, 20)) * 800.
        p_data = np.vstack((p_level0, p_level1, p_level2))
        p_data.resize((1, 3, 20, 20))

        height = DimCoord([0., 50., 100.], standard_name='height', units='m')

        temperature_on_height_levels = Cube(
            t_data,
            long_name="temperature_on_height_levels",
            dim_coords_and_dims=[(time, 0), (height, 1), (latitude, 2),
                                 (longitude, 3)],
            units="degree_Celsius")

        pressure_on_height_levels = Cube(p_data,
                                         long_name="pressure_on_height_levels",
                                         dim_coords_and_dims=[(time, 0),
                                                              (height, 1),
                                                              (latitude, 2),
                                                              (longitude, 3)],
                                         units="hPa")

        surface_pressure = Cube(p_data[0, 0].reshape(1, 20, 20),
                                long_name="surface_pressure",
                                dim_coords_and_dims=[(time, 0), (latitude, 1),
                                                     (longitude, 2)],
                                units="hPa")

        with iris.FUTURE.context(cell_datetime_objects=True):
            ad = {
                'temperature_on_height_levels':
                temperature_on_height_levels.extract(time_extract),
                'pressure_on_height_levels':
                pressure_on_height_levels.extract(time_extract),
                'surface_pressure':
                surface_pressure.extract(time_extract)
            }

        sites = OrderedDict()
        sites.update({
            '100': {
                'latitude': 4.74,
                'longitude': 9.47,
                'altitude': 10,
                'utc_offset': 0,
                'wmo_site': 0
            }
        })

        neighbour_list = np.empty(1,
                                  dtype=[('i', 'i8'), ('j', 'i8'),
                                         ('dz', 'f8'), ('edgepoint', 'bool_')])

        neighbour_list[0] = 10, 10, 0, False

        self.kwargs = {
            'upper_level': 2,
            'lower_level': 1,
            'dz_tolerance': 2.,
            'dthetadz_threshold': 0.02,
            'dz_max_adjustment': 70.
        }

        self.cube = cube
        self.ancillary_data = ancillary_data
        self.ad = ad
        self.sites = sites
        self.time_extract = time_extract
        self.neighbour_list = neighbour_list
        self.latitudes = latitudes
        self.latitude = latitude
        self.forecast_ref_time = forecast_ref_time
Example #25
def subset_data(
    cube: Cube,
    grid_spec: Optional[Dict[str, Dict[str, int]]] = None,
    site_list: Optional[List] = None,
) -> Cube:
    """Extract a spatial cutout or subset of sites from data
    to generate suite reference outputs.

    Args:
        cube:
            Input dataset
        grid_spec:
            Dictionary containing bounding grid points and an integer "thinning
            factor" for each of UK and global grid, to create cutouts.  Eg a
            "thinning factor" of 10 would mean every 10th point being taken for
            the cutout.  The expected dictionary has keys that are spatial coordinate
            names, with values that are dictionaries with "min", "max" and "thin" keys.
        site_list:
            List of WMO site IDs to extract.  These IDs must match the type and format
            of the "wmo_id" coordinate on the input spot cube.

    Returns:
        Subset of input cube as specified by input constraints

    Raises:
        ValueError:
            If site_list is not provided for a spot data cube
        ValueError:
            If the spot data cube does not contain any of the required sites
        ValueError:
            If grid_spec is not provided for a gridded cube
        ValueError:
            If grid_spec does not contain entries for the spatial coordinates on
            the input gridded data
        ValueError:
            If the grid_spec provided does not overlap with the cube domain
    """
    if cube.coords("spot_index"):
        if site_list is None:
            raise ValueError("site_list required to extract from spot data")

        constraint = Constraint(
            coord_values={"wmo_id": lambda x: x in site_list})
        result = cube.extract(constraint)
        if result is None:
            raise ValueError(
                f"Cube does not contain any of the required sites: {site_list}"
            )

    else:
        if grid_spec is None:
            raise ValueError("grid_spec required to extract from gridded data")

        x_coord = cube.coord(axis="x").name()
        y_coord = cube.coord(axis="y").name()

        for coord in [y_coord, x_coord]:
            if coord not in grid_spec:
                raise ValueError(
                    f"Cube coordinates {y_coord}, {x_coord} are not present within "
                    f"{grid_spec.keys()}")

        def _create_cutout(cube, grid_spec):
            """Given a gridded data cube and boundary limits for cutout dimensions,
            create cutout.  Expects cube on either lat-lon or equal area grid.
            """
            x_coord = cube.coord(axis="x").name()
            y_coord = cube.coord(axis="y").name()

            xmin = grid_spec[x_coord]["min"]
            xmax = grid_spec[x_coord]["max"]
            ymin = grid_spec[y_coord]["min"]
            ymax = grid_spec[y_coord]["max"]

            # need to use cube intersection for circular coordinates (longitude)
            if x_coord == "longitude":
                lat_constraint = Constraint(
                    latitude=lambda y: ymin <= y.point <= ymax)
                cutout = cube.extract(lat_constraint)
                if cutout is None:
                    return cutout

                cutout = cutout.intersection(longitude=(xmin, xmax),
                                             ignore_bounds=True)

                # intersection creates a new coordinate with default datatype - we
                # therefore need to re-cast to meet the IMPROVER standard
                cutout.coord("longitude").points = cutout.coord(
                    "longitude").points.astype(FLOAT_DTYPE)
                if cutout.coord("longitude").bounds is not None:
                    cutout.coord("longitude").bounds = cutout.coord(
                        "longitude").bounds.astype(FLOAT_DTYPE)

            else:
                x_constraint = Constraint(
                    projection_x_coordinate=lambda x: xmin <= x.point <= xmax)
                y_constraint = Constraint(
                    projection_y_coordinate=lambda y: ymin <= y.point <= ymax)
                cutout = cube.extract(x_constraint & y_constraint)

            return cutout

        cutout = _create_cutout(cube, grid_spec)

        if cutout is None:
            raise ValueError(
                "Cube domain does not overlap with cutout specified:\n"
                f"{x_coord}: {grid_spec[x_coord]}, {y_coord}: {grid_spec[y_coord]}"
            )

        original_coords = get_dim_coord_names(cutout)
        thin_x = grid_spec[x_coord]["thin"]
        thin_y = grid_spec[y_coord]["thin"]
        result_list = CubeList()
        try:
            for subcube in cutout.slices([y_coord, x_coord]):
                result_list.append(subcube[::thin_y, ::thin_x])
        except ValueError as cause:
            # error is raised if X or Y coordinate are single-valued (non-dimensional)
            if "iterator" in str(cause) and "dimension" in str(cause):
                raise ValueError(
                    "Function does not support single point extraction")
            else:
                raise

        result = result_list.merge_cube()
        enforce_coordinate_ordering(result, original_coords)

    return result
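A minimal sketch of why intersection, rather than a plain Constraint, is used for the circular longitude coordinate (the grid here is invented):

import numpy as np
from iris.coords import DimCoord
from iris.cube import Cube

longitude = DimCoord(np.arange(0.0, 360.0, 30.0), standard_name="longitude",
                     units="degrees", circular=True)
gridded = Cube(np.zeros(12), long_name="field",
               dim_coords_and_dims=[(longitude, 0)])

# A window spanning the wrap-around point: intersection remaps 330 -> -30.
cutout = gridded.intersection(longitude=(-30, 30), ignore_bounds=True)
print(cutout.coord("longitude").points)  # [-30.   0.  30.]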
Example #26
def apply_extraction(
    cube: Cube,
    constraint: Constraint,
    units: Optional[Dict] = None,
    use_original_units: bool = True,
    longitude_constraint: Optional[List] = None,
) -> Cube:
    """
    Using a set of constraints, extract a subcube from the provided cube if it
    is available.

    Args:
        cube:
            The cube from which a subcube is to be extracted.
        constraint:
            The constraint or ConstraintCombination that will be used to
            extract a subcube from the input cube.
        units:
            A dictionary of units for the constraints. Supplied if any
            coordinate constraints are provided in different units from those
            of the input cube (eg precip in mm/h for cube threshold in m/s).
        use_original_units:
            Boolean to state whether the coordinates used in the extraction
            should be converted back to their original units. The default is
            True, indicating that the units should be converted back to the
            original units.
        longitude_constraint:
            List containing the min and max values for the longitude.
            This has to be treated separately to the normal constraints due
            to the circular nature of longitude.

    Returns:
        A single cube matching the input constraints, or None if no subcube
        is found within cube that matches the constraints.
    """
    if units is None:
        output_cube = cube.extract(constraint)
    else:
        original_units = {}
        for coord in units.keys():
            original_units[coord] = cube.coord(coord).units
            cube.coord(coord).convert_units(units[coord])
        output_cube = cube.extract(constraint)
        if use_original_units:
            for coord in original_units:
                cube.coord(coord).convert_units(original_units[coord])
                try:
                    output_cube.coord(coord).convert_units(original_units[coord])
                except AttributeError:
                    # an empty output cube (None) is handled by the CLI
                    pass

    if longitude_constraint:
        output_cube = output_cube.intersection(
            longitude=longitude_constraint, ignore_bounds=True
        )
        # TODO: Below can be removed when https://github.com/SciTools/iris/issues/4119
        # is fixed
        output_cube.coord("longitude").points = output_cube.coord(
            "longitude"
        ).points.astype(FLOAT_DTYPE)
        if output_cube.coord("longitude").bounds is not None:
            output_cube.coord("longitude").bounds = output_cube.coord(
                "longitude"
            ).bounds.astype(FLOAT_DTYPE)

    return output_cube