Example 1
def get_coords_to_remove(cube: Cube, blend_coord: str) -> Optional[List[str]]:
    """
    Generate a list of coordinate names associated with the blend
    dimension.  Unless these are time-related coordinates, they should be
    removed after blending.

    Args:
        cube:
            Cube to be blended
        blend_coord:
            Name of coordinate over which the blend will be performed

    Returns:
        List of names of coordinates to remove
    """
    try:
        (blend_dim, ) = cube.coord_dims(blend_coord)
    except ValueError:
        # occurs if the blend coordinate is scalar
        if blend_coord == MODEL_BLEND_COORD:
            return [MODEL_BLEND_COORD, MODEL_NAME_COORD]
        return None

    crds_to_remove = []
    for coord in cube.coords():
        if coord.name() in TIME_COORDS:
            continue
        if blend_dim in cube.coord_dims(coord):
            crds_to_remove.append(coord.name())
    return crds_to_remove
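
A minimal usage sketch, assuming get_coords_to_remove and the module constants it references (TIME_COORDS, MODEL_BLEND_COORD, MODEL_NAME_COORD) are importable; the coordinate names below are illustrative only. Because the blend coordinate here is a real dimension, only the TIME_COORDS membership check comes into play:

import numpy as np
from iris.cube import Cube
from iris.coords import AuxCoord, DimCoord

# A cube with a "model_id" blend dimension and an auxiliary
# "model_configuration" coordinate attached to the same dimension.
model_id = DimCoord([0, 1, 2], long_name="model_id", units="1")
model_config = AuxCoord(["uk_det", "uk_ens", "global"], long_name="model_configuration")
cube = Cube(
    np.zeros(3, dtype=np.float32),
    standard_name="air_temperature",
    units="K",
    dim_coords_and_dims=[(model_id, 0)],
    aux_coords_and_dims=[(model_config, 0)],
)
# Neither coordinate is time-related, so both are flagged for removal.
print(get_coords_to_remove(cube, "model_id"))  # ['model_id', 'model_configuration']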
Example 2
def metadata_ok(updraught: Cube, baseline: Cube, model_id_attr=None) -> None:
    """
    Checks updraught Cube long_name, units and dtype are as expected.
    Compares updraught Cube with baseline to make sure everything else matches.

    Args:
        updraught: Result of VerticalUpdraught plugin
        baseline: A Precip or similar cube with the same coordinates and attributes.

    Raises:
        AssertionError: If anything doesn't match
    """
    assert updraught.long_name == "maximum_vertical_updraught"
    assert updraught.units == "m s-1"
    assert updraught.dtype == np.float32
    for coord in updraught.coords():
        base_coord = baseline.coord(coord.name())
        assert updraught.coord_dims(coord) == baseline.coord_dims(base_coord)
        assert coord == base_coord
    for attr in MANDATORY_ATTRIBUTES:
        assert updraught.attributes[attr] == baseline.attributes[attr]
    all_attr_keys = list(updraught.attributes.keys())
    if model_id_attr:
        assert updraught.attributes[model_id_attr] == baseline.attributes[
            model_id_attr]
        mandatory_attr_keys = [k for k in all_attr_keys if k != model_id_attr]
    else:
        mandatory_attr_keys = all_attr_keys
    assert sorted(mandatory_attr_keys) == sorted(MANDATORY_ATTRIBUTES)
Example 3
def _create_cube_with_padded_data(source_cube: Cube, data: ndarray,
                                  coord_x: DimCoord,
                                  coord_y: DimCoord) -> Cube:
    """
    Create a cube with newly created data where the metadata is copied from
    the input cube and the supplied x and y coordinates are added to the
    cube.

    Args:
        source_cube:
            Template cube used for copying metadata and all coordinates
            other than those on the x and y axes.
        data:
            Data to be put into the new cube.
        coord_x:
            Coordinate to be added to the new cube to represent the x axis.
        coord_y:
            Coordinate to be added to the new cube to represent the y axis.

    Returns:
        Cube built from the template cube using the requested data and
        the supplied x and y axis coordinates.
    """
    check_for_x_and_y_axes(source_cube)

    yname = source_cube.coord(axis="y").name()
    xname = source_cube.coord(axis="x").name()
    ycoord_dim = source_cube.coord_dims(yname)
    xcoord_dim = source_cube.coord_dims(xname)

    # inherit metadata (cube name, units, attributes etc)
    metadata_dict = deepcopy(source_cube.metadata._asdict())
    new_cube = iris.cube.Cube(data, **metadata_dict)

    # inherit non-spatial coordinates
    for coord in source_cube.coords():
        if coord.name() not in [yname, xname]:
            if source_cube.coords(coord, dim_coords=True):
                coord_dim = source_cube.coord_dims(coord)
                new_cube.add_dim_coord(coord, coord_dim)
            else:
                new_cube.add_aux_coord(coord)

    # update spatial coordinates
    if len(xcoord_dim) > 0:
        new_cube.add_dim_coord(coord_x, xcoord_dim)
    else:
        new_cube.add_aux_coord(coord_x)

    if len(ycoord_dim) > 0:
        new_cube.add_dim_coord(coord_y, ycoord_dim)
    else:
        new_cube.add_aux_coord(coord_y)

    return new_cube
Example 4
    def build_weights_cube(cube: Cube, weights: ndarray, blending_coord: str,) -> Cube:
        """Build a cube containing weights for use in blending.

        Args:
            cube:
                The cube that is being blended over blending_coord.
            weights:
                Array of weights
            blending_coord:
                Name of the coordinate over which the weights will be used
                to blend data, e.g. across model name when grid blending.

        Returns:
            A cube containing the array of weights.

        Raises:
            ValueError : If weights array is not of the same length as the
                         coordinate being blended over on cube.
        """

        if len(weights) != len(cube.coord(blending_coord).points):
            msg = (
                "Weights array provided is not the same size as the "
                "blending coordinate; weights shape: {}, blending "
                "coordinate shape: {}".format(
                    len(weights), len(cube.coord(blending_coord).points)
                )
            )
            raise ValueError(msg)

        try:
            weights_cube = next(cube.slices(blending_coord))
        except ValueError:
            weights_cube = iris.util.new_axis(cube, blending_coord)
            weights_cube = next(weights_cube.slices(blending_coord))
        weights_cube.attributes = None
        # Find dim associated with blending_coord and don't remove any coords
        # associated with this dimension.
        blending_dim = cube.coord_dims(blending_coord)
        defunct_coords = [
            crd.name()
            for crd in cube.coords(dim_coords=True)
            if not cube.coord_dims(crd) == blending_dim
        ]
        for crd in defunct_coords:
            weights_cube.remove_coord(crd)
        weights_cube.data = weights
        weights_cube.rename("weights")
        weights_cube.units = 1

        return weights_cube
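
A usage sketch, assuming build_weights_cube (a method on a weights plugin class in the original source) is callable directly; the cube below is illustrative:

import numpy as np
from iris.cube import Cube
from iris.coords import DimCoord

model_id = DimCoord([0, 1], long_name="model_id", units="1")
lon = DimCoord(np.arange(3, dtype=np.float32), standard_name="longitude", units="degrees")
cube = Cube(np.zeros((2, 3), dtype=np.float32), standard_name="air_temperature",
            units="K", dim_coords_and_dims=[(model_id, 0), (lon, 1)])

# The weights cube keeps only the blending coordinate; the longitude
# dimension coordinate is removed as defunct.
weights_cube = build_weights_cube(cube, np.array([0.4, 0.6]), "model_id")
print(weights_cube.name(), weights_cube.shape)  # weights (2,)
print(weights_cube.data)                        # [0.4 0.6]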
Example 5
    def _check_coords_are_horizontal(self, cube: Cube, coords: List[str]) -> None:
        """Checks that all the mentioned coords share the same dimensions as the x and y coords"""
        y_coord, x_coord = (cube.coord(axis=n) for n in "yx")
        horizontal_dims = set([cube.coord_dims(n)[0] for n in [y_coord, x_coord]])
        for coord in coords:
            try:
                coord_dims = set(cube.coord_dims(coord))
            except CoordinateNotFoundError:
                # The presence of coords is checked elsewhere
                continue
            if coord_dims != horizontal_dims:
                self.errors.append(
                    f"Coordinate {coord} does not span all horizontal coordinates"
                )
Example 6
def find_blend_dim_coord(cube: Cube, blend_coord: str) -> str:
    """
    Find the name of the dimension coordinate across which to perform the blend,
    since the input "blend_coord" may be an auxiliary coordinate.

    Args:
        cube:
            Cube to be blended
        blend_coord:
            Name of coordinate to blend over

    Returns:
        Name of dimension coordinate associated with blend dimension

    Raises:
        ValueError:
            If blend coordinate is associated with more or fewer than one dimension
    """
    blend_dim = cube.coord_dims(blend_coord)
    if len(blend_dim) != 1:
        if len(blend_dim) < 1:
            msg = f"Blend coordinate {blend_coord} has no associated dimension"
        else:
            msg = (
                "Blend coordinate must only be across one dimension. Coordinate "
                f"{blend_coord} is associated with dimensions {blend_dim}.")
        raise ValueError(msg)

    return cube.coord(dimensions=blend_dim[0], dim_coords=True).name()
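
A short sketch: blending over an auxiliary coordinate resolves to the dimension coordinate it sits on (coordinate names here are illustrative):

import numpy as np
from iris.cube import Cube
from iris.coords import AuxCoord, DimCoord

model_id = DimCoord([0, 1], long_name="model_id", units="1")
model_config = AuxCoord(["uk_det", "uk_ens"], long_name="model_configuration")
cube = Cube(np.zeros(2, dtype=np.float32), standard_name="air_temperature", units="K",
            dim_coords_and_dims=[(model_id, 0)],
            aux_coords_and_dims=[(model_config, 0)])

print(find_blend_dim_coord(cube, "model_configuration"))  # model_id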
Example 7
def _set_blended_time_coords(blended_cube: Cube,
                             cycletime: Optional[str]) -> None:
    """
    For cycle and model blending:
    - Add a "blend_time" coordinate equal to the current cycletime
    - Update the forecast reference time and forecast period coordinate points
      to reflect the current cycle time (behaviour is DEPRECATED)
    - Remove any bounds from the forecast reference time (behaviour is DEPRECATED)
    - Mark the forecast reference time and forecast period as DEPRECATED

    Modifies cube in place.

    Args:
        blended_cube
        cycletime:
            Current cycletime in YYYYMMDDTHHmmZ format
    """
    try:
        cycletime_point = _get_cycletime_point(blended_cube, cycletime)
    except TypeError:
        raise ValueError(
            "Current cycle time is required for cycle and model blending")

    add_blend_time(blended_cube, cycletime)
    blended_cube.coord("forecast_reference_time").points = [cycletime_point]
    blended_cube.coord("forecast_reference_time").bounds = None
    if blended_cube.coords("forecast_period"):
        blended_cube.remove_coord("forecast_period")
    new_forecast_period = forecast_period_coord(blended_cube)
    time_dim = blended_cube.coord_dims("time")
    blended_cube.add_aux_coord(new_forecast_period, data_dims=time_dim)
    for coord in ["forecast_period", "forecast_reference_time"]:
        msg = f"{coord} will be removed in future and should not be used"
        blended_cube.coord(coord).attributes.update(
            {"deprecation_message": msg})
Example 8
    def check_percentile_coord(cube: Cube) -> bool:
        """
        Determines if the cube to be blended has a percentile dimension
        coordinate.

        Args:
            cube:
                The cube to be checked for a percentile coordinate.

        Returns:
            True if there is a multi-valued percentile dimension; False if not

        Raises:
            ValueError : If there is a percentile coord and it is not a
                dimension coord in the cube.
            ValueError : If there is a percentile dimension with only one
                point, we need at least two points in order to do the blending.
        """
        try:
            perc_coord = cube.coord(PERC_COORD)
            perc_dim = cube.coord_dims(PERC_COORD)
            if not perc_dim:
                msg = "The percentile coord must be a dimension of the cube."
                raise ValueError(msg)
            # Check the percentile coordinate has more than one point,
            # otherwise raise an error as we won't be able to blend.
            if len(perc_coord.points) < 2:
                msg = (
                    "Percentile coordinate does not have enough points"
                    " in order to blend. Must have at least 2 percentiles."
                )
                raise ValueError(msg)
            return True
        except CoordinateNotFoundError:
            return False
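
A sketch of both outcomes, assuming the method (a static method on a blending plugin in the original source) is callable directly, and that PERC_COORD is "percentile", its assumed value in the blending module:

import numpy as np
from iris.cube import Cube
from iris.coords import DimCoord

perc = DimCoord([25.0, 50.0, 75.0], long_name="percentile", units="%")
cube = Cube(np.zeros(3, dtype=np.float32), long_name="air_temperature", units="K",
            dim_coords_and_dims=[(perc, 0)])
print(check_percentile_coord(cube))  # True: multi-valued percentile dimension

no_perc = Cube(np.zeros(3, dtype=np.float32), long_name="air_temperature", units="K")
print(check_percentile_coord(no_perc))  # False: no percentile coordinate at all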
Example 9
def sort_coord_in_cube(cube: Cube,
                       coord: str,
                       descending: bool = False) -> Cube:
    """Sort a cube based on the ordering within the chosen coordinate.
    Sorting can either be in ascending or descending order.
    This code is based upon https://gist.github.com/pelson/9763057.

    Args:
        cube:
            The input cube to be sorted.
        coord:
            Name of the coordinate to be sorted.
        descending:
            If True it will be sorted in descending order.

    Returns:
        Cube where the chosen coordinate has been sorted into either
        ascending or descending order.

    Warns:
        Warning if the coordinate being processed is a circular coordinate.
    """
    coord_to_sort = cube.coord(coord)
    if isinstance(coord_to_sort, DimCoord):
        if coord_to_sort.circular:
            msg = ("The {} coordinate is circular. If the values in the "
                   "coordinate span a boundary then the sorting may return "
                   "an undesirable result.".format(coord_to_sort.name()))
            warnings.warn(msg)
    (dim, ) = cube.coord_dims(coord_to_sort)
    index = [slice(None)] * cube.ndim
    index[dim] = np.argsort(coord_to_sort.points)
    if descending:
        index[dim] = index[dim][::-1]
    return cube[tuple(index)]
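
A minimal sketch; the coordinate to sort must be an auxiliary coordinate if unsorted, since iris DimCoords are required to be monotonic. Passing descending=True reverses the order:

import numpy as np
from iris.cube import Cube
from iris.coords import AuxCoord

height = AuxCoord([300.0, 100.0, 200.0], long_name="height", units="m")
cube = Cube(np.array([3.0, 1.0, 2.0], dtype=np.float32), long_name="wind_speed",
            units="m s-1", aux_coords_and_dims=[(height, 0)])

result = sort_coord_in_cube(cube, "height")
print(result.coord("height").points)  # [100. 200. 300.]
print(result.data)                    # [1. 2. 3.]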
Example 10
def flatten_spatial_dimensions(
    cube: Cube, ) -> Tuple[Union[ndarray, MaskedArray], int, int]:
    """
    Reshape a cube's data from (..., lat, lon) into (lat*lon, ...).

    Args:
        cube:
            Original data cube.

    Returns:
        - Reshaped data array.
        - Index of latitude cube coords.
        - Index of longitude cube coords.
    """
    in_values = cube.data
    lats_name, lons_name = latlon_names(cube)
    lats_index = cube.coord_dims(lats_name)[0]
    lons_index = cube.coord_dims(lons_name)[0]

    in_values = np.swapaxes(in_values, 0, lats_index)
    in_values = np.swapaxes(in_values, 1, lons_index)

    lats_len = int(in_values.shape[0])
    lons_len = int(in_values.shape[1])
    latlon_shape = [lats_len * lons_len] + list(in_values.shape[2:])
    in_values = np.reshape(in_values, latlon_shape)
    return in_values, lats_index, lons_index
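
A sketch, assuming latlon_names (an IMPROVER helper) returns the latitude and longitude coordinate names, here ("latitude", "longitude"). A (time, lat, lon) cube of shape (2, 3, 4) flattens to (12, 2):

import numpy as np
from iris.cube import Cube
from iris.coords import DimCoord

time = DimCoord([0, 1], standard_name="time", units="hours since 1970-01-01")
lat = DimCoord(np.linspace(50.0, 52.0, 3), standard_name="latitude", units="degrees")
lon = DimCoord(np.linspace(-2.0, 1.0, 4), standard_name="longitude", units="degrees")
cube = Cube(np.arange(24, dtype=np.float32).reshape(2, 3, 4),
            standard_name="air_temperature", units="K",
            dim_coords_and_dims=[(time, 0), (lat, 1), (lon, 2)])

values, lat_dim, lon_dim = flatten_spatial_dimensions(cube)
print(values.shape, lat_dim, lon_dim)  # (12, 2) 1 2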
Example 11
    def shape_weights(cube: Cube, weights: Cube) -> ndarray:
        """
        The function shapes weights to match the diagnostic cube. A cube of
        weights that vary across the blending coordinate will be broadcast to
        match the complete multidimensional cube shape. A multidimensional cube
        of weights will be checked to ensure that the coordinate names match
        between the two cubes. If they match the order will be enforced and
        then the shape will be checked. If the shapes match the weights will be
        returned as an array.

        Args:
            cube:
                The data cube on which a coordinate is being blended.
            weights:
                Cube of blending weights.

        Returns:
            An array of weights that matches the cube data shape.

        Raises:
            ValueError: If weights cube coordinates do not match the diagnostic
                        cube in the case of a multidimensional weights cube.
            ValueError: If weights cube shape is not broadcastable to the data
                        cube shape.
        """
        # Check that a multidimensional weights cube has coordinates that match
        # the diagnostic cube. Checking names only so as not to be too exacting.
        weight_dims = get_dim_coord_names(weights)
        cube_dims = get_dim_coord_names(cube)
        if set(weight_dims) == set(cube_dims):
            enforce_coordinate_ordering(weights, cube_dims)
            weights_array = weights.data.astype(FLOAT_DTYPE)
        else:
            # Map array of weights to shape of cube to collapse.
            dim_map = []
            dim_coords = [coord.name() for coord in weights.dim_coords]
            # Loop through dim coords in weights cube and find the dim the
            # coord relates to in the cube we are collapsing.
            for dim_coord in dim_coords:
                try:
                    dim_map.append(cube.coord_dims(dim_coord)[0])
                except CoordinateNotFoundError:
                    message = (
                        "{} is a coordinate on the weights cube but it is not "
                        "found on the cube we are trying to collapse.")
                    raise ValueError(message.format(dim_coord))

            try:
                weights_array = iris.util.broadcast_to_shape(
                    np.array(weights.data, dtype=FLOAT_DTYPE),
                    cube.shape,
                    tuple(dim_map),
                )
            except ValueError:
                msg = ("Weights cube is not a compatible shape with the"
                       " data cube. Weights: {}, Diagnostic: {}".format(
                           weights.shape, cube.shape))
                raise ValueError(msg)

        return weights_array
Example 12
def check_data_sufficiency(
    historic_forecasts: Cube,
    truths: Cube,
    point_by_point: bool,
    proportion_of_nans: float,
):
    """Check whether there is sufficient valid data (i.e. values that are not NaN)
    within the historic forecasts and truths, in order to robustly compute EMOS
    coefficients.

    Args:
        historic_forecasts:
            Cube containing historic forecasts.
        truths:
            Cube containing truths.
        point_by_point:
            If True, coefficients are calculated independently for each
            point within the input cube by creating an initial guess and
            minimising each grid point independently.
        proportion_of_nans:
            The proportion of the matching historic forecast-truth pairs that
            are allowed to be NaN.

    Raises:
        ValueError: If the proportion of NaNs is higher than allowable for a site,
            if using point_by_point.
        ValueError: If the proportion of NaNs is higher than allowable when
            considering all sites.
    """
    if not historic_forecasts.coords("wmo_id"):
        return

    truths_data = np.broadcast_to(truths.data, historic_forecasts.shape)
    index = np.isnan(historic_forecasts.data) & np.isnan(truths_data)

    if point_by_point:
        wmo_id_axis = historic_forecasts.coord_dims("wmo_id")[0]
        non_wmo_id_axes = list(range(len(historic_forecasts.shape)))
        non_wmo_id_axes.pop(wmo_id_axis)
        detected_proportion = np.count_nonzero(
            index, axis=tuple(non_wmo_id_axes)) / np.prod(
                np.array(index.shape)[non_wmo_id_axes])
        if np.any(detected_proportion > proportion_of_nans):
            number_of_sites = np.sum(detected_proportion > proportion_of_nans)
            msg = (
                f"{number_of_sites} sites have a proportion of NaNs that is "
                f"higher than the allowable proportion of NaNs within the "
                "historic forecasts and truth pairs. The allowable proportion is "
                f"{proportion_of_nans}. The maximum proportion of NaNs is "
                f"{np.amax(detected_proportion)}.")
            raise ValueError(msg)
    else:
        detected_proportion = np.count_nonzero(index) / index.size
        if detected_proportion > proportion_of_nans:
            msg = (
                f"The proportion of NaNs detected is {detected_proportion}. "
                f"This is higher than the allowable proportion of NaNs within the "
                f"historic forecasts and truth pairs: {proportion_of_nans}.")
            raise ValueError(msg)
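
A sketch of the non-point_by_point branch, using illustrative spot cubes with a wmo_id coordinate. One of the six forecast-truth pairs is NaN in both cubes, so any threshold below 1/6 triggers the error:

import numpy as np
from iris.cube import Cube
from iris.coords import AuxCoord, DimCoord

wmo_id = AuxCoord(["00001", "00002", "00003"], long_name="wmo_id")
frt = DimCoord([0, 1], standard_name="forecast_reference_time",
               units="hours since 1970-01-01")
historic = Cube(np.array([[1.0, 2.0, 3.0], [np.nan, np.nan, np.nan]], dtype=np.float32),
                standard_name="air_temperature", units="K",
                dim_coords_and_dims=[(frt, 0)],
                aux_coords_and_dims=[(wmo_id, 1)])
truths = Cube(np.array([np.nan, 1.0, 1.0], dtype=np.float32),
              standard_name="air_temperature", units="K",
              aux_coords_and_dims=[(wmo_id.copy(), 0)])

try:
    check_data_sufficiency(historic, truths, point_by_point=False,
                           proportion_of_nans=0.1)
except ValueError as err:
    print(err)  # detected proportion 1/6 exceeds the allowed 0.1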
Example 13
def add_coordinate_to_cube(
    cube: Cube,
    new_coord: DimCoord,
    new_dim_location: int = 0,
    copy_metadata: bool = True,
) -> Cube:
    """Create a copy of input cube with an additional dimension coordinate
    added to the cube at the specified axis. The data from input cube is broadcast
    over this new dimension.

    Args:
        cube:
            cube to which the new dimension coordinate will be added.
        new_coord:
            new coordinate to add to input cube.
        new_dim_location:
            position in cube.data to position the new dimension coord. Default is
            to add the new coordinate as the leading dimension.
        copy_metadata:
            flag as to whether to carry metadata over to output cube.

    Returns:
        A copy of cube broadcast over the new dimension coordinate.
    """
    input_dim_count = len(cube.dim_coords)

    if (new_dim_location > input_dim_count) or (new_dim_location < 0):
        raise ValueError(
            f"New dimension location: {new_dim_location} incompatible "
            f"with cube containing {input_dim_count} dimensions.")

    new_dim_coords = list(cube.dim_coords) + [new_coord]
    new_dims = list(range(input_dim_count + 1))
    new_dim_coords_and_dims = list(zip(new_dim_coords, new_dims))

    aux_coords = cube.aux_coords
    aux_coord_dims = [cube.coord_dims(coord.name()) for coord in aux_coords]
    new_aux_coords_and_dims = list(zip(aux_coords, aux_coord_dims))

    new_coord_size = len(new_coord.points)
    new_data = np.broadcast_to(cube.data[..., np.newaxis],
                               shape=cube.shape + (new_coord_size, )).astype(
                                   cube.data.dtype)
    output_cube = Cube(
        new_data,
        dim_coords_and_dims=new_dim_coords_and_dims,
        aux_coords_and_dims=new_aux_coords_and_dims,
    )
    if copy_metadata:
        output_cube.metadata = cube.metadata

    final_dim_order = np.insert(np.arange(input_dim_count),
                                new_dim_location,
                                values=input_dim_count)
    output_cube.transpose(final_dim_order)

    return output_cube
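
A minimal sketch, broadcasting a 2D (y, x) cube over a new leading realization dimension:

import numpy as np
from iris.cube import Cube
from iris.coords import DimCoord

y = DimCoord(np.arange(3, dtype=np.float32), standard_name="latitude", units="degrees")
x = DimCoord(np.arange(4, dtype=np.float32), standard_name="longitude", units="degrees")
cube = Cube(np.zeros((3, 4), dtype=np.float32), standard_name="air_temperature",
            units="K", dim_coords_and_dims=[(y, 0), (x, 1)])

realization = DimCoord(np.arange(5, dtype=np.int32), standard_name="realization", units="1")
result = add_coordinate_to_cube(cube, realization)
print(result.shape)  # (5, 3, 4): new coordinate leads, data broadcast over it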
Example 14
    def create_difference_cube(
        cube: Cube, coord_name: str, diff_along_axis: ndarray
    ) -> Cube:
        """
        Put the difference array into a cube with the appropriate
        metadata.

        Args:
            cube:
                Cube from which the differences have been calculated.
            coord_name:
                The name of the coordinate over which the differences
                have been calculated.
            diff_along_axis:
                Array containing the differences.

        Returns:
            Cube containing the differences calculated along the
            specified axis.
        """
        points = cube.coord(coord_name).points
        mean_points = (points[1:] + points[:-1]) / 2

        # Copy cube metadata and coordinates into a new cube.
        # Create a new coordinate for the coordinate along which the
        # difference has been calculated.
        metadata_dict = copy.deepcopy(cube.metadata._asdict())
        diff_cube = Cube(diff_along_axis, **metadata_dict)

        for coord in cube.dim_coords:
            dims = cube.coord_dims(coord)
            if coord.name() in [coord_name]:
                coord = coord.copy(points=mean_points)
            diff_cube.add_dim_coord(coord.copy(), dims)
        for coord in cube.aux_coords:
            dims = cube.coord_dims(coord)
            diff_cube.add_aux_coord(coord.copy(), dims)
        for coord in cube.derived_coords:
            dims = cube.coord_dims(coord)
            diff_cube.add_aux_coord(coord.copy(), dims)
        return diff_cube
Example 15
def create_regrid_cube(cube_array: ndarray, cube_in: Cube,
                       cube_out: Cube) -> Cube:
    """
    Create a regridded cube from the regridded values (numpy array).
    Source cube_in must be in regular latitude/longitude coordinates.
    Target cube_out can be either regular latitude/longitude grid or equal area.

    Args:
        cube_array:
            regridded value (multidimensional)
        cube_in:
            source cube (for value's non-grid dimensions and attributes)
        cube_out:
            target cube (for target grid information)

    Returns:
         Regridded result cube
    """
    # generate a cube based on new data and cube_in
    cube_v = Cube(
        cube_array,
        standard_name=cube_in.standard_name,
        var_name=cube_in.var_name,
        units=cube_in.units,
        attributes=cube_in.attributes,
    )

    # use dim_coord from cube_in except lat/lon
    cube_coord_names = get_cube_coord_names(cube_in)
    lats_name, lons_name = latlon_names(cube_in)
    cube_coord_names.remove(lats_name)
    cube_coord_names.remove(lons_name)

    ndim = len(cube_coord_names)
    for i, val in enumerate(cube_coord_names):
        cube_v.add_dim_coord(cube_in.coord(val), i)

    # Put suitable spatial coords from cube_out into the new cube
    cord_1, cord_2 = latlon_names(cube_out)
    cube_v.add_dim_coord(cube_out.coord(cord_1), ndim)
    cube_v.add_dim_coord(cube_out.coord(cord_2), ndim + 1)

    # add all aux_coords from cube_in
    for coord in cube_in.aux_coords:
        dims = np.array(cube_in.coord_dims(coord)) + 1
        cube_v.add_aux_coord(coord.copy(), dims)

    return cube_v
Example 16
    def calculate_difference(cube: Cube, coord_name: str) -> ndarray:
        """
        Calculate the difference along the axis specified by the
        coordinate.

        Args:
            cube:
                Cube from which the differences will be calculated.
            coord_name:
                Name of coordinate along which the difference is calculated.

        Returns:
            Array after the differences have been calculated along the
            specified axis.
        """
        diff_axis = cube.coord_dims(coord_name)[0]
        diff_along_axis = np.diff(cube.data, axis=diff_axis)
        return diff_along_axis
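
A sketch combining this method with create_difference_cube from Example 14 (in the original source they are static methods of a spatial-difference plugin; here they are assumed callable directly):

import numpy as np
from iris.cube import Cube
from iris.coords import DimCoord

x = DimCoord(np.array([0.0, 1.0, 2.0, 3.0]), standard_name="projection_x_coordinate",
             units="m")
orography = Cube(np.array([10.0, 12.0, 15.0, 19.0], dtype=np.float32),
                 standard_name="surface_altitude", units="m",
                 dim_coords_and_dims=[(x, 0)])

diffs = calculate_difference(orography, "projection_x_coordinate")
print(diffs)  # [2. 3. 4.]

# The difference cube sits on the midpoints of the original coordinate.
diff_cube = create_difference_cube(orography, "projection_x_coordinate", diffs)
print(diff_cube.coord("projection_x_coordinate").points)  # [0.5 1.5 2.5]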
Example 17
    def _run_recursion(
        cube: Cube,
        smoothing_coefficients_x: Cube,
        smoothing_coefficients_y: Cube,
        iterations: int,
    ) -> Cube:
        """
        Method to run the recursive filter.

        Args:
            cube:
                2D cube containing the input data to which the recursive
                filter will be applied.
            smoothing_coefficients_x:
                2D cube containing array of smoothing_coefficient values that
                will be used when applying the recursive filter along the
                x-axis.
            smoothing_coefficients_y:
                2D cube containing array of smoothing_coefficient values that
                will be used when applying the recursive filter along the
                y-axis.
            iterations:
                The number of iterations of the recursive filter

        Returns:
            Cube containing the smoothed field after the recursive filter
            method has been applied to the input cube.
        """
        (x_index, ) = cube.coord_dims(cube.coord(axis="x").name())
        (y_index, ) = cube.coord_dims(cube.coord(axis="y").name())
        output = cube.data

        for _ in range(iterations):
            output = RecursiveFilter._recurse_forward(
                output, smoothing_coefficients_x.data, x_index)
            output = RecursiveFilter._recurse_backward(
                output, smoothing_coefficients_x.data, x_index)
            output = RecursiveFilter._recurse_forward(
                output, smoothing_coefficients_y.data, y_index)
            output = RecursiveFilter._recurse_backward(
                output, smoothing_coefficients_y.data, y_index)
            cube.data = output
        return cube
Example 18
    def _define_slice(self, cube: Cube) -> List[Coord]:
        """
        Returns a list of coordinates over which to slice the input cube to
        create a list of cubes for blending.

        Args:
            cube:
                Cube input to plugin

        Returns:
            List of coordinates defining the slice to iterate over
        """
        if cube.coord_dims(self.weighting_coord_name):
            slice_list = [
                cube.coord(self.weighting_coord_name),
                cube.coord(axis="y"),
                cube.coord(axis="x"),
            ]
        else:
            slice_list = [cube.coord(axis="y"), cube.coord(axis="x")]
        return slice_list
Example 19
    def find_coord_order(self, mcube: Cube) -> Tuple[int, int, int, int]:
        """Extract coordinate ordering within a cube.

        Use coord_dims to assess the dimension associated with a particular
        dimension coordinate. If a coordinate is not a dimension coordinate,
        then a NaN value will be returned for that coordinate.

        Args:
            mcube:
                cube to check the order of coordinate axis

        Returns:
            - position of x axis.
            - position of y axis.
            - position of z axis.
            - position of t axis.
        """
        coord_names = [self.x_name, self.y_name, self.z_name, self.t_name]
        positions = len(coord_names) * [np.nan]
        for coord_index, coord_name in enumerate(coord_names):
            if mcube.coords(coord_name, dim_coords=True):
                (positions[coord_index], ) = mcube.coord_dims(coord_name)
        return positions
Example 20
    def apply_circular_kernel(self, cube: Cube, ranges: int) -> Cube:
        """
        Method to apply a circular kernel to the data within the input cube in
        order to smooth the resulting field.

        Args:
            cube:
                Cube containing the array to apply CircularNeighbourhood
                processing to.
            ranges:
                Number of grid cells in the x and y direction used to create
                the kernel.

        Returns:
            Cube containing the smoothed field after the kernel has been
            applied.
        """
        data = cube.data
        full_ranges = np.zeros([np.ndim(data)])
        axes = []
        for axis in ["x", "y"]:
            coord_name = cube.coord(axis=axis).name()
            axes.append(cube.coord_dims(coord_name)[0])

        for axis in axes:
            full_ranges[axis] = ranges
        self.kernel = circular_kernel(full_ranges, ranges, self.weighted_mode)
        # Smooth the data by applying the kernel.
        if self.sum_or_fraction == "sum":
            total_area = 1.0
        else:
            # sum_or_fraction is in fraction mode
            total_area = np.sum(self.kernel)

        cube.data = correlate(data, self.kernel, mode="nearest") / total_area
        return cube
Example 21
    def _stack_subensembles(self, forecast_subensembles: Cube) -> Cube:
        """Stacking the realization and percentile dimensions in forecast_subensemble
        into a single realization dimension. Realization and percentile are assumed to
        be the first and second dimensions respectively.

        Args:
            input_cube:
                Cube containing the forecast_subensembles.

        Returns:
            Cube containing single realization dimension in place of the realization
            and percentile dimensions in forecast_subensembles.

        Raises:
            ValueError:
                if realization and percentile are not the first and second
                dimensions.
        """
        realization_percentile_dims = (
            *forecast_subensembles.coord_dims("realization"),
            *forecast_subensembles.coord_dims("percentile"),
        )
        if realization_percentile_dims != (0, 1):
            raise ValueError("Invalid dimension coordinate ordering.")
        realization_size = len(
            forecast_subensembles.coord("realization").points)
        percentile_size = len(forecast_subensembles.coord("percentile").points)
        new_realization_coord = DimCoord(
            points=np.arange(realization_size * percentile_size,
                             dtype=np.int32),
            standard_name="realization",
            units="1",
        )
        # As we are stacking the first two dimensions, we need to subtract 1 from all
        # dimension position values.
        dim_coords_and_dims = [(new_realization_coord, 0)]
        dim_coords = forecast_subensembles.coords(dim_coords=True)
        for coord in dim_coords:
            if coord.name() not in ["realization", "percentile"]:
                dims = tuple(
                    d - 1
                    for d in forecast_subensembles.coord_dims(coord.name()))
                dim_coords_and_dims.append((coord, dims))
        aux_coords_and_dims = []
        aux_coords = forecast_subensembles.coords(dim_coords=False)
        for coord in aux_coords:
            dims = tuple(
                d - 1 for d in forecast_subensembles.coord_dims(coord.name()))
            aux_coords_and_dims.append((coord, dims))
        # Stack the first two dimensions.
        superensemble_data = np.reshape(forecast_subensembles.data, (-1, ) +
                                        forecast_subensembles.data.shape[2:])
        superensemble_cube = Cube(
            superensemble_data,
            standard_name=forecast_subensembles.standard_name,
            long_name=forecast_subensembles.long_name,
            var_name=forecast_subensembles.var_name,
            units=forecast_subensembles.units,
            dim_coords_and_dims=dim_coords_and_dims,
            aux_coords_and_dims=aux_coords_and_dims,
            attributes=forecast_subensembles.attributes,
        )
        return superensemble_cube
Example 22
    def build_diagnostic_cube(
        neighbour_cube: Cube,
        diagnostic_cube: Cube,
        spot_values: ndarray,
        additional_dims: Optional[List[DimCoord]] = [],
        scalar_coords: Optional[List[AuxCoord]] = None,
        auxiliary_coords: Optional[List[AuxCoord]] = None,
        unique_site_id: Optional[Union[List[str], ndarray]] = None,
        unique_site_id_key: Optional[str] = None,
    ) -> Cube:
        """
        Builds a spot data cube containing the extracted diagnostic values.

        Args:
            neighbour_cube:
                This cube is needed as a source for information about the spot
                sites which needs to be included in the spot diagnostic cube.
            diagnostic_cube:
                The cube is needed to provide the name and units of the
                diagnostic that is being processed.
            spot_values:
                An array containing the diagnostic values extracted for the
                required spot sites.
            additional_dims:
                Optional list containing iris.coord.DimCoords with any leading
                dimensions required before spot data.
            scalar_coords:
                Optional list containing iris.coord.AuxCoords with all scalar coordinates
                relevant for the spot sites.
            auxiliary_coords:
                Optional list containing iris.coords.AuxCoords which are non-scalar.
            unique_site_id:
                Optional list of 8-digit unique site identifiers.
            unique_site_id_key:
                String to name the unique_site_id coordinate. Required if
                unique_site_id is in use.

        Returns:
            A spot data cube containing the extracted diagnostic data.
        """
        # Find any AuxCoords associated with the additional_dims so these can be copied too
        additional_dims_aux = []
        for dim_coord in additional_dims:
            dim_coord_dim = diagnostic_cube.coord_dims(dim_coord)
            aux_coords = [
                aux_coord
                for aux_coord in diagnostic_cube.aux_coords
                if diagnostic_cube.coord_dims(aux_coord) == dim_coord_dim
            ]
            additional_dims_aux.append(aux_coords if aux_coords else [])

        spot_diagnostic_cube = build_spotdata_cube(
            spot_values,
            diagnostic_cube.name(),
            diagnostic_cube.units,
            neighbour_cube.coord("altitude").points,
            neighbour_cube.coord(axis="y").points,
            neighbour_cube.coord(axis="x").points,
            neighbour_cube.coord("wmo_id").points,
            unique_site_id=unique_site_id,
            unique_site_id_key=unique_site_id_key,
            scalar_coords=scalar_coords,
            auxiliary_coords=auxiliary_coords,
            additional_dims=additional_dims,
            additional_dims_aux=additional_dims_aux,
        )
        return spot_diagnostic_cube
Example 23
def check_cube_coordinates(
        cube: Cube,
        new_cube: Cube,
        exception_coordinates: Optional[List[str]] = None) -> Cube:
    """Find and promote to dimension coordinates any scalar coordinates in
    new_cube that were originally dimension coordinates in the progenitor
    cube. If coordinate is in new_cube that is not in the old cube, keep
    coordinate in its current position.

    Args:
        cube:
            The input cube that will be checked to identify the preferred
            coordinate order for the output cube.
        new_cube:
            The cube that must be checked and adjusted using the coordinate
            order from the original cube.
        exception_coordinates:
            The names of the coordinates that are permitted to be within the
            new_cube but are not available within the original cube.

    Returns:
        Modified cube with relevant scalar coordinates promoted to
        dimension coordinates with the dimension coordinates re-ordered,
        as best as can be done based on the original cube.

    Raises:
        CoordinateNotFoundError : Raised if the final dimension
            coordinates of the returned cube do not match the input cube.
        CoordinateNotFoundError : If a coordinate is within the permitted
            exceptions but is not in the new_cube.
    """
    if exception_coordinates is None:
        exception_coordinates = []

    # Promote available and relevant scalar coordinates
    cube_dim_names = [coord.name() for coord in cube.dim_coords]
    for coord in new_cube.aux_coords[::-1]:
        if coord.name() in cube_dim_names:
            new_cube = iris.util.new_axis(new_cube, coord)
    new_cube_dim_names = [coord.name() for coord in new_cube.dim_coords]
    # If we have the wrong number of dimensions then raise an error.
    if len(cube.dim_coords) + len(exception_coordinates) != len(
            new_cube.dim_coords):

        msg = ("The number of dimension coordinates within the new cube "
               "do not match the number of dimension coordinates within the "
               "original cube plus the number of exception coordinates. "
               "\n input cube dimensions {}, new cube dimensions {}".format(
                   cube_dim_names, new_cube_dim_names))
        raise CoordinateNotFoundError(msg)

    # Ensure dimension order matches
    new_cube_dimension_order = {
        coord.name(): new_cube.coord_dims(coord.name())[0]
        for coord in new_cube.dim_coords
    }
    correct_order = []
    new_cube_only_dims = []
    for coord_name in cube_dim_names:
        correct_order.append(new_cube_dimension_order[coord_name])
    for coord_name in exception_coordinates:
        try:
            new_coord_dim = new_cube.coord_dims(coord_name)[0]
            new_cube_only_dims.append(new_coord_dim)
        except CoordinateNotFoundError:
            msg = ("All permitted exception_coordinates must be on the"
                   " new_cube. In this case, coordinate {0} within the list "
                   "of permitted exception_coordinates ({1}) is not available"
                   " on the new_cube.").format(coord_name,
                                               exception_coordinates)
            raise CoordinateNotFoundError(msg)

    correct_order = np.array(correct_order)
    for dim in new_cube_only_dims:
        correct_order = np.insert(correct_order, dim, dim)

    new_cube.transpose(correct_order)

    return new_cube
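
A typical use is restoring a dimension lost by slicing; a minimal sketch:

import numpy as np
from iris.cube import Cube
from iris.coords import DimCoord

realization = DimCoord([0, 1], standard_name="realization", units="1")
lon = DimCoord(np.arange(3, dtype=np.float32), standard_name="longitude", units="degrees")
cube = Cube(np.zeros((2, 3), dtype=np.float32), standard_name="air_temperature",
            units="K", dim_coords_and_dims=[(realization, 0), (lon, 1)])

sliced = cube[0]  # realization collapses to a scalar coordinate
restored = check_cube_coordinates(cube, sliced)
print(restored.shape)  # (1, 3): realization promoted back to a dimension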
Example 24
def create_new_diagnostic_cube(
    name: str,
    units: Union[Unit, str],
    template_cube: Cube,
    mandatory_attributes: Union[Dict[str, str], LimitedAttributeDict],
    optional_attributes: Optional[Union[Dict[str, str],
                                        LimitedAttributeDict]] = None,
    data: Optional[Union[MaskedArray, ndarray]] = None,
    dtype: Type = np.float32,
) -> Cube:
    """
    Creates a new diagnostic cube with suitable metadata.

    Args:
        name:
            Standard or long name for output cube
        units:
            Units for output cube
        template_cube:
            Cube from which to copy dimensional and auxiliary coordinates
        mandatory_attributes:
            Dictionary containing values for the mandatory attributes
            "title", "source" and "institution".  These are overridden by
            values in the optional_attributes dictionary, if specified.
        optional_attributes:
            Dictionary of optional attribute names and values.  If values for
            mandatory attributes are included in this dictionary they override
            the values of mandatory_attributes.
        data:
            Data array.  If not set, cube is filled with zeros using a lazy
            data object, as this will be overwritten later by the caller
            routine.
        dtype:
            Datatype for dummy cube data if "data" argument is None.

    Returns:
        Cube with correct metadata to accommodate new diagnostic field
    """
    # Copy the mandatory attributes so the caller's dictionary is not mutated
    attributes = dict(mandatory_attributes)
    if optional_attributes is not None:
        attributes.update(optional_attributes)

    error_msg = ""
    for attr in MANDATORY_ATTRIBUTES:
        if attr not in attributes:
            error_msg += "{} attribute is required\n".format(attr)
    if error_msg:
        raise ValueError(error_msg)

    if data is None:
        data = da.zeros_like(template_cube.core_data(), dtype=dtype)

    aux_coords_and_dims, dim_coords_and_dims = [[
        (coord.copy(), template_cube.coord_dims(coord))
        for coord in getattr(template_cube, coord_type)
    ] for coord_type in ("aux_coords", "dim_coords")]

    cube = iris.cube.Cube(
        data,
        units=units,
        attributes=attributes,
        dim_coords_and_dims=dim_coords_and_dims,
        aux_coords_and_dims=aux_coords_and_dims,
    )
    cube.rename(name)

    return cube
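
A sketch, assuming MANDATORY_ATTRIBUTES names "title", "source" and "institution" (as in IMPROVER); the attribute values below are illustrative:

import numpy as np
from iris.cube import Cube
from iris.coords import DimCoord

lon = DimCoord(np.arange(3, dtype=np.float32), standard_name="longitude", units="degrees")
template = Cube(np.zeros(3, dtype=np.float32), standard_name="air_temperature",
                units="K", dim_coords_and_dims=[(lon, 0)])
mandatory = {"title": "Example Forecast", "source": "Example Model",
             "institution": "Example Institution"}

new_cube = create_new_diagnostic_cube("wind_speed", "m s-1", template, mandatory)
print(new_cube.name(), new_cube.units, new_cube.shape)  # wind_speed m s-1 (3,)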
Example 25
    def run(self, cube: Cube) -> None:
        """Populates self-consistent interpreted parameters, or raises collated errors
        describing (as far as possible) how the metadata are a) not self-consistent,
        and / or b) not consistent with the Met Office IMPROVER standard.

        Although every effort has been made to return as much information as possible,
        collated errors may not be complete if the issue is fundamental. The developer
        is advised to rerun this tool after each fix, until no further problems are
        raised.
        """

        # 1) Interpret diagnostic and type-specific metadata, including cell methods
        if cube.name() in ANCILLARIES:
            self.field_type = self.ANCIL
            self.diagnostic = cube.name()
            if cube.cell_methods:
                self.errors.append(
                    f"Unexpected cell methods {cube.cell_methods}")

        elif cube.name() in SPECIAL_CASES:
            self.field_type = self.diagnostic = cube.name()
            if cube.name() == "weather_code":
                if cube.cell_methods:
                    self.errors.append(
                        f"Unexpected cell methods {cube.cell_methods}")
            elif cube.name() == "wind_from_direction":
                if cube.cell_methods:
                    expected = CellMethod(method="mean", coords="realization")
                    if (len(cube.cell_methods) > 1
                            or cube.cell_methods[0] != expected):
                        self.errors.append(
                            f"Unexpected cell methods {cube.cell_methods}")
            else:
                self.unhandled = True
                return

        else:
            if "probability" in cube.name() and "threshold" in cube.name():
                self.field_type = self.PROB
                self.check_probability_cube_metadata(cube)
            else:
                self.diagnostic = cube.name()
                try:
                    perc_coord = find_percentile_coordinate(cube)
                except CoordinateNotFoundError:
                    coords = get_coord_names(cube)
                    if any([
                            cube.coord(coord).var_name == "threshold"
                            for coord in coords
                    ]):
                        self.field_type = self.PROB
                        self.check_probability_cube_metadata(cube)
                    else:
                        self.field_type = self.DIAG
                else:
                    self.field_type = self.PERC
                    if perc_coord.name() != PERC_COORD:
                        self.errors.append(
                            f"Percentile coordinate should have name {PERC_COORD}, "
                            f"has {perc_coord.name()}")

                    if perc_coord.units != "%":
                        self.errors.append(
                            "Percentile coordinate should have units of %, "
                            f"has {perc_coord.units}")

            self.check_cell_methods(cube)

        # 2) Interpret model and blend information from cube attributes
        self.check_attributes(cube.attributes)

        # 3) Check whether expected coordinates are present
        coords = get_coord_names(cube)
        if "spot_index" in coords:
            self.check_spot_data(cube, coords)

        if self.field_type == self.ANCIL:
            # there is no definitive standard for time coordinates on static ancillaries
            pass
        elif (cube.coords("time") and len(cube.coord_dims("time")) == 2
              and not self.blended):
            # 2D time coordinates are only present on global day-max diagnostics that
            # use a local time zone coordinate. These do not have a 2D forecast period.
            expected_coords = set(LOCAL_TIME_COORDS + UNBLENDED_TIME_COORDS)
            expected_coords.discard("forecast_period")
            self._check_coords_present(coords, expected_coords)
        elif self.blended:
            self._check_coords_present(coords, BLENDED_TIME_COORDS)
        else:
            self._check_coords_present(coords, UNBLENDED_TIME_COORDS)

        # 4) Check points are equal to upper bounds for bounded time coordinates
        for coord in ["time", "forecast_period"]:
            if coord in get_coord_names(cube):
                self._check_coord_bounds(cube, coord)

        # 5) Check datatypes on data and coordinates
        try:
            check_mandatory_standards(cube)
        except ValueError as cause:
            self.errors.append(str(cause))

        # 6) Raise collated errors if present
        if self.errors:
            raise ValueError("\n".join(self.errors))