Example #1
    def create_symbol_cube(self, cubes: Union[List[Cube], CubeList]) -> Cube:
        """
        Create an empty weather symbol cube

        Args:
            cubes:
                List of input cubes used to generate weather symbols

        Returns:
            A cube with suitable metadata to describe the weather symbols
            that will fill it, with all data points initially masked so that
            any unset points can be readily identified.
        """
        threshold_coord = find_threshold_coordinate(self.template_cube)
        template_cube = next(
            self.template_cube.slices_over([threshold_coord])).copy()
        # remove coordinates and bounds that do not apply to weather symbols
        template_cube.remove_coord(threshold_coord)

        mandatory_attributes = generate_mandatory_attributes(cubes)
        optional_attributes = weather_code_attributes()
        if self.model_id_attr:
            optional_attributes.update(
                update_model_id_attr_attribute(cubes, self.model_id_attr))

        symbols = create_new_diagnostic_cube(
            "weather_code",
            "1",
            template_cube,
            mandatory_attributes,
            optional_attributes=optional_attributes,
            data=np.ma.masked_all_like(template_cube.data).astype(np.int32),
        )
        return symbols
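
The masked-initialisation idiom above can be checked in isolation with plain NumPy. A minimal standalone sketch (the template array is illustrative, not an IMPROVER cube):

import numpy as np

# Toy stand-in for template_cube.data; shape and dtype are illustrative.
template_data = np.zeros((2, 3), dtype=np.float32)
symbol_data = np.ma.masked_all_like(template_data).astype(np.int32)
print(symbol_data.mask.all())  # True: every point starts out masked/unset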
Example #2
    def check_probability_cube_metadata(self, cube: Cube) -> None:
        """Checks probability-specific metadata"""
        if cube.units != "1":
            self.errors.append(
                f"Expected units of 1 on probability data, got {cube.units}")

        try:
            self.diagnostic = get_diagnostic_cube_name_from_probability_name(
                cube.name())
        except ValueError as cause:
            # if the probability name is not valid
            self.errors.append(str(cause))

        expected_threshold_name = get_threshold_coord_name_from_probability_name(
            cube.name())

        if not cube.coords(expected_threshold_name):
            msg = f"Cube does not have expected threshold coord '{expected_threshold_name}'; "
            try:
                threshold_name = find_threshold_coordinate(cube).name()
            except CoordinateNotFoundError:
                coords = [coord.name() for coord in cube.coords()]
                msg += (
                    f"no coord with var_name='threshold' found in all coords: {coords}"
                )
                self.errors.append(msg)
            else:
                msg += f"threshold coord has incorrect name '{threshold_name}'"
                self.errors.append(msg)
                self.check_threshold_coordinate_properties(
                    cube.name(), cube.coord(threshold_name))
        else:
            threshold_coord = cube.coord(expected_threshold_name)
            self.check_threshold_coordinate_properties(cube.name(),
                                                       threshold_coord)
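
Example #2 relies on IMPROVER's probability-name parsing helpers, assumed here to be importable from the same improver.metadata.probabilistic module as find_threshold_coordinate. A minimal sketch of the naming convention; the printed results are what the convention implies rather than values taken from the source:

from improver.metadata.probabilistic import (
    get_diagnostic_cube_name_from_probability_name,
    get_threshold_coord_name_from_probability_name,
)

name = "probability_of_air_temperature_above_threshold"
print(get_diagnostic_cube_name_from_probability_name(name))  # air_temperature
print(get_threshold_coord_name_from_probability_name(name))  # air_temperature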
Example #3
 def setUp(self):
     """ Set up temporary input cube """
     self.precip_cube = set_up_precip_probability_cube()
     self.threshold_coord = find_threshold_coordinate(self.precip_cube).name()
     self.uk_gridded_cube = set_up_uk_gridded_cube()
     self.global_gridded_cube = set_up_global_gridded_cube()
     self.units_dict = {self.threshold_coord: "mm h-1"}
    def process(self, cube):
        """
        Calculate probabilities between thresholds for the input cube

        Args:
            cube (iris.cube.Cube):
                Probability cube containing thresholded data (above or below)

        Returns:
            iris.cube.Cube:
                Cube containing probability of occurrence between thresholds
        """
        # if cube has no threshold-type coordinate, raise an error
        try:
            self.thresh_coord = find_threshold_coordinate(cube)
        except CoordinateNotFoundError:
            raise ValueError("Input is not a probability cube "
                             "(has no threshold-type coordinate)")
        self.cube = cube.copy()

        # check input cube units and convert if needed
        original_units = self.thresh_coord.units
        if original_units != self.threshold_units:
            self.cube.coord(self.thresh_coord).convert_units(
                self.threshold_units)

        # extract suitable cube slices
        self.cube_slices = self._slice_cube()

        # generate "between thresholds" probabilities
        output_cube = self._calculate_probabilities()
        self._update_metadata(output_cube, original_units)
        return output_cube
Example #5
    @staticmethod
    def _update_metadata(cube: Cube) -> Cube:
        """
        Modify the metadata of the input cube to resemble a Nowcast of
        lightning probability.

        1. Rename to "probability_of_rate_of_lightning_above_threshold"

        2. Remove the "threshold" coord
        (raises iris.exceptions.CoordinateNotFoundError if it is absent)

        3. Discard all cell_methods

        Args:
            cube:
                An input cube

        Returns:
            Output cube - a copy of the input cube with metadata relating to
            a Nowcast of lightning probability.
            The data array will be a copy of the input cube.data.
        """
        new_cube = cube.copy()
        new_cube.rename("probability_of_rate_of_lightning_above_threshold")
        threshold_coord = find_threshold_coordinate(new_cube)
        new_cube.remove_coord(threshold_coord)
        new_cube.cell_methods = None
        return new_cube
    def create_symbol_cube(cubes):
        """
        Create an empty weather symbol cube

        Args:
            cubes (list or iris.cube.CubeList):
                List of input cubes used to generate weather symbols
        Returns:
            iris.cube.Cube:
                A cube with suitable metadata to describe the weather symbols
                that will fill it
        """
        threshold_coord = find_threshold_coordinate(cubes[0])
        template_cube = next(cubes[0].slices_over([threshold_coord])).copy()
        # remove coordinates and bounds that do not apply to weather symbols
        template_cube.remove_coord(threshold_coord)
        for coord in template_cube.coords():
            if coord.name() in ['forecast_period', 'time']:
                coord.bounds = None

        attributes = generate_mandatory_attributes(cubes)
        symbols = create_new_diagnostic_cube(
            "weather_code",
            "1",
            template_cube,
            attributes,
            optional_attributes=weather_code_attributes(),
            dtype=np.int32)
        return symbols
Example #7
    def test_check_single_threshold(self):
        """
        Test that the plugin returns an iris.cube.Cube with the expected
        data values for the percentiles, if a single threshold is used for
        constructing the percentiles.
        """
        data = np.array(
            [
                [[12.2, 8.0, 12.2], [-16.0, 8.0, -30.4], [-30.4, -34.0, -35.2]],
                [
                    [29.0, 26.66666667, 29.0],
                    [23.75, 26.66666667, 8.0],
                    [8.0, -10.0, -16.0],
                ],
                [
                    [45.8, 45.33333333, 45.8],
                    [44.75, 45.33333333, 41.6],
                    [41.6, 29.0, 3.2],
                ],
            ],
            dtype=np.float32,
        )

        threshold_coord = find_threshold_coordinate(self.cube)
        cube = next(self.cube.slices_over(threshold_coord))

        result = Plugin()._probabilities_to_percentiles(
            cube, self.percentiles, self.bounds_pairing)
        self.assertArrayAlmostEqual(result.data, data, decimal=5)
    def test_check_single_threshold(self):
        """
        Test that the plugin returns an iris.cube.Cube with the expected
        data values for the percentiles, if a single threshold is used for
        constructing the percentiles.
        """
        data = np.array(
            [[[[12.2, 8., 12.2], [-16., 8., -30.4], [-30.4, -34., -35.2]]],
             [[[29., 26.66666667, 29.], [23.75, 26.66666667, 8.],
               [8., -10., -16.]]],
             [[[45.8, 45.33333333, 45.8], [44.75, 45.33333333, 41.6],
               [41.6, 29., 3.2]]]],
            dtype=np.float32)

        threshold_coord = find_threshold_coordinate(
            self.current_temperature_forecast_cube)
        for acube in self.current_temperature_forecast_cube.slices_over(
                threshold_coord):
            cube = acube
            break
        percentiles = [10, 50, 90]
        bounds_pairing = (-40, 50)
        plugin = Plugin()
        result = plugin._probabilities_to_percentiles(cube, percentiles,
                                                      bounds_pairing)
        self.assertArrayAlmostEqual(result.data, data, decimal=5)
 def setUp(self):
     """Set up cube for testing range constraint."""
     self.precip_cube = set_up_precip_probability_cube()
     self.coord_name = find_threshold_coordinate(
         self.precip_cube).name()
     self.precip_cube.coord(self.coord_name).convert_units("mm h-1")
     self.expected_data = self.precip_cube[:2].data
Example #10
def process(cube: cli.inputcube,
            *,
            threshold_ranges: cli.inputjson,
            threshold_units=None):
    """
    Calculate the probabilities of occurrence between thresholds

    Args:
        cube (iris.cube.Cube):
            Cube containing input probabilities above or below threshold
        threshold_ranges (list):
            List of 2-item iterables specifying thresholds between which
            probabilities should be calculated
        threshold_units (str):
            Units in which the thresholds are specified.  If None, defaults
            to the units of the threshold coordinate on the input cube.

    Returns:
        iris.cube.Cube:
            Cube containing probability of occurrences between the thresholds
            specified
    """
    from improver.between_thresholds import OccurrenceBetweenThresholds
    from improver.metadata.probabilistic import find_threshold_coordinate

    if threshold_units is None:
        threshold_units = str(find_threshold_coordinate(cube).units)

    plugin = OccurrenceBetweenThresholds(threshold_ranges, threshold_units)
    return plugin.process(cube)
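
A hypothetical invocation of this CLI entry point from Python; the input cube name and the threshold values below are illustrative, not taken from the source:

# "precip_prob_cube" is assumed to be a probability-above-threshold cube whose
# threshold coordinate is in (or convertible to) mm h-1.
between = process(
    precip_prob_cube,
    threshold_ranges=[[0.03, 0.1], [0.1, 1.0]],
    threshold_units="mm h-1",
)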
Example #11
 def test_old_convention(self):
     """Test function recognises threshold coordinate with name "threshold"
     """
     threshold_coord = find_threshold_coordinate(self.cube_old)
     self.assertEqual(threshold_coord.name(), "threshold")
     self.assertArrayAlmostEqual(threshold_coord.points,
                                 self.threshold_points)
Example #12
 def test_ordering_for_realization_threshold_percentile_coordinate(self):
     """Test that the cube has been reordered, if it is originally in an
     undesirable order and the cube contains a "threshold" coordinate,
     a "realization" coordinate and a "percentile" coordinate."""
     cube = set_up_probability_cube(
         np.zeros((3, 4, 5), dtype=np.float32),
         np.array([273.0, 274.0, 275.0], dtype=np.float32),
     )
     cube = add_coordinate(cube, [0, 1, 2],
                           "realization",
                           dtype=np.int32,
                           coord_units="1")
     cube = add_coordinate(cube, [10, 50, 90],
                           "percentile",
                           dtype=np.float32,
                           coord_units="%")
     cube.transpose([4, 3, 2, 1, 0])
     save_netcdf(cube, self.filepath)
     result = load_cube(self.filepath)
     threshold_coord = find_threshold_coordinate(result)
     self.assertEqual(result.coord_dims("realization")[0], 0)
     self.assertEqual(result.coord_dims("percentile")[0], 1)
     self.assertEqual(result.coord_dims(threshold_coord)[0], 2)
     self.assertArrayAlmostEqual(result.coord_dims("latitude")[0], 3)
     self.assertArrayAlmostEqual(result.coord_dims("longitude")[0], 4)
Example #13
    def apply_ice(self, prob_lightning_cube, ice_cube):
        """
        Modify Nowcast of lightning probability with ice data from a radar
        composite (VII; Vertically Integrated Ice)

        Args:
            prob_lightning_cube (iris.cube.Cube):
                First-guess lightning probability.
                The forecast_period coord is modified in-place to "minutes".
            ice_cube (iris.cube.Cube):
                Analysis of vertically integrated ice (VII) from radar
                thresholded at self.ice_thresholds.
                Units of threshold coord modified in-place to kg m^-2

        Returns:
            iris.cube.Cube:
                Output cube containing updated nowcast lightning probability.
                This cube will have the same dimensions and meta-data as
                prob_lightning_cube.
                The influence of the data in ice_cube reduces linearly to zero
                as forecast_period increases to 2H30M.

        Raises:
            iris.exceptions.ConstraintMismatchError:
                If ice_cube does not contain the expected thresholds.
        """
        prob_lightning_cube.coord("forecast_period").convert_units("minutes")
        # check prob-ice threshold units are as expected
        ice_threshold_coord = find_threshold_coordinate(ice_cube)
        ice_threshold_coord.convert_units("kg m^-2")
        new_cube_list = iris.cube.CubeList([])
        err_string = "No matching prob(Ice) cube for threshold {}"
        for cube_slice in prob_lightning_cube.slices_over("time"):
            fcmins = cube_slice.coord("forecast_period").points[0]
            for threshold, prob_max in zip(self.ice_thresholds,
                                           self.ice_scaling):
                ice_slice = ice_cube.extract(
                    iris.Constraint(coord_values={
                        ice_threshold_coord:
                        lambda t: isclose(t.point, threshold)
                    }))
                if not isinstance(ice_slice, iris.cube.Cube):
                    raise ConstraintMismatchError(err_string.format(threshold))
                # Linearly reduce impact of ice as fcmins increases to 2H30M.
                ice_scaling = [0.0, (prob_max * (1.0 - (fcmins / 150.0)))]
                if ice_scaling[1] > 0:
                    cube_slice.data = np.maximum(
                        rescale(
                            ice_slice.data,
                            data_range=(0.0, 1.0),
                            scale_range=ice_scaling,
                            clip=True,
                        ),
                        cube_slice.data,
                    )
            new_cube_list.append(cube_slice)

        new_cube = new_cube_list.merge_cube()
        new_cube = check_cube_coordinates(prob_lightning_cube, new_cube)
        return new_cube
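
The ice-scaling arithmetic above can be sanity-checked in isolation. A small sketch of the linear decay described in the docstring (the prob_max value is illustrative):

# At fcmins = 0 the full prob_max applies; by 150 minutes (2H30M) the upper
# end of the ice scaling range has decayed to zero.
prob_max = 0.9
for fcmins in (0, 75, 150):
    upper_bound = prob_max * (1.0 - (fcmins / 150.0))
    print(fcmins, round(upper_bound, 3))  # 0 -> 0.9, 75 -> 0.45, 150 -> 0.0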
Example #14
    def iterate_over_threshold(self, cubelist, threshold):
        """
        Iterate over the application of thresholding to multiple cubes.

        Args:
            cubelist (iris.cube.CubeList):
                Cubelist containing cubes to be thresholded.
            threshold (float):
                The threshold that will be applied.

        Returns:
            iris.cube.CubeList:
                Cubelist after thresholding each cube.
        """
        cubes = iris.cube.CubeList([])
        for cube in cubelist:
            threshold_cube = BasicThreshold(
                threshold, fuzzy_factor=self.fuzzy_factor,
                comparison_operator=self.comparison_operator)(cube.copy())
            # Will only ever contain one slice on threshold
            for cube_slice in threshold_cube.slices_over(
                    find_threshold_coordinate(threshold_cube)):
                threshold_cube = cube_slice

            cubes.append(threshold_cube)
        return cubes
 def test_missing_threshold_coord(self):
     """Test that the method raises an error in Iris if the cube doesn't
     have a threshold coordinate to remove."""
     self.cube.remove_coord(find_threshold_coordinate(self.cube))
     msg = "No threshold coord found"
     with self.assertRaisesRegex(CoordinateNotFoundError, msg):
         self.plugin._update_metadata(self.cube)
 def setUp(self):
     """Set up current_temperature_forecast_cube for testing."""
     self.current_temperature_forecast_cube = (
         add_forecast_reference_time_and_forecast_period(
             set_up_probability_above_threshold_temperature_cube()))
     self.threshold_points = find_threshold_coordinate(
         self.current_temperature_forecast_cube).points
    def create_symbol_cube(cubes):
        """
        Create an empty weather symbol cube

        Args:
            cubes (list or iris.cube.CubeList):
                List of input cubes used to generate weather symbols
        Returns:
            iris.cube.Cube:
                A cube with suitable metadata to describe the weather symbols
                that will fill it, with all data points initially masked so that
                any unset points can be readily identified.
        """
        threshold_coord = find_threshold_coordinate(cubes[0])
        template_cube = next(cubes[0].slices_over([threshold_coord])).copy()
        # remove coordinates and bounds that do not apply to weather symbols
        template_cube.remove_coord(threshold_coord)
        for coord in template_cube.coords():
            if coord.name() in ["forecast_period", "time"]:
                coord.bounds = None

        attributes = generate_mandatory_attributes(cubes)
        symbols = create_new_diagnostic_cube(
            "weather_code",
            "1",
            template_cube,
            attributes,
            optional_attributes=weather_code_attributes(),
            data=np.ma.masked_all_like(template_cube.data).astype(np.int32),
        )
        return symbols
Example #18
 def setUp(self):
     """
     Set up a basic cube and linear weights cube for the process
     method. Input cube has 2 thresholds and 3 forecast_reference_times
     """
     thresholds = [10, 20]
     data = np.ones((2, 2, 3), dtype=np.float32)
     cycle1 = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 0, 0),
     )
     cycle2 = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 1, 0),
     )
     cycle3 = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 2, 0),
     )
     self.cube_to_collapse = CubeList([cycle1, cycle2, cycle3]).merge_cube()
     self.cube_to_collapse = squeeze(self.cube_to_collapse)
     self.cube_to_collapse.rename("weights")
     # This input array has 3 forecast reference times and 2 thresholds.
     # The two thresholds have the same weights.
     self.cube_to_collapse.data = np.array(
         [
             [[[1, 0, 1], [1, 1, 1]], [[1, 0, 1], [1, 1, 1]]],
             [[[0, 0, 1], [0, 1, 1]], [[0, 0, 1], [0, 1, 1]]],
             [[[1, 1, 1], [1, 1, 1]], [[1, 1, 1], [1, 1, 1]]],
         ],
         dtype=np.float32,
     )
     self.cube_to_collapse.data = np.ma.masked_equal(self.cube_to_collapse.data, 0)
     # Create a one_dimensional weights cube by slicing the larger
     # weights cube.
     # The resulting cube only has a forecast_reference_time coordinate.
     self.one_dimensional_weights_cube = self.cube_to_collapse[:, 0, 0, 0]
     self.one_dimensional_weights_cube.remove_coord("projection_x_coordinate")
     self.one_dimensional_weights_cube.remove_coord("projection_y_coordinate")
     self.one_dimensional_weights_cube.remove_coord(
         find_threshold_coordinate(self.one_dimensional_weights_cube)
     )
     self.one_dimensional_weights_cube.data = np.array(
         [0.2, 0.5, 0.3], dtype=np.float32
     )
     self.plugin = SpatiallyVaryingWeightsFromMask(
         "forecast_reference_time", fuzzy_length=2
     )
     self.plugin_no_fuzzy = SpatiallyVaryingWeightsFromMask(
         "forecast_reference_time", fuzzy_length=1
     )
def enforce_coordinate_ordering(cube, coord_names, anchor_start=True):
    """
    Function to reorder dimensions within a cube.
    Note that the input cube is modified in place.

    Args:
        cube (iris.cube.Cube):
            Cube where the ordering will be enforced to match the order within
            the coord_names. This input cube will be modified as part of this
            function.
        coord_names (list or str):
            List of the names of the coordinates to order. If a string is
            passed in, only the single specified coordinate is reordered.
        anchor_start (bool):
            Define whether the specified coordinates should be moved to the
            start (True) or end (False) of the list of dimensions. If True, the
            coordinates are inserted as the first dimensions in the order in
            which they are provided. If False, the coordinates are moved to the
            end. For example, if the specified coordinate names are
            ["time", "realization"] then "realization" will be the last
            coordinate within the cube, whilst "time" will be the last but one.
    """
    if isinstance(coord_names, str):
        coord_names = [coord_names]

    # construct a list of dimensions on the cube to be reordered
    dim_coord_names = get_dim_coord_names(cube)
    coords_to_reorder = []
    for coord in coord_names:
        if coord == "threshold":
            try:
                coord = find_threshold_coordinate(cube).name()
            except CoordinateNotFoundError:
                continue
        if coord in dim_coord_names:
            coords_to_reorder.append(coord)

    # construct dictionary of original dimensions of the form, eg:
    # {'time': 0, 'realization': 1, ...}
    original_dims = {}
    for coord in cube.coords(dim_coords=True):
        original_dims[coord.name()] = cube.coord_dims(coord)[0]

    # construct list of reordered dimensions assuming start anchor
    new_dims = []
    for coord in coords_to_reorder:
        new_dims.append(original_dims[coord])
    for coord in cube.coords(dim_coords=True):
        if original_dims[coord.name()] not in new_dims:
            new_dims.append(original_dims[coord.name()])

    # if anchor is end, reshuffle the list
    if not anchor_start:
        new_dims_end = new_dims[len(coords_to_reorder):]
        new_dims_end.extend(new_dims[:len(coords_to_reorder)])
        new_dims = new_dims_end

    # transpose cube using new coordinate order
    cube.transpose(new_dims)
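
A usage sketch for enforce_coordinate_ordering on a hand-built iris cube, assuming iris is installed and the get_dim_coord_names helper used above is importable; the coordinates and data are illustrative:

import numpy as np
import iris
from iris.coords import DimCoord

latitude = DimCoord(np.arange(3, dtype=np.float32),
                    standard_name="latitude", units="degrees")
realization = DimCoord(np.arange(2, dtype=np.int32),
                       standard_name="realization", units="1")
cube = iris.cube.Cube(
    np.zeros((3, 2), dtype=np.float32),
    long_name="toy_diagnostic",
    dim_coords_and_dims=[(latitude, 0), (realization, 1)],
)

# Move "realization" to the front; the cube is transposed in place.
enforce_coordinate_ordering(cube, ["realization"])
print(cube.coord_dims("realization"))  # (0,)
print(cube.coord_dims("latitude"))     # (1,)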
Example #20
 def test_new_convention(self):
     """Test function recognises threshold coordinate with standard
     diagnostic name and "threshold" as var_name"""
     threshold_coord = find_threshold_coordinate(self.cube_new)
     self.assertEqual(threshold_coord.name(), "air_temperature")
     self.assertEqual(threshold_coord.var_name, "threshold")
     self.assertArrayAlmostEqual(threshold_coord.points,
                                 self.threshold_points)
 def test_precip_has_no_thresholds(self):
     """Test that the method raises an error if the threshold coord is
     omitted from the precip_cube"""
     threshold_coord = find_threshold_coordinate(self.precip_cube)
     self.precip_cube.remove_coord(threshold_coord)
     msg = "No threshold coord found"
     with self.assertRaisesRegex(CoordinateNotFoundError, msg):
         self.plugin(CubeList([self.fg_cube, self.ltng_cube, self.precip_cube]))
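
The "new convention" checked by test_new_convention above can be reproduced with a hand-built cube. A sketch assuming iris and IMPROVER are importable; the coordinate values are illustrative:

import numpy as np
import iris
from iris.coords import DimCoord
from improver.metadata.probabilistic import find_threshold_coordinate

threshold = DimCoord(
    np.array([273.15, 274.15], dtype=np.float32),
    standard_name="air_temperature",
    var_name="threshold",
    units="K",
)
cube = iris.cube.Cube(
    np.zeros((2, 4), dtype=np.float32),
    long_name="probability_of_air_temperature_above_threshold",
    units="1",
    dim_coords_and_dims=[(threshold, 0)],
)
coord = find_threshold_coordinate(cube)
print(coord.name(), coord.var_name)  # air_temperature threshold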
Example #22
 def setUp(self):
     """Create a cube with threshold coord is not first coord."""
     self.cube1 = create_cube_with_threshold()
     self.cube2 = add_coordinate(self.cube1,
                                 np.arange(2).astype(np.float32),
                                 "realization",
                                 dtype=np.float32)
     self.coord_name = find_threshold_coordinate(self.cube1).name()
Example #23
    def process(self, cubelist):
        """
        Produce Nowcast of lightning probability.

        Args:
            cubelist (iris.cube.CubeList):
                Contains cubes of
                    * First-guess lightning probability
                    * Nowcast precipitation probability
                      (required thresholds: > 0.5, 7., 35. mm hr-1)
                    * Nowcast lightning rate
                    * (optional) Analysis of vertically integrated ice (VII)
                      from radar thresholded into probability slices
                      at self.ice_thresholds.
                Where thresholds are listed, only these threshold values
                will be used.

        Returns:
            iris.cube.Cube:
                Output cube containing Nowcast lightning probability.
                This cube will have the same dimensions as the input
                Nowcast precipitation probability after the threshold coord
                has been removed.

        Raises:
            iris.exceptions.ConstraintMismatchError:
                If cubelist does not contain the expected cubes.
        """
        first_guess_lightning_cube = cubelist.extract(
            "probability_of_rate_of_lightning_above_threshold", strict=True)
        lightning_rate_cube = cubelist.extract(
            "rate_of_lightning", strict=True)
        lightning_rate_cube.convert_units("min^-1")  # Ensure units are correct
        prob_precip_cube = cubelist.extract(
            "probability_of_lwe_precipitation_rate_above_threshold",
            strict=True)
        # Now find prob_vii_cube. Can't use strict=True here as cube may not be
        # present, so will use a normal extract and then merge_cube if needed.
        prob_vii_cube = cubelist.extract(
            "probability_of_vertical_integral_of_ice_above_threshold")
        if prob_vii_cube:
            prob_vii_cube = prob_vii_cube.merge_cube()
        precip_threshold_coord = find_threshold_coordinate(prob_precip_cube)
        precip_threshold_coord.convert_units('mm hr-1')
        precip_slice = prob_precip_cube.extract(
            iris.Constraint(coord_values={
                precip_threshold_coord: lambda t: isclose(t.point, 0.5)}))
        if not isinstance(precip_slice, iris.cube.Cube):
            raise ConstraintMismatchError(
                "Cannot find prob(precip > 0.5 mm hr-1) cube in cubelist.")
        template_cube = self._update_metadata(precip_slice)
        new_cube = self._modify_first_guess(
            template_cube, first_guess_lightning_cube, lightning_rate_cube,
            prob_precip_cube, prob_vii_cube)
        # Adjust data so that lightning probability does not decrease too
        # rapidly with distance.
        self.neighbourhood.process(new_cube)
        return new_cube
Example #24
    @staticmethod
    def _threshold_coords_equivalent(forecast, reliability_table):
        """Ensure that the threshold coordinates are identical in the
        reliability table and in the forecast cube. If not raise an
        exception.

        Args:
            forecast (iris.cube.Cube):
                The forecast to be calibrated.
            reliability_table (iris.cube.Cube):
                The reliability table to use for applying calibration.
        Raises:
            ValueError: If the threshold coordinates are different in the two
                        cubes.
        """
        if not (find_threshold_coordinate(forecast)
                == find_threshold_coordinate(reliability_table)):
            raise ValueError("Threshold coordinates do not match between "
                             "reliability table and forecast cube.")
Example #25
 def test_varying_mask_fail(self):
     """Test error is raised when mask varies along collapsing dim"""
     # Check fails when blending along threshold coordinate, as mask
     # varies along this coordinate.
     threshold_coord = find_threshold_coordinate(self.cube_to_collapse)
     message = "The mask on the input cube can only vary along the blend_coord"
     plugin = SpatiallyVaryingWeightsFromMask(threshold_coord.name())
     with self.assertRaisesRegex(ValueError, message):
         plugin._create_template_slice(self.cube_to_collapse)
Example #26
    def process(self, forecast, reliability_table):
        """
        Apply reliability calibration to a forecast. The reliability table
        and the forecast cube must share an identical threshold coordinate.

        Args:
            forecast (iris.cube.Cube):
                The forecast to be calibrated.
            reliability_table (iris.cube.Cube):
                The reliability table to use for applying calibration.
        Returns:
            calibrated_forecast (iris.cube.Cube):
                The forecast cube following calibration.
        """
        self._threshold_coords_equivalent(forecast, reliability_table)
        self.threshold_coord = find_threshold_coordinate(forecast)

        forecast_thresholds = forecast.slices_over(self.threshold_coord)
        reliability_thresholds = reliability_table.slices_over(self.threshold_coord)
        slices = zip(forecast_thresholds, reliability_thresholds)

        uncalibrated_thresholds = []
        calibrated_cubes = iris.cube.CubeList()
        for forecast_threshold, reliability_threshold in slices:

            (
                reliability_probabilities,
                observation_frequencies,
            ) = self._calculate_reliability_probabilities(reliability_threshold)

            if reliability_probabilities is None:
                calibrated_cubes.append(forecast_threshold)
                uncalibrated_thresholds.append(
                    forecast_threshold.coord(self.threshold_coord).points[0]
                )
                continue

            interpolated = self._interpolate(
                forecast_threshold.data,
                reliability_probabilities,
                observation_frequencies,
            )

            calibrated_cubes.append(forecast_threshold.copy(data=interpolated))

        calibrated_forecast = calibrated_cubes.merge_cube()
        self._ensure_monotonicity(calibrated_forecast)

        if uncalibrated_thresholds:
            msg = (
                "The following thresholds were not calibrated due to "
                "insufficient forecast counts in reliability table bins: "
                "{}".format(uncalibrated_thresholds)
            )
            warnings.warn(msg)

        return calibrated_forecast
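
The per-threshold calibration itself is delegated to the private _interpolate method, which is not shown in this listing. The following is only a generic illustration of the underlying idea (mapping forecast probabilities onto observed frequencies by piecewise-linear interpolation), not the plugin's actual implementation:

import numpy as np

# Illustrative reliability-table values, not taken from the source.
reliability_probabilities = np.array([0.0, 0.25, 0.5, 0.75, 1.0])
observation_frequencies = np.array([0.05, 0.2, 0.45, 0.8, 0.95])

forecast_probs = np.array([0.1, 0.6, 0.9])
calibrated = np.interp(forecast_probs, reliability_probabilities,
                       observation_frequencies)
print(calibrated)  # [0.11 0.59 0.89]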
Example #27
 def setUp(self):
     """
     Set up a basic weights cube with 2 thresholds to multiply with
     a cube of one_dimensional weights.
     """
     thresholds = [10, 20]
     data = np.ones((2, 2, 3), dtype=np.float32)
     cycle1 = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 0, 0),
     )
     cycle2 = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 1, 0),
     )
     cycle3 = set_up_probability_cube(
         data,
         thresholds,
         spatial_grid="equalarea",
         time=datetime(2017, 11, 10, 4, 0),
         frt=datetime(2017, 11, 10, 2, 0),
     )
     self.spatial_weights_cube = CubeList([cycle1, cycle2,
                                           cycle3]).merge_cube()
     self.spatial_weights_cube = squeeze(self.spatial_weights_cube)
     self.spatial_weights_cube.rename("weights")
     # This input array has 3 forecast reference times and 2 thresholds.
     # The two thresholds have the same weights.
     self.spatial_weights_cube.data = np.array(
         [
             [[[1, 0, 1], [1, 0, 1]], [[1, 0, 1], [1, 0, 1]]],
             [[[0, 0, 1], [0, 0, 1]], [[0, 0, 1], [0, 0, 1]]],
             [[[1, 1, 1], [1, 1, 1]], [[1, 1, 1], [1, 1, 1]]],
         ],
         dtype=np.float32,
     )
     # Create a one_dimensional weights cube by slicing the
     # larger weights cube.
     # The resulting cube only has a forecast_reference_time coordinate.
     self.one_dimensional_weights_cube = self.spatial_weights_cube[:, 0, 0,
                                                                   0]
     self.one_dimensional_weights_cube.remove_coord(
         "projection_x_coordinate")
     self.one_dimensional_weights_cube.remove_coord(
         "projection_y_coordinate")
     self.one_dimensional_weights_cube.remove_coord(
         find_threshold_coordinate(self.one_dimensional_weights_cube))
     self.one_dimensional_weights_cube.data = np.array([0.2, 0.5, 0.3],
                                                       dtype=np.float32)
     self.plugin = SpatiallyVaryingWeightsFromMask()
Example #28
    def test_incorrect_units(self):
        """Test that check_input_cubes method raises an error if the units are
        incompatible between the input cube and the decision tree."""
        plugin = WeatherSymbols()

        msg = "Unable to convert from"
        threshold_coord = find_threshold_coordinate(self.cubes[0])
        self.cubes[0].coord(threshold_coord).units = Unit('mm kg-1')
        with self.assertRaisesRegex(ValueError, msg):
            plugin.check_input_cubes(self.cubes)
 def test_result_cube_has_no_air_temperature_threshold_coordinate(self):
     """
     Test that the plugin returns a cube with coordinates that
     do not include a threshold-type coordinate.
     """
     result = Plugin()._probabilities_to_percentiles(
         self.cube, self.percentiles, self.bounds_pairing)
     try:
         threshold_coord = find_threshold_coordinate(result)
     except CoordinateNotFoundError:
         threshold_coord = None
     self.assertIsNone(threshold_coord)
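
The CoordinateNotFoundError branch exercised above can be reproduced directly. A sketch with a toy cube, assuming iris and IMPROVER are importable; per the tests in this listing the error message is expected to begin "No threshold coord found":

import numpy as np
import iris
from iris.exceptions import CoordinateNotFoundError
from improver.metadata.probabilistic import find_threshold_coordinate

plain_cube = iris.cube.Cube(
    np.zeros((2, 2), dtype=np.float32), long_name="air_temperature")
try:
    find_threshold_coordinate(plain_cube)
except CoordinateNotFoundError as err:
    print(err)  # expected to begin "No threshold coord found"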
Example #30
 def setUp(self):
     """Set up information for testing."""
     self.changes = {
         'points': [2.0],
         'bounds': [0.1, 2.0],
         'units': 'mm',
         'var_name': 'threshold'
     }
     cube = create_cube_with_threshold()
     self.coord_name = find_threshold_coordinate(cube).name()
     cube.remove_coord(self.coord_name)
     self.cube = iris.util.squeeze(cube)