示例#1
0
    def test_lots_of_percentiles(self):
        """
        Test that the plugin returns an Iris.cube.Cube with the expected
        data values for the percentiles, if lots of percentile values are
        requested.
        """
        # Expected output: one [1, 3, 3] field per requested percentile
        # (ten percentiles in total).
        data = np.array(
            [[[[13.9, -16., 10.2], [-28., -16., -35.2], [-35.2, -37., -37.6]]],
             [[[17.7, 8.25, 10.6], [-4., 8.25, -25.6], [-25.6, -31., -32.8]]],
             [[[21.5, 8.75, 11.], [8.33333333, 8.75, -16.], [-16., -25., -28.]]
              ], [[[25.3, 9.25, 11.4], [9., 9.25, -6.4], [-6.4, -19., -23.2]]],
             [[[29.1, 9.75, 11.8], [9.66666667, 9.75, 3.2], [3.2, -13., -18.4]]
              ],
             [[[32.9, 10.33333333, 15.8], [10.33333333, 10.2, 8.5],
               [8.33333333, -7., -13.6]]],
             [[[36.7, 11., 23.4], [11., 10.6, 9.5], [9., -1., -8.8]]],
             [[[40.5, 11.66666667, 31.], [11.66666667, 11., 10.5],
               [9.66666667, 5., -4.]]],
             [[[44.3, 21.5, 38.6], [21.5, 11.4, 11.5], [10.5, 8.5, 0.8]]],
             [[[48.1, 40.5, 46.2], [40.5, 11.8, 31.], [11.5, 9.5, 5.6]]]],
            dtype=np.float32)

        cube = self.current_temperature_forecast_cube
        # Every tenth percentile from the 5th to the 95th inclusive.
        percentiles = np.arange(5, 100, 10)
        bounds_pairing = (-40, 50)
        plugin = Plugin()
        result = plugin._probabilities_to_percentiles(cube, percentiles,
                                                      bounds_pairing)
        self.assertArrayAlmostEqual(result.data, data, decimal=5)
    def test_transpose_cube_dimensions(self):
        """
        Test that the expected data are returned when the input cube's
        dimensions have been reordered before processing.
        """
        percentiles = [10, 50, 90]
        bounds_pairing = (-40, 50)
        plugin = Plugin()
        cube = self.current_temperature_forecast_cube

        # Reference result for the cube in its original [P, T, Y, X] order.
        reference_result = plugin._probabilities_to_percentiles(
            cube, percentiles, bounds_pairing)

        # Reorder the cube dimensions to [X, Y, T, P] and recompute.
        cube.transpose([3, 2, 1, 0])
        reordered_result = plugin._probabilities_to_percentiles(
            cube, percentiles, bounds_pairing)

        # The reordered input yields a [P, X, Y, T] result; restore the
        # [P, T, Y, X] ordering before comparing with the reference.
        reordered_result.transpose([0, 3, 2, 1])
        self.assertArrayAlmostEqual(reference_result.data,
                                    reordered_result.data)
示例#3
0
    def test_check_single_threshold(self):
        """
        Test that the plugin returns an Iris.cube.Cube with the expected
        data values for the percentiles, if a single threshold is used for
        constructing the percentiles.
        """
        # Expected percentile values derived from a single-threshold input.
        data = np.array(
            [[[[12.2, 8., 12.2], [-16., 8., -30.4], [-30.4, -34., -35.2]]],
             [[[29., 26.66666667, 29.], [23.75, 26.66666667, 8.],
               [8., -10., -16.]]],
             [[[45.8, 45.33333333, 45.8], [44.75, 45.33333333, 41.6],
               [41.6, 29., 3.2]]]],
            dtype=np.float32)

        threshold_coord = find_threshold_coordinate(
            self.current_temperature_forecast_cube)
        # Take only the first threshold slice; next() replaces the
        # previous loop-and-break construct with the idiomatic form.
        cube = next(self.current_temperature_forecast_cube.slices_over(
            threshold_coord))
        percentiles = [10, 50, 90]
        bounds_pairing = (-40, 50)
        plugin = Plugin()
        result = plugin._probabilities_to_percentiles(cube, percentiles,
                                                      bounds_pairing)
        self.assertArrayAlmostEqual(result.data, data, decimal=5)
示例#4
0
    def _extract_error_percentiles(self, error_probability_cube,
                                   error_percentiles_count):
        """Extract error percentile values from the error exceedence probabilities.

        Args:
            error_probability_cube:
                A cube containing error exceedence probabilities.
            error_percentiles_count:
                The number of error percentiles to extract. The resulting
                percentiles will be evenly spaced over the interval (0, 100).

        Returns:
            Cube containing percentile values for the error distributions.
        """
        percentile_values = choose_set_of_percentiles(
            error_percentiles_count, sampling="quantile")
        percentiles_cube = ConvertProbabilitiesToPercentiles().process(
            error_probability_cube, percentiles=percentile_values)
        # Promote "realization" to a dimension coordinate when the
        # conversion has left it as a scalar coordinate, so downstream
        # processing can rely on its presence as a dimension.
        if not percentiles_cube.coord_dims("realization"):
            percentiles_cube = new_axis(percentiles_cube, "realization")

        return percentiles_cube
示例#5
0
 def test_basic(self):
     """Check that the conversion yields a Cube named "air_temperature"."""
     output = Plugin()._probabilities_to_percentiles(
         self.cube, self.percentiles, self.bounds_pairing)
     self.assertIsInstance(output, Cube)
     self.assertEqual(output.name(), "air_temperature")
示例#6
0
    def test_lots_of_probability_thresholds(self):
        """
        Test that the plugin returns an Iris.cube.Cube with the expected
        data values for the percentiles, if there are lots of thresholds.
        """
        # Linearly decreasing exceedance probabilities over 30 thresholds,
        # tiled to shape (30, 1, 3, 3) via the transpose.
        input_probs_1d = np.linspace(1, 0, 30)
        input_probs = np.tile(input_probs_1d, (3, 3, 1, 1)).T

        # Expected output is spatially uniform: one value per percentile.
        data = np.array(
            [[[[2.9, 2.9, 2.9], [2.9, 2.9, 2.9], [2.9, 2.9, 2.9]]],
             [[[14.5, 14.5, 14.5], [14.5, 14.5, 14.5], [14.5, 14.5, 14.5]]],
             [[[26.099998, 26.099998, 26.099998],
               [26.099998, 26.099998, 26.099998],
               [26.099998, 26.099998, 26.099998]]]],
            dtype=np.float32)

        temperature_values = np.arange(0, 30)
        cube = (add_forecast_reference_time_and_forecast_period(
            set_up_probability_threshold_cube(
                input_probs,
                "air_temperature",
                "degreesC",
                forecast_thresholds=temperature_values,
                spp__relative_to_threshold='above')))
        percentiles = [10, 50, 90]
        bounds_pairing = (-40, 50)
        plugin = Plugin()
        result = plugin._probabilities_to_percentiles(cube, percentiles,
                                                      bounds_pairing)

        self.assertArrayAlmostEqual(result.data, data)
    def test_probabilities_not_monotonically_increasing(
            self, warning_list=None):
        """
        Test that the plugin raises a Warning when the probabilities
        of the Cumulative Distribution Function are not monotonically
        increasing.

        NOTE(review): warning_list is presumably populated by a
        warning-capturing test decorator defined elsewhere - confirm.
        """
        # Exceedance probabilities that increase with threshold, which is
        # invalid: the derived CDF would not be monotonically increasing.
        data = np.array([0.05, 0.7, 0.95])
        data = data[:, np.newaxis, np.newaxis, np.newaxis]

        self.current_temperature_forecast_cube = add_forecast_reference_time_and_forecast_period(
            set_up_probability_threshold_cube(
                data,
                "air_temperature",
                "degreesC",
                forecast_thresholds=[8, 10, 12],
                y_dimension_length=1,
                x_dimension_length=1,
                spp__relative_to_threshold="above",
            ))
        cube = self.current_temperature_forecast_cube
        percentiles = [10, 50, 90]
        bounds_pairing = (-40, 50)
        plugin = Plugin()
        warning_msg = "The probability values used to construct the"
        plugin._probabilities_to_percentiles(cube, percentiles, bounds_pairing)
        # The expected warning should be present among the captured warnings.
        self.assertTrue(any(warning_msg in str(item) for item in warning_list))
    def test_simple_check_data_below(self):
        """
        Test that the plugin returns an Iris.cube.Cube with the expected
        data values for the percentiles when input probabilities are given
        for being below a threshold.
        The input cube contains probabilities that values are below a given
        threshold.
        """
        expected = np.array([8.4, 10.61538462, 11.84615385])
        expected = expected[:, np.newaxis, np.newaxis, np.newaxis]

        # Reverse the probability ordering so the values are consistent
        # with a below-threshold interpretation.
        data = np.array([0.95, 0.3, 0.05])[::-1]
        data = data[:, np.newaxis, np.newaxis, np.newaxis]

        self.current_temperature_forecast_cube = add_forecast_reference_time_and_forecast_period(
            set_up_probability_threshold_cube(
                data,
                "air_temperature",
                "degreesC",
                forecast_thresholds=[8, 10, 12],
                y_dimension_length=1,
                x_dimension_length=1,
                spp__relative_to_threshold="above",
            ))
        cube = self.current_temperature_forecast_cube
        # The cube is built as "above" threshold, then relabelled "below"
        # so the plugin exercises its below-threshold code path.
        cube.coord(var_name="threshold"
                   ).attributes["spp__relative_to_threshold"] = "below"
        percentiles = [10, 50, 90]
        bounds_pairing = (-40, 50)
        plugin = Plugin()
        result = plugin._probabilities_to_percentiles(cube, percentiles,
                                                      bounds_pairing)
        self.assertArrayAlmostEqual(result.data, expected)
 def test_basic(self):
     """Check that the conversion returns an Iris.cube.Cube."""
     output = Plugin()._probabilities_to_percentiles(
         self.current_temperature_forecast_cube, [10, 50, 90], (-40, 50))
     self.assertIsInstance(output, Cube)
 def test_check_data_specifying_single_percentile_not_as_list(self):
     """
     Test that the expected data are returned when a single percentile
     is supplied as a scalar value rather than as a list.
     """
     result = Plugin().process(
         self.current_temperature_forecast_cube, percentiles=25)
     self.assertArrayAlmostEqual(
         result.data, np.array([self.percentile_25]), decimal=5)
 def test_basic(self):
     """Check that two numpy arrays are returned."""
     # Flatten the probability data to (thresholds, points) for the call.
     probabilities_for_cdf = (
         self.current_temperature_forecast_cube.data.reshape(3, 9))
     result = Plugin()._add_bounds_to_thresholds_and_probabilities(
         self.threshold_points, probabilities_for_cdf, (-40, 50))
     self.assertIsInstance(result[0], np.ndarray)
     self.assertIsInstance(result[1], np.ndarray)
示例#12
0
 def test_percentile_coord(self):
     """Check the result carries a percentile DimCoord with the requested
     points and percent units.
     """
     result = Plugin()._probabilities_to_percentiles(
         self.cube, self.percentiles, self.bounds_pairing)
     percentile_coord = result.coord("percentile")
     self.assertIsInstance(percentile_coord, DimCoord)
     self.assertArrayEqual(percentile_coord.points, self.percentiles)
     self.assertEqual(percentile_coord.units, unit.Unit("%"))
 def test_return_name(self):
     """Check that the returned cube is given an appropriate name."""
     output = Plugin()._probabilities_to_percentiles(
         self.current_temperature_forecast_cube, [10, 50, 90], (-40, 50))
     self.assertEqual(output.name(), "air_temperature")
 def test_check_data_not_specifying_percentiles(self):
     """
     Test that the expected data are returned when neither percentiles
     nor a number of percentiles is supplied.
     """
     # Defaults should produce the 25th, 50th and 75th percentiles.
     expected_data = np.array(
         [self.percentile_25, self.percentile_50, self.percentile_75])
     result = Plugin().process(self.current_temperature_forecast_cube)
     self.assertArrayAlmostEqual(result.data, expected_data, decimal=5)
 def test_new_endpoints_generation(self):
     """Test that the plugin re-applies the threshold bounds using the
     maximum and minimum threshold points values when the original bounds
     have been exceeded and ecc_bounds_warning has been set."""
     cdf_probabilities = np.array([[0.05, 0.7, 0.95]])
     # Thresholds deliberately straddle the ECC bounds on both sides.
     threshold_points = np.array([-50, 10, 60])
     thresholds, _ = Plugin(
         ecc_bounds_warning=True)._add_bounds_to_thresholds_and_probabilities(
             threshold_points, cdf_probabilities, self.bounds_pairing)
     self.assertEqual(max(thresholds), max(threshold_points))
     self.assertEqual(min(thresholds), min(threshold_points))
 def test_unknown_thresholding(self):
     """Check an unsupported threshold relationship raises an error."""
     cube = self.current_temperature_forecast_cube
     # "between" is not a supported spp__relative_to_threshold value.
     cube.coord(var_name="threshold"
                ).attributes["spp__relative_to_threshold"] = "between"
     msg = "Probabilities to percentiles only implemented for"
     with self.assertRaisesRegex(NotImplementedError, msg):
         Plugin()._probabilities_to_percentiles(
             cube, [10, 50, 90], (-40, 50))
示例#17
0
 def test_metadata(self):
     """Test name and cell methods are updated as expected after conversion"""
     threshold_coord = find_threshold_coordinate(self.cube)
     expected_name = threshold_coord.name()
     expected_units = threshold_coord.units
     # Attach a "max in period" cell method that references the underlying
     # diagnostic; the conversion should strip the comment.
     self.cube.add_cell_method(
         CellMethod("max", coords="time", comments=f"of {expected_name}"))
     result = Plugin().process(self.cube)
     self.assertEqual(result.name(), expected_name)
     self.assertEqual(result.units, expected_units)
     self.assertEqual(
         result.cell_methods[0], CellMethod("max", coords="time"))
 def test_return_coord_units(self):
     """Check the result carries a percentile DimCoord with the requested
     points and percent units.
     """
     requested_percentiles = [10, 50, 90]
     result = Plugin()._probabilities_to_percentiles(
         self.current_temperature_forecast_cube, requested_percentiles,
         (-40, 50))
     percentile_coord = result.coord("percentile")
     self.assertIsInstance(percentile_coord, DimCoord)
     self.assertArrayEqual(percentile_coord.points, requested_percentiles)
     self.assertEqual(percentile_coord.units, unit.Unit("%"))
 def test_result_cube_has_no_air_temperature_threshold_coordinate(self):
     """
     Test that the returned cube's coordinates no longer include the
     air_temperature_threshold coordinate.
     """
     result = Plugin()._probabilities_to_percentiles(
         self.current_temperature_forecast_cube, [10, 50, 90], (-40, 50))
     coord_names = [coord.name() for coord in result.coords()]
     self.assertNotIn("threshold", coord_names)
 def test_bounds_of_threshold_points(self):
     """
     Test that the threshold points are padded at either end with the
     values from the bounds_pairing.
     """
     bounds_pairing = (-40, 50)
     probabilities_for_cdf = (
         self.current_temperature_forecast_cube.data.reshape(3, 9))
     result = Plugin()._add_bounds_to_thresholds_and_probabilities(
         self.threshold_points, probabilities_for_cdf, bounds_pairing)
     # First and last threshold points come from the supplied bounds.
     self.assertArrayAlmostEqual(result[0][0], bounds_pairing[0])
     self.assertArrayAlmostEqual(result[0][-1], bounds_pairing[1])
 def test_check_data_over_specifying_percentiles(self):
     """
     Test that the plugin raises a suitable error when both a number and
     a set of percentiles are specified.
     """
     msg = "Cannot specify both no_of_percentiles and percentiles"
     with self.assertRaisesRegex(ValueError, msg):
         Plugin().process(self.current_temperature_forecast_cube,
                          no_of_percentiles=3,
                          percentiles=[25, 50, 75])
 def test_endpoints_of_distribution_exceeded(self):
     """
     Test that the plugin raises a ValueError when the constant
     end points of the distribution are exceeded by a threshold value
     used in the forecast.
     """
     cdf_probabilities = np.array([[0.05, 0.7, 0.95]])
     # The 60 threshold exceeds the upper ECC bound of 50.
     threshold_points = np.array([8, 10, 60])
     msg = "The calculated threshold values"
     with self.assertRaisesRegex(ValueError, msg):
         Plugin()._add_bounds_to_thresholds_and_probabilities(
             threshold_points, cdf_probabilities, (-40, 50))
示例#23
0
 def test_endpoints_of_distribution_exceeded_warning(self, warning_list=None):
     """
     Test that the plugin raises a warning message when the constant
     end points of the distribution are exceeded by a threshold value
     used in the forecast and the ecc_bounds_warning keyword argument
     has been specified.
     """
     cdf_probabilities = np.array([[0.05, 0.7, 0.95]])
     # The 60 threshold exceeds the upper ECC bound; with
     # ecc_bounds_warning this warns rather than raising.
     threshold_points = np.array([8, 10, 60])
     Plugin(ecc_bounds_warning=True)._add_bounds_to_thresholds_and_probabilities(
         threshold_points, cdf_probabilities, self.bounds_pairing)
     warning_msg = "The calculated threshold values"
     self.assertTrue(
         any(warning_msg in str(caught) for caught in warning_list))
 def test_probability_data(self):
     """
     Test that the probabilities are padded with zeros and ones to
     represent the extreme ends of the Cumulative Distribution Function.
     """
     probabilities_for_cdf = (
         self.current_temperature_forecast_cube.data.reshape(3, 9))
     n_rows = probabilities_for_cdf[:, 0].shape
     result = Plugin()._add_bounds_to_thresholds_and_probabilities(
         self.threshold_points, probabilities_for_cdf, (-40, 50))
     # The CDF must start at probability 0 and end at probability 1.
     self.assertArrayAlmostEqual(result[1][:, 0], np.zeros(n_rows))
     self.assertArrayAlmostEqual(result[1][:, -1], np.ones(n_rows))
示例#25
0
    def process(self, cube: Cube) -> Cube:
        """Expected value calculation and metadata updates.

        Args:
            cube:
                Probabilistic data with a realization, threshold or percentile
                representation.

        Returns:
            Expected value of probability distribution. Same shape as input cube
            but with realization/threshold/percentile coordinate removed.
        """
        if is_probability(cube):
            # TODO: replace this with a direct calculation of the integral
            # over probability thresholds. Converting via percentiles works
            # and has the correct interface, but loses some accuracy and
            # has very high memory usage.
            #
            # 19 percentiles corresponds to 5, 10, 15...95%.
            cube = ConvertProbabilitiesToPercentiles().process(
                cube, no_of_percentiles=19)
        if is_percentile(cube):
            cube = RebadgePercentilesAsRealizations().process(cube)
        result = collapse_realizations(cube)
        result.add_cell_method(CellMethod("mean", coords="realization"))
        return result
    def test_check_data_multiple_timesteps(self):
        """
        Test that the plugin returns an Iris.cube.Cube with the expected
        data values for the percentiles.
        """
        # Expected output for each of the three requested percentiles
        # across the two timesteps and 2x2 spatial grid.
        expected = np.array(
            [
                [[[8.0, 8.0], [-8.0, 8.66666667]], [[8.0, -16.0], [8.0, -16.0]]
                 ],
                [[[12.0, 12.0], [12.0, 12.0]], [[10.5, 10.0], [10.5, 10.0]]],
                [[[31.0, 31.0], [31.0, 31.0]],
                 [[11.5, 11.33333333], [11.5, 12.0]]],
            ],
            dtype=np.float32,
        )

        data = np.array(
            [
                [[[0.8, 0.8], [0.7, 0.9]], [[0.8, 0.6], [0.8, 0.6]]],
                [[[0.6, 0.6], [0.6, 0.6]], [[0.5, 0.4], [0.5, 0.4]]],
                [[[0.4, 0.4], [0.4, 0.4]], [[0.1, 0.1], [0.1, 0.2]]],
            ],
            dtype=np.float32,
        )

        cube = set_up_probability_threshold_cube(
            data,
            "air_temperature",
            "degreesC",
            timesteps=2,
            x_dimension_length=2,
            y_dimension_length=2,
            spp__relative_to_threshold="above",
        )
        # Two forecast times, two forecast periods.
        self.probability_cube = add_forecast_reference_time_and_forecast_period(
            cube,
            time_point=np.array([402295.0, 402296.0]),
            fp_point=[2.0, 3.0])
        cube = self.probability_cube
        percentiles = [20, 60, 80]
        bounds_pairing = (-40, 50)
        plugin = Plugin()
        result = plugin._probabilities_to_percentiles(cube, percentiles,
                                                      bounds_pairing)
        self.assertArrayAlmostEqual(result.data, expected, decimal=5)
示例#27
0
    def test_check_data_spot_forecasts(self):
        """
        Test that the plugin returns an Iris.cube.Cube with the expected
        data values for the percentiles for spot forecasts.
        """
        # Expected output: one row of nine spot-site values per percentile.
        data = np.array(
            [[[15.8, 8., 10.4, -16., 8., -30.4, -30.4, -34., -35.2]],
             [[31., 10., 12., 10., 10., 8., 8., -10., -16.]],
             [[46.2, 31., 42.4, 31., 11.6, 12., 11., 9., 3.2]]],
            dtype=np.float32)

        cube = self.current_temperature_spot_forecast_cube
        percentiles = [10, 50, 90]
        bounds_pairing = (-40, 50)
        plugin = Plugin()
        result = plugin._probabilities_to_percentiles(cube, percentiles,
                                                      bounds_pairing)
        self.assertArrayAlmostEqual(result.data, data, decimal=5)
示例#28
0
 def test_endpoints_of_distribution_exceeded_warning(
         self, warning_list=None):
     """
     Test that the plugin raises a warning message when the constant
     end points of the distribution are exceeded by a threshold value
     used in the forecast and the ecc_bounds_warning keyword argument
     has been specified.
     """
     cdf_probabilities = np.array([[0.05, 0.7, 0.95]])
     threshold_points = np.array([8, 10, 60])
     Plugin(ecc_bounds_warning=True)._add_bounds_to_thresholds_and_probabilities(
         threshold_points, cdf_probabilities, self.bounds_pairing)
     # Full expected warning text, including the out-of-order threshold
     # listing produced when the upper ECC bound is exceeded.
     warning_msg = (
         "The calculated threshold values [-40   8  10  60  50] are "
         "not in ascending order as required for the cumulative distribution "
         "function (CDF). This is due to the threshold values exceeding "
         "the range given by the ECC bounds (-40, 50). The threshold "
         "points that have exceeded the existing bounds will be used as "
         "new bounds.")
     self.assertTrue(
         any(warning_msg in str(caught) for caught in warning_list))
    def test_transpose_cube_dimensions(self):
        """
        Test that the expected data are returned when the input cube's
        dimensions have been reordered before processing.
        """
        # Reference result for the cube in its original [P, Y, X] order.
        reference_result = Plugin()._probabilities_to_percentiles(
            self.cube, self.percentiles, self.bounds_pairing)

        # Reorder the cube dimensions to [X, Y, P] and recompute.
        self.cube.transpose([2, 1, 0])
        reordered_result = Plugin()._probabilities_to_percentiles(
            self.cube, self.percentiles, self.bounds_pairing)

        # The reordered input yields a [P, X, Y] result; restore the
        # [P, Y, X] ordering before comparing with the reference.
        reordered_result.transpose([0, 2, 1])
        self.assertArrayAlmostEqual(reference_result.data,
                                    reordered_result.data)
示例#30
0
def process(
    neighbour_cube: cli.inputcube,
    cube: cli.inputcube,
    lapse_rate: cli.inputcube = None,
    *,
    apply_lapse_rate_correction=False,
    land_constraint=False,
    similar_altitude=False,
    extract_percentiles: cli.comma_separated_list = None,
    ignore_ecc_bounds=False,
    new_title: str = None,
    suppress_warnings=False,
):
    """Module to run spot data extraction.

    Extract diagnostic data from gridded fields for spot data sites. It is
    possible to apply a temperature lapse rate adjustment to temperature data
    that helps to account for differences between the spot site's real altitude
    and that of the grid point from which the temperature data is extracted.

    Args:
        neighbour_cube (iris.cube.Cube):
            Cube of spot-data neighbours and the spot site information.
        cube (iris.cube.Cube):
            Cube containing the diagnostic data to be extracted.
        lapse_rate (iris.cube.Cube):
            Optional cube containing temperature lapse rates. If this cube is
            provided and a screen temperature cube is being processed, the
            lapse rates will be used to adjust the temperature to better
            represent each spot's site-altitude.
        apply_lapse_rate_correction (bool):
            Use to apply a lapse-rate correction to screen temperature data so
            that the data are a better match the altitude of the spot site for
            which they have been extracted.
        land_constraint (bool):
            Use to select the nearest-with-land-constraint neighbour-selection
            method from the neighbour_cube. This means that the grid points
            should be land points except for sites where none were found within
            the search radius when the neighbour cube was created. May be used
            with similar_altitude.
        similar_altitude (bool):
            Use to select the nearest-with-height-constraint
            neighbour-selection method from the neighbour_cube. These are grid
            points that were found to be the closest in altitude to the spot
            site within the search radius defined when the neighbour cube was
            created. May be used with land_constraint.
        extract_percentiles (list or int):
            If set to a percentile value or a list of percentile values,
            data corresponding to those percentiles will be returned. For
            example "25, 50, 75" will result in the 25th, 50th and 75th
            percentiles being returned from a cube of probabilities,
            percentiles or realizations. Deterministic input data will raise
            a warning message.
            Note that for percentiles inputs, the desired percentile(s) must
            exist in the input cube.
        ignore_ecc_bounds (bool):
            Demotes exceptions where calculated percentiles are outside the ECC
            bounds range to warnings.
        new_title (str):
            New title for the spot-extracted data.  If None, this attribute is
            removed from the output cube since it has no prescribed standard
            and may therefore contain grid information that is no longer
            correct after spot-extraction.
        suppress_warnings (bool):
            Suppress warning output. This option should only be used if it
            is known that warnings will be generated but they are not required.

    Returns:
        iris.cube.Cube:
           Cube of spot data.

    Raises:
        ValueError:
            If the percentile diagnostic cube does not contain the requested
            percentile value.
        ValueError:
            If the lapse rate cube was provided but the diagnostic being
            processed is not air temperature.
        ValueError:
            If the lapse rate cube provided does not have the name
            "air_temperature_lapse_rate"
        ValueError:
            If the lapse rate cube does not contain a single valued height
            coordinate.

    Warns:
        warning:
           If diagnostic cube is not a known probabilistic type.
        warning:
            If a lapse rate cube was provided, but the height of the
            temperature does not match that of the data used.
        warning:
            If a lapse rate cube was not provided, but the option to apply
            the lapse rate correction was enabled.

    """

    # Imports are deferred to keep CLI start-up fast.
    import warnings

    import iris
    import numpy as np
    from iris.exceptions import CoordinateNotFoundError

    from improver.ensemble_copula_coupling.ensemble_copula_coupling import (
        ConvertProbabilitiesToPercentiles, )
    from improver.metadata.probabilistic import find_percentile_coordinate
    from improver.percentile import PercentileConverter
    from improver.spotdata.apply_lapse_rate import SpotLapseRateAdjust
    from improver.spotdata.neighbour_finding import NeighbourSelection
    from improver.spotdata.spot_extraction import SpotExtraction
    from improver.utilities.cube_extraction import extract_subcube

    # Choose the neighbour-selection method consistent with the requested
    # land/altitude constraints, then extract the spot data.
    neighbour_selection_method = NeighbourSelection(
        land_constraint=land_constraint,
        minimum_dz=similar_altitude).neighbour_finding_method_name()
    result = SpotExtraction(
        neighbour_selection_method=neighbour_selection_method)(
            neighbour_cube, cube, new_title=new_title)

    # If a probability or percentile diagnostic cube is provided, extract
    # the given percentile if available. This is done after the spot-extraction
    # to minimise processing time; usually there are far fewer spot sites than
    # grid points.
    if extract_percentiles:
        extract_percentiles = [np.float32(x) for x in extract_percentiles]
        try:
            perc_coordinate = find_percentile_coordinate(result)
        except CoordinateNotFoundError:
            # No percentile coordinate: derive percentiles from probability
            # or realization data, or warn for deterministic input.
            if "probability_of_" in result.name():
                result = ConvertProbabilitiesToPercentiles(
                    ecc_bounds_warning=ignore_ecc_bounds)(
                        result, percentiles=extract_percentiles)
                result = iris.util.squeeze(result)
            elif result.coords("realization", dim_coords=True):
                # The fast percentile method cannot handle masked data.
                fast_percentile_method = not np.ma.isMaskedArray(result.data)
                result = PercentileConverter(
                    "realization",
                    percentiles=extract_percentiles,
                    fast_percentile_method=fast_percentile_method,
                )(result)
            else:
                msg = ("Diagnostic cube is not a known probabilistic type. "
                       "The {} percentile could not be extracted. Extracting "
                       "data from the cube including any leading "
                       "dimensions.".format(extract_percentiles))
                if not suppress_warnings:
                    warnings.warn(msg)
        else:
            # A percentile coordinate exists: subset it to the requested
            # percentile value(s).
            constraint = [
                "{}={}".format(perc_coordinate.name(), extract_percentiles)
            ]
            perc_result = extract_subcube(result, constraint)
            if perc_result is not None:
                result = perc_result
            else:
                msg = ("The percentile diagnostic cube does not contain the "
                       "requested percentile value. Requested {}, available "
                       "{}".format(extract_percentiles,
                                   perc_coordinate.points))
                raise ValueError(msg)
    # Check whether a lapse rate cube has been provided and we are dealing with
    # temperature data and the lapse-rate option is enabled.
    if apply_lapse_rate_correction and lapse_rate:
        if not result.name() == "air_temperature":
            msg = ("A lapse rate cube was provided, but the diagnostic being "
                   "processed is not air temperature and cannot be adjusted.")
            raise ValueError(msg)

        if not lapse_rate.name() == "air_temperature_lapse_rate":
            msg = ("A cube has been provided as a lapse rate cube but does "
                   "not have the expected name air_temperature_lapse_rate: "
                   "{}".format(lapse_rate.name()))
            raise ValueError(msg)

        try:
            lapse_rate_height_coord = lapse_rate.coord("height")
        except (ValueError, CoordinateNotFoundError):
            msg = ("Lapse rate cube does not contain a single valued height "
                   "coordinate. This is required to ensure it is applied to "
                   "equivalent temperature data.")
            raise ValueError(msg)

        # Check the height of the temperature data matches that used to
        # calculate the lapse rates. If so, adjust temperatures using the lapse
        # rate values.
        if cube.coord("height") == lapse_rate_height_coord:
            plugin = SpotLapseRateAdjust(
                neighbour_selection_method=neighbour_selection_method)
            result = plugin(result, neighbour_cube, lapse_rate)
        elif not suppress_warnings:
            warnings.warn(
                "A lapse rate cube was provided, but the height of the "
                "temperature data does not match that of the data used "
                "to calculate the lapse rates. As such the temperatures "
                "were not adjusted with the lapse rates.")

    elif apply_lapse_rate_correction and not lapse_rate:
        if not suppress_warnings:
            warnings.warn(
                "A lapse rate cube was not provided, but the option to "
                "apply the lapse rate correction was enabled. No lapse rate "
                "correction could be applied.")

    # Remove the internal model_grid_hash attribute if present.
    result.attributes.pop("model_grid_hash", None)
    return result