import logging

import numpy as np

# Note: helpers such as calculate_alpha_all, make_cubes_alongacross and
# interpolate_to_cubes are assumed to be defined in the same module.


def interpolate_alongacross(Processes,
                            Track,
                            cell,
                            dx=500,
                            width=10000,
                            z_coord='model_level_number',
                            height_levels=np.arange(2, 20000, 2000)):
    '''
    Interpolate all cubes in Processes onto along-track and across-track
    cross-section grids following the tracked cell, one cross-section per
    time step, and concatenate the results.
    '''
    from iris import Constraint
    from iris.cube import CubeList
    Track_cell = Track[Track['cell'] == cell]
    time = Track_cell['time'].values
    x = Track_cell['projection_x_coordinate'].values
    y = Track_cell['projection_y_coordinate'].values
    alpha = calculate_alpha_all(x, y)

    cubelist_Processes_along = CubeList()
    cubelist_Processes_across = CubeList()

    for i, time_i in enumerate(time):
        logging.debug(time_i)
        constraint_time = Constraint(time=time_i)
        grid_along, grid_across = make_cubes_alongacross(
            x=x[i],
            y=y[i],
            alpha=alpha[i],
            cube_sample=Processes.extract(constraint_time)[0],
            dx=dx,
            width=width,
            z_coord=z_coord,
            height_levels=height_levels)

        Processes_along_i, Processes_across_i = interpolate_to_cubes(
            Processes.extract(constraint_time),
            grid_along,
            grid_across,
            z_coord=z_coord)

        cubelist_Processes_along.extend(Processes_along_i)
        cubelist_Processes_across.extend(Processes_across_i)
    Processes_along = cubelist_Processes_along.concatenate()
    Processes_across = cubelist_Processes_across.concatenate()

    return Processes_along, Processes_across
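
A minimal usage sketch, assuming Track is a tobac-style pandas DataFrame with 'cell', 'time' and projection-coordinate columns and Processes is an iris CubeList of process-rate cubes on the matching grid (all names here are illustrative):

# Hypothetical usage: cross-sections along/across the path of cell 42.
Processes_along, Processes_across = interpolate_alongacross(
    Processes, Track, cell=42, dx=500, width=10000)
for cube in Processes_along:
    print(cube.name(), cube.shape)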
Example #2
    def constrained_inputcubelist_converter(to_convert):
        """Passes the cube and constraints onto maybe_coerce_with.

        Args:
            to_convert (string):
                The filename to be loaded.

        Returns:
            iris.cube.CubeList:
                The loaded cubelist of constrained cubes.

        Raises:
            ValueError:
                Each constraint (either a string or a list) is expected to
                return a single match. An error is raised if no match or more
                than one match is found.
        """
        from improver.utilities.load import load_cube
        from iris.cube import CubeList

        cubelist = CubeList()
        for constr in constraints:
            constr_list = [constr] if isinstance(constr, str) else constr
            found_cubes = []
            for constr_item in constr_list:
                try:
                    found_cubes.append(maybe_coerce_with(
                        load_cube, to_convert, constraints=constr_item))
                except ValueError:
                    pass
            if len(found_cubes) != 1:
                msg = (f"Incorrect number of valid inputs available for the "
                       "{constr} constraint. "
                       f"Number of valid inputs: {len(found_cubes)} "
                       f"The valid inputs found are: {found_cubes}")
                raise ValueError(msg)
            cubelist.extend(found_cubes)

        return cubelist
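
For context, a sketch of how this nested converter might be created and used, assuming it closes over a `constraints` argument of an enclosing factory (the factory name and file name below are illustrative):

def create_constrained_inputcubelist_converter(*constraints):
    """Returns a converter loading exactly one cube per constraint."""
    def constrained_inputcubelist_converter(to_convert):
        ...  # body as above
    return constrained_inputcubelist_converter

# Each positional argument is a constraint (or a list of alternatives);
# the converter raises ValueError unless exactly one cube matches each.
load_wind = create_constrained_inputcubelist_converter(
    'wind_speed', ['wind_from_direction', 'wind_to_direction'])
wind_cubes = load_wind('wind.nc')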
Example #3
class TestMdsRegrid(unittest.TestCase):
    def setUp(self):
        # Create sample cubes for testing
        self.topo_aps2 = topo_aps2
        self.topo_aps3 = topo_aps3
        self.lsm_aps2 = lsm_aps2
        self.lsm_aps3 = lsm_aps3
        self.t_scn_aps3 = t_scn_aps3
        self.sfc_prs_aps3 = sfc_prs_aps3
        self.precip_aps3 = precip_aps3
        self.dpt_scn_aps3 = dpt_scn_aps3
        self.q_scn_aps3 = q_scn_aps3

        # Note that u10 sits on a different longitude (x) grid
        self.u10_aps3 = u10_aps3

        self.v10_aps3 = v10_aps3
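
        # The tests below refer to *_src / *_tgt cubes. Assuming the APS3
        # grids are the source and the APS2 grids the target (consistent
        # with the MdsRegridder(topo_aps3, topo_aps2, ...) call further
        # down), define the aliases here.
        self.topo_src, self.topo_tgt = self.topo_aps3, self.topo_aps2
        self.lsm_src, self.lsm_tgt = self.lsm_aps3, self.lsm_aps2
        self.t_scn_src = self.t_scn_aps3
        self.sfc_prs_src = self.sfc_prs_aps3
        self.dpt_scn_src = self.dpt_scn_aps3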

        # For (x, y), the order is (longitude, latitude)
        self.x_tgt = self.topo_tgt.coord('longitude').points
        self.y_tgt = self.topo_tgt.coord('latitude').points
        self.x_src = self.topo_src.coord('longitude').points
        self.y_src = self.topo_src.coord('latitude').points

        # This Iris-derived regridding result is reused in later tests
        self.drv_t_scn_iris = regrid_cube_by_scheme(
                                self.t_scn_src, self.topo_tgt,
                                scheme='linear')
        self.input_cubes = CubeList([])
        self.input_cubes.extend(
            [self.dpt_scn_src, self.sfc_prs_src, self.t_scn_src])

        self.in_grids = CubeList([])
        self.in_grids.extend([self.topo_src, self.lsm_src])
        self.out_grids = CubeList([])
        self.out_grids.extend([self.topo_tgt, self.lsm_tgt])

    def test_regrid_linear_iris(self):
        """
        Test the difference between IRIS and scipy linear regridding
        """
        t_scn_iris_data = self.drv_t_scn_iris.data
        # This interpolation is in the original form.
        # The "interp_by_scipy" is an updated function based on it.
        scipy_interp = RegularGridInterpolator(
            (self.x_src, self.y_src), self.t_scn_src.data, method='linear')

        t_scn_scipy_data = scipy_interp(
            list(product(self.x_tgt, self.y_tgt))).reshape(3, 3)
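
        # Note: .any() collapses each array to a single boolean, so the
        # assertion below only sanity-checks that both results contain
        # nonzero data; an element-wise comparison would be stricter.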

        self.assertAlmostEqual(t_scn_iris_data.any(),
                               t_scn_scipy_data.any())
        
    def test_regrid_linear_coastline_iris(self):
        """
        Test the hypothesis that IRIS regridding already applies the coastline
        correction inside its algorithm; scipy two-stage linear regridding is
        used for comparison.
        """
        # Calculate Scipy two stage data
        drv_t_scn_scipy = two_stage_interp(
            self.t_scn_src, self.topo_tgt, self.lsm_src, self.lsm_tgt)
        scipy_results = drv_t_scn_scipy.data
        iris_results = self.drv_t_scn_iris.data
        assert_almost_equal(iris_results, scipy_results, decimal=2)

    def test_t_scn_by_regrid_linear_iris_coastline_correction(self):
        """
        Testing the regrid_iris_coastline_correction
        """
        # Need to find the coastline points first
        # xv, yv = np.meshgrid(self.x_aps2, self.y_aps2)
        # tgrid = np.dstack((xv, yv))

        # weight = self.interpolator.compute_interp_weights(tgrid)
        # print(weight)
        # output_data = interpolator.interp_using_pre_computed_weights(weights)
        # Then add land/sea mask for coastline correction

        regridder = MdsRegridder(self.topo_aps3, self.topo_aps2,
                                 self.lsm_aps3, self.lsm_aps2)
        t_scn_by_iris_cc_list = \
            regridder._regrid_iris_coastline_correction(self.t_scn_aps3)
        [t_scn_by_iris_cc] = t_scn_by_iris_cc_list.extract('air_temperature')
        # Results should differ with/without coastline correction.
        # For this sample, only the values at two mixed points change.
        # Here 'Expected value' means the value after the coastline correction
        # Index  -/x   Delta     Target   Expected value   Actual Value
        # 1     x   0.031209  0.002000    300.695034      300.663825
        # 11     x   1.302968  0.002000    300.765625      299.462657
        self.show_difference(self.drv_t_scn_iris, t_scn_by_iris_cc,
                             entropy=1.334, accuracy=1.31)

    def test_t_scn_by_regrid_nearest_iris_coastline(self):
        """
        Test nearest regrid with coastline correction
        :return: Just show the difference between linear and nearest
        """
        # Get GFE results
        # drv_gfe_coastline_nearest = regrid_nearest_gfe_coastline_pigps(
        #    self.input_cubes, self.in_grids, self.out_grids)
        # drv_t_scn_gfe_coastline = extract_cube_by_name(
        #   drv_gfe_coastline_nearest, 'air_temperature')
        # Note: GFE results have 'nan' for the third column

        # Get Iris results
        drv_iris_coastline_nearest = regrid_nearest_iris_coastline(
            self.input_cubes, self.in_grids, self.out_grids)
        drv_t_scn_iris_coastline = extract_cube_by_name(
            drv_iris_coastline_nearest, 'air_temperature')

        self.show_difference(self.drv_t_scn_iris, drv_t_scn_iris_coastline,
                             entropy=7.050, accuracy=1.7)
	
    def show_difference(self, cube_a, cube_b, entropy=None, accuracy=None):
        """
        Show the difference between two data sets.
        :param cube_a: a cube holding the data set under examination
        :param cube_b: a cube holding the expected results
        :param accuracy: per-point tolerance on the absolute delta
        :param entropy: expected sum of absolute deltas over all points
        :return: shows the difference if the check fails; otherwise passes
        """
        result = cube_a.data.flatten()
        expected = cube_b.data.flatten()

        if accuracy is None:
            accuracy = 0.001
        if entropy is None:
            entropy = 0.02
        # Broadcast the scalar tolerance to one value per point.
        accuracy = np.repeat(accuracy, len(result))
        # Now assert the data is as expected.
        delta = np.abs(result - expected)
        msg = ('\nEntropy: {}\nExpected entropy: {}\n'
               'Index  -/x   Delta     Target   Expected value   Actual Value'
               ''.format(delta.sum(), entropy))
        template = '\n{0:3}     {1:2}  {2:6f}  {3:6f}    {4:6f}      {5:6f}'

        for i, (r_del, t_del, r, t) in enumerate(zip(delta, accuracy,
                                                     result, expected)):
            msg += template.format(i, '-' if r_del < t_del else 'x',
                                   r_del, t_del,
                                   t, r)
        # Ensure each accuracy component falls below the target.
        assert_array_less(delta, accuracy, msg)
        # Ensure that our entropy is close to the expected one.
        # If this fails because the result is now smaller than expected, good!
        # It means we can tighten the expected entropy *AND* the target delta.
        if np.abs(entropy - delta.sum()) > 0.001:
            self.fail(msg)
Example #4
def interpolate_alongacross_mean(Processes,
                                 Track,
                                 cell,
                                 dx,
                                 width,
                                 z_coord='model_level_number',
                                 height_level_borders=np.arange(
                                     0, 20000, 2000)):
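    '''
    As interpolate_alongacross, but interpolates onto layer means between
    the given height level borders instead of discrete height levels.
    '''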

    from iris import Constraint
    from iris.cube import CubeList
    Track_cell = Track[Track['cell'] == cell]
    time = Track_cell['time'].values
    x = Track_cell['projection_x_coordinate'].values
    y = Track_cell['projection_y_coordinate'].values
    alpha = calculate_alpha_all(x, y)

    cubelist_Processes_along = CubeList()
    cubelist_Processes_across = CubeList()

    for i, time_i in enumerate(time):
        logging.debug(time_i)

        constraint_time = Constraint(time=time_i)

        n_add_width = 2
        box_slice = [
            [x[i] - (width + n_add_width) * dx,
             x[i] + (width + n_add_width) * dx],
            [y[i] - (width + n_add_width) * dx,
             y[i] + (width + n_add_width) * dx],
        ]

        x_min = box_slice[0][0]
        x_max = box_slice[0][1]
        y_min = box_slice[1][0]
        y_max = box_slice[1][1]

        # The lambda argument is an iris coordinate cell; name it `v` to
        # avoid shadowing the `cell` (track cell id) function argument.
        constraint_x = Constraint(
            projection_x_coordinate=lambda v: int(x_min) < v < int(x_max))
        constraint_y = Constraint(
            projection_y_coordinate=lambda v: int(y_min) < v < int(y_max))

        constraint = constraint_time & constraint_x & constraint_y

        grid_along, grid_across = make_cubes_alongacross_mean(
            x=x[i],
            y=y[i],
            alpha=alpha[i],
            cube_sample=Processes.extract(constraint)[0],
            dx=dx,
            width=width,
            height_level_borders=height_level_borders)

        Processes_along_i, Processes_across_i = interpolate_to_cubes_mean(
            Processes.extract(constraint),
            grid_along,
            grid_across,
            height_level_borders=height_level_borders)

        cubelist_Processes_along.extend(Processes_along_i)
        cubelist_Processes_across.extend(Processes_across_i)
    Processes_along = cubelist_Processes_along.concatenate()
    Processes_across = cubelist_Processes_across.concatenate()

    return Processes_along, Processes_across
Example #5
def extract_cell_cubes_subset_2D(cubelist_in,
                                 mask,
                                 track,
                                 cell,
                                 z_coord='model_level_number'):
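    '''
    Extract constrained 2D subsets of the input cubes around a tracked cell
    and sum each quantity over the cell's surface mask at every time step.

    Returns the merged CubeList of per-step sums and a copy of the cell's
    track with one column added per summed quantity.
    '''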
    import iris
    from iris import Constraint
    from iris.cube import CubeList
    import numpy as np
    from tobac import mask_cell_surface, get_bounding_box
    from copy import deepcopy

    track_i = track[track['cell'] == cell]

    cubelist_cell_sum = CubeList()

    for time_i in track_i['time'].values:

        logging.debug('start extracting cubes for cell ' + str(cell) +
                      ' and time ' + str(time_i))

        constraint_time = Constraint(time=time_i)
        mask_i = mask.extract(constraint_time)
        mask_cell_surface_i = mask_cell_surface(mask_i,
                                                cell,
                                                track_i,
                                                masked=False,
                                                z_coord=z_coord)

        x_dim = mask_cell_surface_i.coord_dims('projection_x_coordinate')[0]
        y_dim = mask_cell_surface_i.coord_dims('projection_y_coordinate')[0]
        x_coord = mask_cell_surface_i.coord('projection_x_coordinate')
        y_coord = mask_cell_surface_i.coord('projection_y_coordinate')

        if (mask_cell_surface_i.core_data() > 0).any():
            box_mask_i = get_bounding_box(mask_cell_surface_i.core_data(),
                                          buffer=1)

            box_mask = [
                [x_coord.points[box_mask_i[x_dim][0]],
                 x_coord.points[box_mask_i[x_dim][1]]],
                [y_coord.points[box_mask_i[y_dim][0]],
                 y_coord.points[box_mask_i[y_dim][1]]],
            ]
        else:
            box_mask = [[np.nan, np.nan], [np.nan, np.nan]]

        width = 20
        dx = 500
        x = track_i[track_i['time'].values ==
                    time_i]['projection_x_coordinate'].values[0]
        y = track_i[track_i['time'].values ==
                    time_i]['projection_y_coordinate'].values[0]

        n_add_width = 2

        box_slice = [[
            x - (width + n_add_width) * dx, x + (width + n_add_width) * dx
        ], [y - (width + n_add_width) * dx, y + (width + n_add_width) * dx]]

        x_min = np.nanmin([box_mask[0][0], box_slice[0][0]])
        x_max = np.nanmax([box_mask[0][1], box_slice[0][1]])
        y_min = np.nanmin([box_mask[1][0], box_slice[1][0]])
        y_max = np.nanmax([box_mask[1][1], box_slice[1][1]])

        # `v` is the iris coordinate cell value (avoids shadowing `cell`).
        constraint_x = Constraint(
            projection_x_coordinate=lambda v: int(x_min) < v < int(x_max))
        constraint_y = Constraint(
            projection_y_coordinate=lambda v: int(y_min) < v < int(y_max))

        constraint = constraint_time & constraint_x & constraint_y

        mask_cell_surface_i = mask_cell_surface_i.extract(constraint_x
                                                          & constraint_y)
        cubelist_i = cubelist_in.extract(constraint)

        cubelist_cell_sum.extend(
            sum_mask_surface(cubelist_i, mask_cell_surface_i))
    logging.debug(str(cubelist_cell_sum))
    cubelist_cell_sum_out = cubelist_cell_sum.merge()
    logging.debug(str(cubelist_cell_sum_out))
    for cube in cubelist_cell_sum_out:
        logging.debug(str(cube))
        logging.debug(str(cube.attributes))
        logging.debug(str(cube.coords()))

    if len(cubelist_cell_sum_out) == 6:
        logging.debug(
            str(
                iris.util.describe_diff(cubelist_cell_sum_out[0],
                                        cubelist_cell_sum_out[3])))
        logging.debug(
            str(
                iris.util.describe_diff(cubelist_cell_sum_out[1],
                                        cubelist_cell_sum_out[4])))
        logging.debug(
            str(
                iris.util.describe_diff(cubelist_cell_sum_out[2],
                                        cubelist_cell_sum_out[5])))

    track_cell = deepcopy(track_i)
    for cube in cubelist_cell_sum_out:
        logging.debug(f'cube.shape: {cube.shape}')
        logging.debug(f'len(track_cell): {len(track_cell)}')
        logging.debug(f'cube.coord("time"): {cube.coord("time")}')
        logging.debug(f'track_cell[time]: {track_cell["time"]}')

        track_cell[cube.name()] = cube.core_data()

    return cubelist_cell_sum_out, track_cell
Example #6
def extract_cell_cubes_subset(cubelist_in,
                              mask,
                              track,
                              cell,
                              z_coord='model_level_number',
                              height_levels=None):
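    '''
    Extract constrained 3D subsets of the input cubes around a tracked cell,
    sum each quantity over the cell mask as a vertical profile per time step,
    then integrate the profiles over geopotential height.

    Returns the profile CubeList, the vertically integrated CubeList, and a
    copy of the cell's track with one column added per integrated quantity.
    '''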

    from iris.analysis import SUM
    from iris import Constraint
    from iris.cube import CubeList
    from iris.coords import AuxCoord
    import numpy as np
    from tobac import mask_cell, mask_cell_surface, get_bounding_box
    from copy import deepcopy

    track_i = track[track['cell'] == cell]

    cubelist_cell_integrated_out = CubeList()
    cubelist_cell_sum = CubeList()

    for time_i in track_i['time'].values:

        logging.debug('start extracting cubes for cell ' + str(cell) +
                      ' and time ' + str(time_i))

        constraint_time = Constraint(time=time_i)
        mask_i = mask.extract(constraint_time)
        mask_cell_i = mask_cell(mask_i, cell, track_i, masked=False)
        mask_cell_surface_i = mask_cell_surface(mask_i,
                                                cell,
                                                track_i,
                                                masked=False,
                                                z_coord=z_coord)

        x_dim = mask_cell_surface_i.coord_dims('projection_x_coordinate')[0]
        y_dim = mask_cell_surface_i.coord_dims('projection_y_coordinate')[0]
        x_coord = mask_cell_surface_i.coord('projection_x_coordinate')
        y_coord = mask_cell_surface_i.coord('projection_y_coordinate')

        if (mask_cell_surface_i.core_data() > 0).any():
            box_mask_i = get_bounding_box(mask_cell_surface_i.core_data(),
                                          buffer=1)

            box_mask = [
                [x_coord.points[box_mask_i[x_dim][0]],
                 x_coord.points[box_mask_i[x_dim][1]]],
                [y_coord.points[box_mask_i[y_dim][0]],
                 y_coord.points[box_mask_i[y_dim][1]]],
            ]
        else:
            box_mask = [[np.nan, np.nan], [np.nan, np.nan]]

        width = 20
        dx = 500
        x = track_i[track_i['time'].values ==
                    time_i]['projection_x_coordinate'].values[0]
        y = track_i[track_i['time'].values ==
                    time_i]['projection_y_coordinate'].values[0]

        n_add_width = 2

        box_slice = [[
            x - (width + n_add_width) * dx, x + (width + n_add_width) * dx
        ], [y - (width + n_add_width) * dx, y + (width + n_add_width) * dx]]

        x_min = np.nanmin([box_mask[0][0], box_slice[0][0]])
        x_max = np.nanmax([box_mask[0][1], box_slice[0][1]])
        y_min = np.nanmin([box_mask[1][0], box_slice[1][0]])
        y_max = np.nanmax([box_mask[1][1], box_slice[1][1]])

        # `v` is the iris coordinate cell value (avoids shadowing `cell`).
        constraint_x = Constraint(
            projection_x_coordinate=lambda v: int(x_min) < v < int(x_max))
        constraint_y = Constraint(
            projection_y_coordinate=lambda v: int(y_min) < v < int(y_max))

        constraint = constraint_time & constraint_x & constraint_y

        mask_cell_i = mask_cell_i.extract(constraint_x & constraint_y)
        mask_cell_surface_i = mask_cell_surface_i.extract(constraint_x
                                                          & constraint_y)

        cubelist_i = cubelist_in.extract(constraint)

        cubelist_cell_sum.extend(
            sum_profile_mask(cubelist_i, height_levels, mask_cell_i))
    cubelist_cell_sum_out = cubelist_cell_sum.merge()
    for cube in cubelist_cell_sum_out:
        cell_time_coord = AuxCoord(
            track_i['time_cell'].dt.total_seconds().values,
            units='s',
            long_name='time_cell')
        cube.add_aux_coord(cell_time_coord, cube.coord_dims('time')[0])

    for cube in cubelist_cell_sum_out:
        cubelist_cell_integrated_out.append(
            cube.collapsed('geopotential_height', SUM))

    track_cell_integrated = deepcopy(track_i)
    for cube in cubelist_cell_integrated_out:
        track_cell_integrated[cube.name()] = cube.core_data()

    return cubelist_cell_sum_out, cubelist_cell_integrated_out, track_cell_integrated
Example #7
    def _get_cube(self,
                  file_list,
                  climatology=False,
                  overlay_probability_levels=False):
        """
        Get an iris cube based on the given files using selection criteria
        from the input_data.

        @param file_list (list[str]): a list of file names to retrieve data from
        @param climatology (boolean): if True extract the climatology data
        @param overlay_probability_levels (boolean): if True only include the
            10th, 50th and 90th percentile data

        @return an iris cube; may be 'None' if overlay_probability_levels=True
        """
        if climatology is True:
            LOG.info("_get_cube for climatology")
        elif overlay_probability_levels is True:
            LOG.info("_get_cube, overlay probability levels")
        else:
            LOG.info("_get_cube")

        if LOG.getEffectiveLevel() == logging.DEBUG:
            LOG.debug("_get_cube from %s files", len(file_list))
            for fpath in file_list:
                LOG.debug(" - FILE: %s", fpath)

        # Load the cubes
        cubes = CubeList()
        try:
            for file_path in file_list:
                f_list = glob.glob(file_path)
                cube_list = [iris.load_cube(f) for f in f_list]
                cubes.extend(cube_list)

        except IOError as ex:
            if overlay_probability_levels is True:
                # not all variables have corresponding probabilistic data
                return None
            for file_name in file_list:
                file_name = file_name.split("*")[0]
                if not path.exists(file_name):
                    LOG.error("File not found: %s", file_name)
            raise UKCPDPDataNotFoundException from ex

        if overlay_probability_levels is True:
            collection = COLLECTION_PROB
        else:
            collection = self.input_data.get_value(InputType.COLLECTION)

        # Remove time_bnds cubes
        if collection == COLLECTION_PROB:
            unfiltered_cubes = cubes
            cubes = CubeList()
            for cube in unfiltered_cubes:
                if cube.name() != "time_bnds":
                    cubes.append(cube)

        # Different creation dates will stop cubes concatenating, so let's
        # remove them
        for cube in cubes:
            coords = cube.coords(var_name="creation_date")
            for coord in coords:
                cube.remove_coord(coord)

        if len(cubes) == 0:
            LOG.warning("No data was retrieved from the following files:%s",
                        file_list)
            raise UKCPDPDataNotFoundException(
                "No data found for given selection options")

        LOG.debug("First cube:\n%s", cubes[0])
        LOG.debug("Concatenate cubes:\n%s", cubes)

        iris.experimental.equalise_cubes.equalise_attributes(cubes)
        unify_time_units(cubes)

        try:
            cube = cubes.concatenate_cube()
        except iris.exceptions.ConcatenateError as ex:
            LOG.error("Failed to concatenate cubes:\n%s\n%s", ex, cubes)
            error_cubes = CubeList()
            for error_cube in cubes:
                error_cubes.append(error_cube)
                try:
                    LOG.info("Appending %s",
                             error_cube.coord("ensemble_member_id").points[0])
                except iris.exceptions.CoordinateNotFoundError:
                    pass
                try:
                    error_cubes.concatenate_cube()
                except iris.exceptions.ConcatenateError as ex:
                    message = ""
                    try:
                        message = " {}".format(
                            error_cube.coord("ensemble_member_id").points[0])
                    except iris.exceptions.CoordinateNotFoundError:
                        pass
                    LOG.error(
                        "Error when concatenating cube%s:\n%s\n%s",
                        message,
                        ex,
                        error_cube,
                    )
                    break

            # pylint: disable=W0707
            raise UKCPDPDataNotFoundException(
                "No data found for given selection options")

        LOG.debug("Concatenated cube:\n%s", cube)

        if climatology is True:
            # generate a time slice constraint based on the baseline
            time_slice_constraint = self._time_slice_selector(True)
        else:
            # generate a time slice constraint
            time_slice_constraint = self._time_slice_selector(False)
        if time_slice_constraint is not None:
            cube = cube.extract(time_slice_constraint)

        if cube is None:
            if time_slice_constraint is not None:
                LOG.warning(
                    "Time slice constraint resulted in no cubes being "
                    "returned: %s",
                    time_slice_constraint,
                )
            raise UKCPDPDataNotFoundException(
                "Selection constraints resulted in no data being"
                " selected")

        # generate a temporal constraint
        temporal_constraint = self._get_temporal_selector()
        if temporal_constraint is not None:
            cube = cube.extract(temporal_constraint)

        if cube is None:
            if temporal_constraint is not None:
                LOG.warning(
                    "Temporal constraint resulted in no cubes being "
                    "returned: %s",
                    temporal_constraint,
                )
            raise UKCPDPDataNotFoundException(
                "Selection constraints resulted in no data being"
                " selected")

        # extract 10, 50 and 90 percentiles
        if overlay_probability_levels is True:
            cube = get_probability_levels(cube, False)

        # generate an area constraint
        area_constraint = self._get_spatial_selector(cube, collection)
        if area_constraint is not None:
            cube = cube.extract(area_constraint)
            if self.input_data.get_area_type() == AreaType.BBOX:
                # Make sure we still have x, y dimension coordinates for
                # bboxes
                cube = self._promote_x_y_coords(cube)

        if cube is None:
            if area_constraint is not None:
                LOG.warning(
                    "Area constraint resulted in no cubes being "
                    "returned: %s",
                    area_constraint,
                )
            raise UKCPDPDataNotFoundException(
                "Selection constraints resulted in no data being"
                " selected")

        return cube
Example #8
class SetupNormalInputs(SetupInputs, SetupCubes):
    """Create a class for setting up cubes for testing."""
    @ManageWarnings(
        ignored_messages=["Collapsing a non-contiguous coordinate."],
        warning_types=[UserWarning],
    )
    def setUp(self):
        """Set up expected inputs."""
        super().setUp()
        # Set up cubes and associated data arrays for temperature.
        self.forecast_predictor_mean = CubeList([
            self.historic_temperature_forecast_cube.collapsed(
                "realization", iris.analysis.MEAN)
        ])
        self.forecast_predictor_realizations = CubeList([
            (self.historic_temperature_forecast_cube.copy())
        ])
        self.forecast_predictor_spot = CubeList([
            self.historic_forecast_spot_cube.collapsed("realization",
                                                       iris.analysis.MEAN)
        ])

        self.fp_additional_predictor_spot = CubeList(
            [self.forecast_predictor_spot[0].copy()])
        self.fp_additional_predictor_spot.extend([self.spot_altitude_cube])

        self.forecast_variance = self.historic_temperature_forecast_cube.collapsed(
            "realization", iris.analysis.VARIANCE)
        self.forecast_variance_spot = self.forecast_predictor_spot[0].copy()
        self.forecast_variance_spot.data = self.forecast_variance_spot.data / 10.0

        self.truth = self.historic_temperature_forecast_cube.collapsed(
            "realization", iris.analysis.MAX)
        self.forecast_predictor_data = (
            self.forecast_predictor_mean[0].data.flatten().astype(np.float64))
        self.forecast_predictor_data_realizations = convert_cube_data_to_2d(
            self.historic_temperature_forecast_cube.copy()).astype(np.float64)
        self.forecast_variance_data = self.forecast_variance.data.flatten(
        ).astype(np.float64)
        self.truth_data = self.truth.data.flatten().astype(np.float64)

        spatial_product = np.prod(self.truth.shape[-2:])
        self.initial_guess_spot_mean = np.broadcast_to(
            self.initial_guess_for_mean,
            (
                spatial_product,
                len(self.initial_guess_for_mean),
            ),
        )
        self.initial_guess_spot_realizations = np.broadcast_to(
            self.initial_guess_for_realization,
            (
                spatial_product,
                len(self.initial_guess_for_realization),
            ),
        )
        self.ig_spot_mean_additional_predictor = np.broadcast_to(
            self.initial_guess_mean_additional_predictor,
            (
                spatial_product,
                len(self.initial_guess_mean_additional_predictor),
            ),
        )