Example #1
    def test_calibrated_predictor_realizations(self):
        """
        Test that the plugin returns values for the calibrated forecasts,
        which match the expected values when the individual ensemble
        realizations are used as the predictor.
        """
        data = np.array([[239.904142, 251.659267, 263.414393],
                         [275.169518, 286.92465, 298.67975],
                         [310.43488, 322.19, 333.94516]],
                        dtype=np.float32)

        cube = self.current_temperature_forecast_cube
        cube1 = cube.copy()
        cube2 = cube.copy()

        cube2.coord("time").points = cube2.coord("time").points + 3
        cube2.data += 3

        cube = concatenate_cubes(CubeList([cube1, cube2]))

        optimised_coeffs = {}

        for time_slice in cube.slices_over("time"):
            the_date = datetime_from_timestamp(time_slice.coord("time").points)
            optimised_coeffs[the_date] = np.array(
                [5, 1, 0, 0.57, 0.6, 0.6], dtype=np.float32)
        self.coeff_names = ["gamma", "delta", "a", "beta"]

        predictor_cube = cube.copy()
        variance_cube = cube.collapsed("realization", iris.analysis.VARIANCE)

        predictor_of_mean_flag = "realizations"

        plugin = Plugin(self.cube, optimised_coeffs,
                        self.coeff_names)
        forecast_predictor, _, _ = plugin._apply_params(
            predictor_cube, variance_cube, optimised_coeffs,
            self.coeff_names, predictor_of_mean_flag)
        self.assertArrayAlmostEqual(forecast_predictor[0].data, data,
                                    decimal=4)
Example #2
def get_cube(url,
             name_list,
             bbox=None,
             time=None,
             units=None,
             callback=None,
             constraint=None):
    """Only `url` and `name_list` are mandatory.  The kw args are:
    `bbox`, `callback`, `time`, `units`, `constraint`."""

    cubes = iris.load_raw(url, callback=callback)

    cubes = CubeList(
        [cube for cube in cubes if cube.standard_name in name_list])
    if not cubes:
        raise ValueError('Cube does not contain {!r}'.format(name_list))
    cube = cubes.merge_cube()

    if constraint:
        cube = cube.extract(constraint)
        if not cube:
            raise ValueError('No cube using {!r}'.format(constraint))
    if bbox:
        cube = subset(cube, bbox)
        if not cube:
            raise ValueError('No cube using {!r}'.format(bbox))
    if time:
        if isinstance(time, datetime):
            start, stop = time, None
        elif isinstance(time, tuple):
            start, stop = time[0], time[1]
        else:
            raise ValueError('Time must be start or (start, stop).'
                             '  Got {!r}'.format(time))
        cube = time_slice(cube, start, stop)
    if units:
        if cube.units != units:
            cube.convert_units(units)
    return cube
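
A hedged usage sketch of get_cube; the endpoint URL, variable names, and bbox
layout are illustrative assumptions, and the subset and time_slice helpers it
relies on are not shown here:

from datetime import datetime, timedelta

# Hypothetical OPeNDAP endpoint and CF standard names (assumptions).
url = "http://example.com/thredds/dodsC/model_best.ncd"
name_list = ["sea_water_temperature", "sea_surface_temperature"]

# Last 24 hours as a (start, stop) tuple.
stop = datetime.utcnow()
start = stop - timedelta(hours=24)

cube = get_cube(
    url,
    name_list,
    bbox=[-87.4, 24.25, -74.7, 36.70],  # assumed lon/lat ordering for subset()
    time=(start, stop),
    units="degC",
)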
Example #3
    def process(
        self,
        input_cubes: Union[CubeList, List[Cube]],
        timezone_cube: Cube,
        local_time: datetime,
    ) -> Cube:
        """
        Calculates timezone-offset data for the specified UTC output times.

        Args:
            input_cubes:
                Cube or list of cubes of data to extract timezone-offsets from. Must
                contain a time coord spanning all the timezones.
            timezone_cube:
                Cube describing the UTC offset for the local time at each grid location.
                Must have the same spatial coords as input_cube.
            local_time:
                The "local" time of the output cube. This will form a
                scalar "time_in_local_timezone" coord on the output cube, while the
                "time" coord will be auxillary to the spatial coords and will show the
                UTC time that matches the local_time at each point.

        Returns:
            Output local-time cube. The time coord will span the spatial coords.
            The time_in_local_timezone coord will match the local_time supplied.
            All other coords and attributes will match those found on input_cube.
        """
        if isinstance(input_cubes, iris.cube.Cube):
            input_cube = input_cubes
        else:
            input_cube = MergeCubes()(CubeList(input_cubes))

        self.check_timezones_are_unique(timezone_cube)
        self.check_input_cube_dims(input_cube, timezone_cube)
        self.check_input_cube_time(input_cube, local_time)

        self._fill_timezones(input_cube)
        output_cube = self.create_output_cube(input_cube, local_time)

        return output_cube
Example #4
def main():
    # Parameters to compare between forecasts
    path = datadir + 'deterministic/'
    filename = 'rp_physics.nc'
    name = 'Temperature [K]'
    pressure = 500
    lead_time = 7*24

    cs = iris.Constraint(
        name=name, pressure=pressure, forecast_period=lead_time)

    # Load full precision reference forecast
    cube = iris.load_cube(path + filename, cs)

    # Calculate the errors with each different precision used as the `truth`
    diffs = CubeList()
    for pseudo_truth in cube.slices_over('precision'):
        # Add the precision of the `truth` cube as another coordinate
        p = pseudo_truth.coord('precision').points[0]
        p = AuxCoord(p, long_name='reference_precision')

        # Calculate the errors
        diff = rms_diff(cube, pseudo_truth)
        diff.add_aux_coord(p)

        # Store the errors in the cubelist
        diffs.append(diff)

    # Combine all the errors into a single cube with dimensions of
    # precision vs reference_precision
    diffs = diffs.merge_cube()

    # Plot the errors
    qplt.pcolor(diffs, vmin=0, cmap='cubehelix_r')
    precisions = cube.coord('precision').points
    plt.xticks(precisions)
    plt.yticks(precisions)

    plt.show()
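
rms_diff is likewise not shown in this example; a hedged sketch of one
plausible implementation, assuming latitude/longitude coordinates on the
inputs:

import iris.analysis

def rms_diff(cube, reference):
    # Hypothetical helper: root-mean-square difference over the horizontal
    # grid. Drop the scalar 'precision' coord on the reference slice so it
    # does not clash with the dim coord on `cube` during broadcasting.
    ref = reference.copy()
    if ref.coords('precision'):
        ref.remove_coord('precision')
    squared = (cube - ref) ** 2
    rms = squared.collapsed(
        ['latitude', 'longitude'], iris.analysis.MEAN) ** 0.5
    rms.rename('rms_error')
    return rms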
Example #5
    def test_statsmodels_realizations(self, warning_list=None):
        """
        Test that the plugin raises the desired warning if the statsmodels
        module is not found for when the predictor is the ensemble
        realizations.
        """
        # imp is deprecated; importlib offers the same check directly.
        import importlib.util
        statsmodels_found = (
            importlib.util.find_spec('statsmodels') is not None)

        cube = self.cube

        historic_forecasts = CubeList([])
        for index in [1.0, 2.0, 3.0, 4.0, 5.0]:
            temp_cube = cube.copy()
            temp_cube.coord("time").points = (temp_cube.coord("time").points -
                                              index)
            historic_forecasts.append(temp_cube)
        historic_forecasts = historic_forecasts.concatenate_cube()

        current_forecast_predictor = cube
        truth = cube.collapsed("realization", iris.analysis.MAX)
        distribution = "gaussian"
        desired_units = "degreesC"
        predictor_of_mean_flag = "realizations"
        no_of_realizations = 3
        estimate_coefficients_from_linear_model_flag = True

        if not statsmodels_found:
            plugin = Plugin(distribution,
                            desired_units,
                            predictor_of_mean_flag=predictor_of_mean_flag)
            self.assertEqual(len(warning_list), 1)
            self.assertTrue(
                any(item.category == ImportWarning for item in warning_list))
            self.assertIn(
                "The statsmodels can not be imported", str(warning_list[0]))
Example #6
def process(
    steering_flow: inputflow,
    orographic_enhancement: cli.inputcube,
    *cubes: cli.inputcube,
):
    """Calculate optical flow components as perturbations from the model
    steering flow.  Advects the older of the two input radar observations to
    the validity time of the newer observation, then calculates the velocity
    required to adjust this forecast to match the observation.  Sums the
    steering flow and perturbation values to give advection components for
    extrapolation nowcasting.

    Args:
        steering_flow (iris.cube.CubeList):
            Model steering flow as u- and v- wind components.  These must
            have names: "grid_eastward_wind" and "grid_northward_wind".
        orographic_enhancement (iris.cube.Cube):
            Cube containing the orographic enhancement fields.
        cubes (tuple of iris.cube.Cube):
            Two radar precipitation observation cubes.

    Returns:
        iris.cube.CubeList:
            List of u- and v- advection velocities
    """
    from iris.cube import CubeList

    from improver.nowcasting.optical_flow import (
        generate_advection_velocities_from_winds,
    )

    if len(cubes) != 2:
        raise ValueError("Expected 2 radar cubes - got {}".format(len(cubes)))

    advection_velocities = generate_advection_velocities_from_winds(
        CubeList(cubes), steering_flow, orographic_enhancement
    )

    return advection_velocities
Example #7
    def test_forecast_coefficients_and_land_sea_mask(self):
        """Test the addition of a land-sea mask."""
        (
            forecast,
            coeffs,
            additional_predictors,
            land_sea_mask,
            template,
        ) = split_forecasts_and_coeffs(
            CubeList([
                self.realization_forecast,
                self.coefficient_cubelist,
                self.land_sea_mask,
            ]),
            self.land_sea_mask_name,
        )

        self.assertCubeEqual(forecast, self.realization_forecast[0])
        self.assertCubeListEqual(coeffs, self.coefficient_cubelist)
        self.assertIsNone(additional_predictors)
        self.assertCubeEqual(land_sea_mask, self.land_sea_mask[0])
        self.assertIsNone(template)
Example #8
    def _parse_inputs(self, inputs: List[Cube]) -> None:
        """
        Separates the input CubeList into CAPE and precipitation-rate cubes
        with standard units, raising a ValueError if it cannot, or if excess
        data are found.

        Args:
            inputs:
                List of Cubes containing exactly one of CAPE and Precipitation rate.
        Raises:
            ValueError:
                If additional cubes are found
        """
        cubes = CubeList(inputs)
        try:
            (self.cape, self.precip) = cubes.extract(self.cube_names)
        except ValueError as e:
            raise ValueError(
                f"Expected to find cubes of {self.cube_names}, not {[c.name() for c in cubes]}"
            ) from e
        if len(cubes) > 2:
            extras = [
                c.name() for c in cubes if c.name() not in self.cube_names
            ]
            raise ValueError(f"Unexpected Cube(s) found in inputs: {extras}")
        if not spatial_coords_match(inputs):
            raise ValueError(
                f"Spatial coords of input Cubes do not match: {cubes}")
        time_error_msg = self._input_times_error()
        if time_error_msg:
            raise ValueError(time_error_msg)
        self.cape.convert_units("J kg-1")
        self.precip.convert_units("mm h-1")
        if self.model_id_attr:
            if (self.cape.attributes[self.model_id_attr] !=
                    self.precip.attributes[self.model_id_attr]):
                raise ValueError(
                    f"Attribute {self.model_id_attr} does not match on input cubes. "
                    f"{self.cape.attributes[self.model_id_attr]} != "
                    f"{self.precip.attributes[self.model_id_attr]}")
Example #9
    def constrained_inputcubelist_converter(to_convert):
        """Passes the cube and constraints onto maybe_coerce_with.

        Args:
            to_convert (string):
                The filename to be loaded.

        Returns:
            iris.cube.CubeList:
                The loaded cubelist of constrained cubes.

        Raises:
            ValueError:
                Each constraint (either a string or a list) is expected to
                return a single match. An error is raised if no match or more
                than one match is found.
        """
        from improver.utilities.load import load_cube
        from iris.cube import CubeList

        cubelist = CubeList()
        for constr in constraints:
            constr_list = [constr] if isinstance(constr, str) else constr
            found_cubes = []
            for constr_item in constr_list:
                try:
                    found_cubes.append(maybe_coerce_with(
                        load_cube, to_convert, constraints=constr_item))
                except ValueError:
                    pass
            if len(found_cubes) != 1:
                msg = (f"Incorrect number of valid inputs available for the "
                       "{constr} constraint. "
                       f"Number of valid inputs: {len(found_cubes)} "
                       f"The valid inputs found are: {found_cubes}")
                raise ValueError(msg)
            cubelist.extend(found_cubes)

        return cubelist
Example #10
    def test_result_with_vii_longfc(self):
        """Test that the method returns the expected data when vii is
        present and forecast time is 4 hours"""
        expected = self.set_up_vii_input_output()

        # test_vii_null with no precip will now return 0.0067
        expected.data[5, 5] = 0.0067

        # test_vii_small with no and light precip will now return zero
        expected.data[7, 5:7] = 0.0

        # test_vii_large with no and light precip will now return zero
        # and 0.25 for heavy precip
        expected.data[8, 5:8] = [0.0, 0.0, 0.25]
        # No halo - we're only testing this method.
        # 2000m is the grid-length, so halo includes only one pixel.
        plugin = Plugin(2000.0)
        result = plugin(
            CubeList([self.fg_cube, self.ltng_cube, self.precip_cube, self.vii_cube])
        )
        self.assertIsInstance(result, Cube)
        self.assertArrayAlmostEqual(result.data, expected.data)
Example #11
def get_1d_two_param_cube(params=None, n_samples=10):
    """
    Create an ensemble of 1d cubes perturbed over two idealised parameter
    spaces. One of params or n_samples must be provided
    :param np.array params: A list of params to sample the ensemble over
    :param int n_samples: The number of params to sample (between 0. and 1.)
    :return:
    """
    from iris.cube import CubeList

    if params is None:
        params = np.linspace(np.zeros((2, )), np.ones((2, )), n_samples)

    cubes = CubeList([])
    for j, p in enumerate(params):
        c = make_dummy_1d_cube(j)
        # Perturb base data to represent some change in a parameter
        c.data *= simple_polynomial_fn_two_param(*p)
        cubes.append(c)

    ensemble = cubes.concatenate_cube()
    return ensemble
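
make_dummy_1d_cube and simple_polynomial_fn_two_param are not shown in this
example; a hedged sketch of plausible stand-ins under which the concatenation
above works:

import numpy as np
from iris.cube import Cube
from iris.coords import DimCoord

def make_dummy_1d_cube(job_n):
    # Hypothetical helper: a 1d cube tagged with a length-one 'job'
    # dimension so concatenate_cube can join the members along it.
    job = DimCoord([job_n], long_name='job')
    obs = DimCoord(np.arange(5, dtype=np.float64), long_name='obs')
    return Cube(np.ones((1, 5), dtype=np.float32), long_name='dummy',
                dim_coords_and_dims=[(job, 0), (obs, 1)])

def simple_polynomial_fn_two_param(x, y, a=1., b=1.):
    # Hypothetical helper: a smooth response in the two parameters.
    return 1. + a * x ** 2 + b * y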
Example #12
    def find_slices_over_coordinate(cube, coord_name):
        """
        Try slicing over the given coordinate. If the requested coordinate is
        not a dimension coordinate then still return an iterable.

        Args:
            cube : iris.cube.Cube
                Cube to be sliced.
            coord_name : String
                Name of the coordinate to be used for slicing.

        Returns:
            slices_over_coord : iris.cube._SliceIterator or iris.cube.CubeList
                Iterable returned to slice over the requested coordinate, or
                a CubeList.
        """
        try:
            cube.coord(coord_name, dim_coords=True)
            slices_over_coord = cube.slices_over(coord_name)
        except CoordinateNotFoundError:
            slices_over_coord = CubeList([cube])
        return slices_over_coord
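
A minimal usage sketch of both branches, built on a small synthetic cube; the
coordinate names are illustrative and the method is called as a plain
function for brevity:

import numpy as np
from iris.coords import AuxCoord, DimCoord
from iris.cube import Cube

# 2 realizations x 3 points; "realization" is a dimension coordinate,
# "height" a scalar coordinate.
cube = Cube(np.zeros((2, 3), dtype=np.float32), long_name="example")
cube.add_dim_coord(DimCoord([0, 1], standard_name="realization"), 0)
cube.add_dim_coord(DimCoord(np.arange(3.0), long_name="index"), 1)
cube.add_aux_coord(AuxCoord(1.5, long_name="height"))

# Dimension coordinate: one (3,)-shaped slice per realization.
for xs in find_slices_over_coordinate(cube, "realization"):
    print(xs.shape)  # (3,)

# Scalar coordinate: coord(..., dim_coords=True) raises
# CoordinateNotFoundError, so a one-element CubeList comes back.
for xs in find_slices_over_coordinate(cube, "height"):
    print(xs.shape)  # (2, 3)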
Example #13
def process(*cubes: cli.inputcube, radius: float = 10000.0):
    """
    Converts a phase-change-level cube into the
    probability of a specific precipitation phase being found at the surface.

    Args:
        cubes (iris.cube.CubeList or list):
            Contains cubes of the altitude of the phase-change level (this
            can be snow->sleet, or sleet->rain) and the altitude of the
            orography. The name of the phase-change level cube must be
            either "altitude_of_snow_falling_level" or
            "altitude_of_rain_falling_level". The name of the orography
            cube must be "surface_altitude".
        radius (float):
            Neighbourhood radius from which 80th percentile is found (m)

    """
    from improver.psychrometric_calculations.precip_phase_probability import (
        PrecipPhaseProbability,
    )
    from iris.cube import CubeList

    return PrecipPhaseProbability(radius=radius)(CubeList(cubes))
Example #14
    def test_calibrated_variance_realizations(self):
        """
        Test that the plugin returns values for the calibrated forecasts,
        which match the expected values when the individual ensemble
        realizations are used as the predictor.
        """
        data = np.array([[34.333333, 34.333333, 34.333333],
                         [34.333333, 34.333333, 34.333333],
                         [34.333333, 34.333333, 34.333333]])

        cube = self.current_temperature_forecast_cube
        cube1 = cube.copy()
        cube2 = cube.copy()

        cube2.coord("time").points = cube2.coord("time").points + 3
        cube2.data += 3

        cube = concatenate_cubes(CubeList([cube1, cube2]))

        optimised_coeffs = {}

        for time_slice in cube.slices_over("time"):
            the_date = datetime_from_timestamp(time_slice.coord("time").points)
            optimised_coeffs[the_date] = np.array(
                [5, 1, 0, 0.57, 0.6, 0.6])
        self.coeff_names = ["gamma", "delta", "a", "beta"]

        predictor_cube = cube.copy()
        variance_cube = cube.collapsed("realization", iris.analysis.VARIANCE)

        predictor_of_mean_flag = "realizations"

        plugin = Plugin(self.cube, optimised_coeffs,
                        self.coeff_names)
        _, forecast_variance, _ = plugin._apply_params(
            predictor_cube, variance_cube, optimised_coeffs,
            self.coeff_names, predictor_of_mean_flag)
        self.assertArrayAlmostEqual(forecast_variance[0].data, data,
                                    decimal=4)
Example #15
def sum_profile_mask(cubelist_in, height_levels_borders, mask_cell):
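    """Sum each variable in cubelist_in over height slices defined by
    height_levels_borders, applying mask_cell, and return the results
    as a CubeList of masked profile cubes."""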
    from iris.cube import CubeList
    from iris.analysis import SUM
    from dask.array.ma import masked_invalid
    cubelist_out = CubeList()
    for variable in cubelist_in:
        # Sum up values for height slices and mask for all cubes in cubelist_in:
        cube_cell_profile = collapse_profile_mask(
            variable_cube=variable,
            height_levels_borders=height_levels_borders,
            mask_cell=mask_cell,
            coordinate='geopotential_height',
            method=SUM)
        cube_cell_profile.rename(variable.name())
        cube_cell_profile.data = masked_invalid(cube_cell_profile.core_data())
        coord_names = [coord.name() for coord in cube_cell_profile.coords()]
        coord_names.remove('time')
        coord_names.remove('geopotential_height')
        for coord in coord_names:
            cube_cell_profile.remove_coord(coord)
        cubelist_out.append(cube_cell_profile)
    return cubelist_out
Example #16
def test_derive_nonstandard_nofx():
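    """Test a specific derivation."""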

    short_name = 'alb'
    long_name = 'albedo at the surface'
    units = 1
    standard_name = ''

    rsds = Cube([2.])
    rsds.standard_name = 'surface_downwelling_shortwave_flux_in_air'

    rsus = Cube([1.])
    rsus.standard_name = 'surface_upwelling_shortwave_flux_in_air'

    cubes = CubeList([rsds, rsus])

    alb = derive(cubes, short_name, long_name, units, standard_name)

    print(alb)
    assert alb.var_name == short_name
    assert alb.long_name == long_name
    assert alb.units == units
    assert alb.data == [0.5]
Example #17
def process(wind_speed: cli.inputcube, wind_direction: cli.inputcube):
    """Converts speed and direction into individual velocity components.

    Args:
        wind_speed (iris.cube.Cube):
            A cube of wind speed.
        wind_direction (iris.cube.Cube):
            A cube of wind from direction.

    Returns:
        iris.cube.CubeList:
            A cubelist containing the wind u and v component cubes.
    """
    from iris.cube import CubeList

    from improver.wind_calculations.wind_components import ResolveWindComponents

    if not (wind_speed and wind_direction):
        raise TypeError("Neither wind_speed nor wind_direction can be None")

    u_cube, v_cube = ResolveWindComponents()(wind_speed, wind_direction)
    return CubeList([u_cube, v_cube])
Example #18
    def setUp(self):
        """Set up a cube with 2 thresholds to test normalisation. We are
        testing normalising along the leading dimension in this cube."""
        thresholds = [10, 20]
        data = np.ones((2, 2, 3), dtype=np.float32)
        cycle1 = set_up_probability_cube(
            data,
            thresholds,
            spatial_grid="equalarea",
            time=datetime(2017, 11, 10, 4, 0),
            frt=datetime(2017, 11, 10, 0, 0),
        )
        cycle2 = set_up_probability_cube(
            data,
            thresholds,
            spatial_grid="equalarea",
            time=datetime(2017, 11, 10, 4, 0),
            frt=datetime(2017, 11, 10, 1, 0),
        )
        cycle3 = set_up_probability_cube(
            data,
            thresholds,
            spatial_grid="equalarea",
            time=datetime(2017, 11, 10, 4, 0),
            frt=datetime(2017, 11, 10, 2, 0),
        )
        self.spatial_weights_cube = CubeList([cycle1, cycle2,
                                              cycle3]).merge_cube()
        self.spatial_weights_cube = squeeze(self.spatial_weights_cube)
        self.spatial_weights_cube.rename("weights")
        # This input array has 3 forecast reference times and 2 thresholds.
        # The two thresholds have the same weights.
        self.spatial_weights_cube.data = np.array(
            [[[[0.2, 0, 0.2], [0.2, 0, 0.2]], [[0.2, 0, 0.2], [0.2, 0, 0.2]]],
             [[[0, 0, 0.5], [0, 0, 0.5]], [[0, 0, 0.5], [0, 0, 0.5]]],
             [[[0.3, 0.3, 0.3], [0.3, 0.3, 0.3]],
              [[0.3, 0.3, 0.3], [0.3, 0.3, 0.3]]]],
            dtype=np.float32)
        self.plugin = SpatiallyVaryingWeightsFromMask()
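
The setUp above relies on iris merge behaviour: cubes that are identical
apart from a scalar coordinate (here the forecast reference time) merge into
a single cube with that coordinate promoted to a new leading dimension. A
minimal sketch of the same pattern, with illustrative names:

import numpy as np
from iris.coords import AuxCoord, DimCoord
from iris.cube import Cube, CubeList

def weights_cube(frt_hour):
    # A 2-point cube tagged with a scalar coordinate; the tag is the only
    # thing that differs between the cubes to be merged.
    cube = Cube(np.zeros(2, dtype=np.float32), long_name="weights")
    cube.add_dim_coord(DimCoord(np.arange(2.0), long_name="index"), 0)
    cube.add_aux_coord(AuxCoord(frt_hour, long_name="frt_hour"))
    return cube

merged = CubeList(
    [weights_cube(0), weights_cube(1), weights_cube(2)]).merge_cube()
print(merged.shape)  # (3, 2): "frt_hour" becomes the leading dimension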
Example #19
    def test_number_of_percentiles(self):
        """
        Test that the plugin returns a cube with the expected number of
        percentiles.
        """
        cube = self.current_temperature_forecast_cube
        current_forecast_predictor = cube.collapsed(
            "realization", iris.analysis.MEAN)
        current_forecast_variance = cube.collapsed(
            "realization", iris.analysis.VARIANCE)
        raw_forecast = cube.copy()

        predictor_and_variance = CubeList(
            [current_forecast_predictor, current_forecast_variance])

        no_of_percentiles = len(raw_forecast.coord("realization").points)

        plugin = Plugin()
        result = plugin.process(predictor_and_variance, no_of_percentiles)
        self.assertEqual(
            len(raw_forecast.coord("realization").points),
            len(result.coord("percentile_over_realization").points))
Example #20
    def constrained_inputcubelist_converter(to_convert):
        """Passes the cube and constraints onto maybe_coerce_with.

        Args:
            to_convert (str or iris.cube.CubeList):
                A CubeList or a filename to be loaded into a CubeList.

        Returns:
            iris.cube.CubeList:
                The loaded cubelist of constrained cubes.
        """
        from iris import Constraint
        from iris.cube import CubeList

        from improver.utilities.load import load_cubelist

        cubelist = maybe_coerce_with(load_cubelist, to_convert)

        return CubeList(
            cubelist.extract_cube(
                Constraint(cube_func=constr) if callable(constr) else constr)
            for constr in constraints)
Example #21
def cl_cubes():
    """Cubes for ``cl.``."""
    b_coord = AuxCoord(
        [1.0],
        var_name='b',
        long_name='vertical coordinate formula term: b(k)',
        attributes={
            'a': 1,
            'b': '2'
        },
    )
    cl_cube = Cube(
        [0.0],
        var_name='cl',
        standard_name='cloud_area_fraction_in_atmosphere_layer',
        aux_coords_and_dims=[(b_coord.copy(), 0)],
    )
    x_cube = Cube([0.0],
                  long_name='x',
                  aux_coords_and_dims=[(b_coord.copy(), 0)])
    cubes = CubeList([cl_cube, x_cube])
    return cubes
Example #22
def process(*cubes: cli.inputcube, wxtree="high_resolution"):
    """ Processes cube for Weather symbols.

    Args:
        cubes (iris.cube.CubeList):
            A cubelist containing the diagnostics required for the
            weather symbols decision tree, these at co-incident times.
        wxtree (str):
            Weather Code tree: high_resolution or global.

    Returns:
        iris.cube.Cube:
            A cube of weather symbols.
    """
    from iris.cube import CubeList
    from improver.wxcode.weather_symbols import WeatherSymbols

    if not cubes:
        raise RuntimeError("Not enough input arguments. "
                           "See help for more information.")

    return WeatherSymbols(wxtree=wxtree)(CubeList(cubes))
Example #23
def test_filter_realizations(realization_cubes, short_realizations):
    """Run filter_realizations with realization time series where 0 or more are short of the
    final time step"""
    if short_realizations == 0:
        cubes = realization_cubes
        expected_realization_points = [0, 1, 2, 3]
    else:
        cubes = CubeList(realization_cubes[:-short_realizations])
        cubes.append(realization_cubes[-short_realizations][:-1])
        expected_realization_points = [0, 1, 2, 3][:-short_realizations]
    result = filter_realizations(cubes)
    assert isinstance(result, Cube)
    assert np.allclose(cubes[0].coord("time").points,
                       result.coord("time").points)
    assert np.allclose(
        result.coord("realization").points, expected_realization_points)
    if short_realizations == 3:
        # History attribute is retained if there are no differing values
        assert result.attributes["history"] == cubes[0].attributes["history"]
    else:
        # History attribute is removed if differing values are supplied
        assert "history" not in result.attributes.keys()
Example #24
def load_phenomena(url, name_list, callback=None, strict=False):
    """
    Return cube(s) for a certain phenomena in `name_list`.
    The `name_list` must be a collection of CF-1.6 `standard_name`s.

    If `strict` is set to True the function will return **only** one cube,
    if exactly one is expected to exist; otherwise an exception will be
    raised.  (Similar to the iris `extract_strict` method.)

    The user may also pass a `callback` function to coerce the metadata
    to CF-conventions.

    Examples
    --------
    >>> import iris
    >>> url = ("http://omgsrv1.meas.ncsu.edu:8080/thredds/dodsC/fmrc/sabgom/"
    ...        "SABGOM_Forecast_Model_Run_Collection_best.ncd")
    >>> name_list = cf_name_list['sea_water_temperature']
    >>> cubes = load_phenomena(url, name_list)
    >>> cube = load_phenomena(url, name_list, strict=True)
    >>> isinstance(cubes, CubeList)
    True
    >>> isinstance(cube, iris.cube.Cube)
    True
    """

    cubes = iris.load_raw(url, callback=callback)
    cubes = [cube for cube in cubes if _in_list(cube, name_list)]
    cubes = _filter_none(cubes)
    cubes = CubeList(cubes)
    if not cubes:
        raise ValueError('Cannot find {!r} in {}.'.format(name_list, url))
    if strict:
        if len(cubes) == 1:
            return cubes[0]
        else:
            msg = "> 1 cube found!  Expected just one.\n {!r}".format
        raise ValueError(msg(cubes))
    return cubes
Example #25
    def test_basic_load(self):
        file_path = tests.get_data_path(
            ("NetCDF", "unstructured_grid", "theta_nodal_xios.nc"))

        # cube = iris.load_cube(file_path, "theta")
        # Note: cannot use iris.load, as merge does not yet preserve
        # the cube 'ugrid' properties.

        # Here's a thing that at least works.
        loaded_cubes = CubeList(load_cubes(file_path))

        # Just check some expected details.
        self.assertEqual(len(loaded_cubes), 2)

        (cube_0, ) = loaded_cubes.extract(Constraint("theta"))

        # Check the primary cube.
        self.assertIsInstance(cube_0, UCube)
        self.assertEqual(cube_0.var_name, "theta")
        self.assertEqual(cube_0.long_name, "Potential Temperature")
        self.assertEqual(cube_0.shape, (1, 6, 866))
        self.assertEqual(
            cube_0.coord_dims(cube_0.coord("time", dim_coords=True)), (0, ))
        self.assertEqual(cube_0.coord_dims("levels"), (1, ))
        self.assertEqual(cube_0.coords(dimensions=2), [])

        # Check the cube.ugrid object.
        cubegrid = cube_0.ugrid
        self.assertIsInstance(cubegrid, CubeUgrid)
        self.assertEqual(cubegrid.cube_dim, 2)
        self.assertEqual(cubegrid.mesh_location, "node")
        self.assertEqual(cubegrid.topology_dimension, 2)
        self.assertEqual(cubegrid.node_coordinates, ["latitude", "longitude"])

        # Check cube.ugrid.grid : a gridded Grid type.
        ugrid = cubegrid.grid
        self.assertIsInstance(ugrid, UGrid)
        self.assertEqual(ugrid.mesh_name, "Mesh0")
Example #26
def process(*cubes: cli.inputcube,
            operation='+',
            new_name=None,
            use_midpoint=False,
            check_metadata=False):
    r"""Combine input cubes.

    Combine the input cubes into a single cube using the requested operation.

    Args:
        cubes (iris.cube.CubeList or list of iris.cube.Cube):
            An iris CubeList to be combined.
        operation (str):
            An operation to use in combining input cubes. One of:
            +, -, \*, add, subtract, multiply, min, max, mean
        new_name (str):
            New name for the resulting dataset.
        use_midpoint (bool):
            If False (not set), uses the upper bound as the new coordinate
            point for expanded coordinates (eg time for accumulations / max in
            period).  If True, uses the mid-point.
        check_metadata (bool):
            If True, warn on metadata mismatch between inputs.

    Returns:
        result (iris.cube.Cube):
            A cube containing the combined data.
    """
    from improver.cube_combiner import CubeCombiner
    from iris.cube import CubeList
    if not cubes:
        raise TypeError("A cube is needed to be combined.")
    if new_name is None:
        new_name = cubes[0].name()
    result = CubeCombiner(operation, warnings_on=check_metadata)(
        CubeList(cubes), new_name, use_midpoint=use_midpoint)

    return result
Example #27
def test_derive_nonstandard_nofx():
    """Test a specific derivation."""
    short_name = 'alb'
    long_name = 'albedo at the surface'
    units = 1
    standard_name = ''

    rsdscs = Cube([2.])
    rsdscs.short_name = 'rsdscs'
    rsdscs.var_name = rsdscs.short_name

    rsuscs = Cube([1.])
    rsuscs.short_name = 'rsuscs'
    rsuscs.var_name = rsuscs.short_name

    cubes = CubeList([rsdscs, rsuscs])

    alb = derive(cubes, short_name, long_name, units, standard_name)

    assert alb.var_name == short_name
    assert alb.long_name == long_name
    assert alb.units == units
    assert alb.data == [0.5]
Example #28
    def test_interpolation_from_sea_points(self):
        """Test that the phase change level process returns a cube
        containing the expected data. In this case there is a single
        non-sea-level point in the orography. The snow falling level is
        below the surface of the sea, so for the single high point the
        falling level is interpolated from the surrounding sea-level
        points."""
        orog = self.orog
        orog.data = np.zeros_like(orog.data)
        orog.data[2, 2] = 100.0
        land_sea = self.land_sea
        land_sea.data[1, 1] = 1
        result = PhaseChangeLevel(phase_change="snow-sleet",
                                  grid_point_radius=1).process(
                                      CubeList([
                                          self.wet_bulb_temperature_cube,
                                          self.wet_bulb_integral_cube,
                                          orog,
                                          land_sea,
                                      ]))
        expected = self.expected_snow_sleet - 1
        expected[:, 2, 2] += 1
        self.assertIsInstance(result, iris.cube.Cube)
        self.assertArrayAlmostEqual(result.data, expected)
Example #29
    def process(self, cube: Cube) -> Cube:
        """
        Ensure that the cube passed to the maximum_within_vicinity method is
        2d and subsequently merged back together.

        Args:
            cube:
                Thresholded cube.

        Returns:
            Cube containing the occurrences within a vicinity for each
            xy 2d slice, which have been merged back together.
        """

        max_cubes = CubeList([])
        for cube_slice in cube.slices(
            [cube.coord(axis="y"), cube.coord(axis="x")]):
            max_cubes.append(self.maximum_within_vicinity(cube_slice))
        result_cube = max_cubes.merge_cube()

        # Put dimensions back if they were there before.
        result_cube = check_cube_coordinates(cube, result_cube)
        return result_cube
Example #30
    def test_multiple_times_cube(self):
        """Test using one input cube, with one site and multiple times."""
        # Set up expected dataframe.
        expected_data = [[280., np.nan], [np.nan, 281.]]
        expected_df = pd.DataFrame(expected_data, columns=["T+000", "T+001"])
        expected_df.columns.name = "forecast_period"

        # Set up cube with multiple lead times in.
        second_cube = set_up_spot_cube(
            281,
            number_of_sites=1,
            validity_time=1487311200 + 3600,
            forecast_period=1,
        )

        merged_cube = CubeList([self.cube, second_cube])
        merged_cube = merged_cube.concatenate()
        # Set up input dataframe.
        data = [[1487311200, 280.], [1487311200 + 3600, 281.]]
        columns = ["time", "values"]
        input_df = pd.DataFrame(data, columns=columns)
        result = self.plugin.pivot_table(merged_cube[0], input_df)
        assert_frame_equal(expected_df, result)