Example No. 1
 def test_no_y_coord(self):
     cube = Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo")
     x_coord = DimCoord([10, 11, 12, 13, 14], long_name="bar")
     cube.add_dim_coord(x_coord, 1)
     data_frame = iris.pandas.as_data_frame(cube)
     self.assertArrayEqual(data_frame, cube.data)
     self.assertString(str(data_frame), tests.get_result_path(("pandas", "as_dataframe", "no_y_coord.txt")))
Example No. 2
 def test_time_gregorian(self):
     cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="ts")
     time_coord = DimCoord([0, 100.1, 200.2, 300.3, 400.4], long_name="time", units="days since 2000-01-01 00:00")
     cube.add_dim_coord(time_coord, 0)
     series = iris.pandas.as_series(cube)
     self.assertArrayEqual(series, cube.data)
     self.assertString(str(series), tests.get_result_path(("pandas", "as_series", "time_gregorian.txt")))
Example No. 3
 def test_simple(self):
     cube = Cube(np.array([0, 1, 2, 3, 4.4]), long_name="foo")
     dim_coord = DimCoord([5, 6, 7, 8, 9], long_name="bar")
     cube.add_dim_coord(dim_coord, 0)
     series = iris.pandas.as_series(cube)
     self.assertArrayEqual(series, cube.data)
     self.assertString(str(series), tests.get_result_path(("pandas", "as_series", "simple.txt")))
Example No. 4
class TestOSGBToLatLon(tests.IrisTest):
    def setUp(self):
        path = tests.get_data_path(
            ('NIMROD', 'uk2km', 'WO0000000003452',
             '201007020900_u1096_ng_ey00_visibility0180_screen_2km'))
        self.src = iris.load_cube(path)[0]
        # Cast up to float64, to work around numpy<=1.8 bug with means of
        # arrays of 32bit floats.
        self.src.data = self.src.data.astype(np.float64)
        self.grid = Cube(np.empty((73, 96)))
        cs = GeogCS(6370000)
        lat = DimCoord(np.linspace(46, 65, 73), 'latitude', units='degrees',
                       coord_system=cs)
        lon = DimCoord(np.linspace(-14, 8, 96), 'longitude', units='degrees',
                       coord_system=cs)
        self.grid.add_dim_coord(lat, 0)
        self.grid.add_dim_coord(lon, 1)

    def _regrid(self, method):
        regridder = Regridder(self.src, self.grid, method, 'mask')
        result = regridder(self.src)
        return result

    def test_linear(self):
        res = self._regrid('linear')
        self.assertArrayShapeStats(res, (73, 96), -16100.351951, 5603.850769)

    def test_nearest(self):
        res = self._regrid('nearest')
        self.assertArrayShapeStats(res, (73, 96), -16095.965585, 5612.657155)
Example No. 5
class Test_rolling_window(tests.IrisTest):
    def setUp(self):
        self.cube = Cube(np.arange(6))
        val_coord = DimCoord([0, 1, 2, 3, 4, 5], long_name="val")
        month_coord = AuxCoord(['jan', 'feb', 'mar', 'apr', 'may', 'jun'],
                               long_name='month')
        self.cube.add_dim_coord(val_coord, 0)
        self.cube.add_aux_coord(month_coord, 0)
        self.mock_agg = mock.Mock(spec=Aggregator)
        self.mock_agg.aggregate = mock.Mock(
            return_value=np.empty([4]))

    def test_string_coord(self):
        # Rolling window on a cube that contains a string coordinate.
        res_cube = self.cube.rolling_window('val', self.mock_agg, 3)
        val_coord = DimCoord(np.array([1, 2, 3, 4]),
                             bounds=np.array([[0, 2], [1, 3], [2, 4], [3, 5]]),
                             long_name='val')
        month_coord = AuxCoord(
            np.array(['jan|feb|mar', 'feb|mar|apr', 'mar|apr|may',
                      'apr|may|jun']),
            bounds=np.array([['jan', 'mar'], ['feb', 'apr'],
                             ['mar', 'may'], ['apr', 'jun']]),
            long_name='month')
        self.assertEqual(res_cube.coord('val'), val_coord)
        self.assertEqual(res_cube.coord('month'), month_coord)
Example No. 6
class Test_concatenate_cube(tests.IrisTest):
    def setUp(self):
        self.units = Unit('days since 1970-01-01 00:00:00',
                          calendar='gregorian')
        self.cube1 = Cube([1, 2, 3], 'air_temperature', units='K')
        self.cube1.add_dim_coord(DimCoord([0, 1, 2], 'time', units=self.units),
                                 0)

    def test_pass(self):
        self.cube2 = Cube([1, 2, 3], 'air_temperature', units='K')
        self.cube2.add_dim_coord(DimCoord([3, 4, 5], 'time', units=self.units),
                                 0)
        result = CubeList([self.cube1, self.cube2]).concatenate_cube()
        self.assertIsInstance(result, Cube)

    def test_fail(self):
        units = Unit('days since 1970-01-02 00:00:00',
                     calendar='gregorian')
        cube2 = Cube([1, 2, 3], 'air_temperature', units='K')
        cube2.add_dim_coord(DimCoord([0, 1, 2], 'time', units=units), 0)
        with self.assertRaises(iris.exceptions.ConcatenateError):
            CubeList([self.cube1, cube2]).concatenate_cube()

    def test_empty(self):
        exc_regexp = "can't concatenate an empty CubeList"
        with self.assertRaisesRegex(ValueError, exc_regexp):
            CubeList([]).concatenate_cube()
Example No. 7
class Test_merge_cube(tests.IrisTest):
    def setUp(self):
        self.cube1 = Cube([1, 2, 3], "air_temperature", units="K")
        self.cube1.add_aux_coord(AuxCoord([0], "height", units="m"))

    def test_pass(self):
        cube2 = self.cube1.copy()
        cube2.coord("height").points = [1]
        result = CubeList([self.cube1, cube2]).merge_cube()
        self.assertIsInstance(result, Cube)

    def test_fail(self):
        cube2 = self.cube1.copy()
        cube2.rename("not air temperature")
        with self.assertRaises(iris.exceptions.MergeError):
            CubeList([self.cube1, cube2]).merge_cube()

    def test_empty(self):
        with self.assertRaises(ValueError):
            CubeList([]).merge_cube()

    def test_single_cube(self):
        result = CubeList([self.cube1]).merge_cube()
        self.assertEqual(result, self.cube1)
        self.assertIsNot(result, self.cube1)

    def test_repeated_cube(self):
        with self.assertRaises(iris.exceptions.MergeError):
            CubeList([self.cube1, self.cube1]).merge_cube()
Example No. 8
def simple_2d(with_bounds=True):
    """
    Returns an abstract, two-dimensional, optionally bounded, cube.

    >>> print(simple_2d())
    thingness                           (bar: 3; foo: 4)
         Dimension coordinates:
              bar                           x       -
              foo                           -       x

    >>> print(simple_2d().data)
    [[ 0  1  2  3]
     [ 4  5  6  7]
     [ 8  9 10 11]]


    """
    cube = Cube(np.arange(12, dtype=np.int32).reshape((3, 4)))
    cube.long_name = 'thingness'
    cube.units = '1'
    y_points = np.array([2.5, 7.5, 12.5])
    y_bounds = np.array([[0, 5], [5, 10], [10, 15]], dtype=np.int32)
    y_coord = DimCoord(y_points, long_name='bar', units='1',
                       bounds=y_bounds if with_bounds else None)
    x_points = np.array([-7.5, 7.5, 22.5, 37.5])
    x_bounds = np.array([[-15, 0], [0, 15], [15, 30], [30, 45]],
                        dtype=np.int32)
    x_coord = DimCoord(x_points, long_name='foo', units='1',
                       bounds=x_bounds if with_bounds else None)

    cube.add_dim_coord(y_coord, 0)
    cube.add_dim_coord(x_coord, 1)
    return cube
Example No. 9
 def test_fail(self):
     units = Unit('days since 1970-01-02 00:00:00',
                  calendar='gregorian')
     cube2 = Cube([1, 2, 3], 'air_temperature', units='K')
     cube2.add_dim_coord(DimCoord([0, 1, 2], 'time', units=units), 0)
     with self.assertRaises(iris.exceptions.ConcatenateError):
         CubeList([self.cube1, cube2]).concatenate_cube()
Example No. 10
 def _testcube_latlon_1d(self, lats, lons):
     cube = Cube(np.zeros(len(lons)))
     co_x = AuxCoord(lons, standard_name='longitude', units='degrees')
     co_y = AuxCoord(lats, standard_name='latitude', units='degrees')
     cube.add_aux_coord(co_y, 0)
     cube.add_aux_coord(co_x, 0)
     return cube
Example No. 11
class TestOSGBToLatLon(tests.GraphicsTest):
    def setUp(self):
        path = tests.get_data_path(
            ('NIMROD', 'uk2km', 'WO0000000003452',
             '201007020900_u1096_ng_ey00_visibility0180_screen_2km'))
        self.src = iris.load_cube(path)[0]
        self.src.data = self.src.data.astype(np.float32)
        self.grid = Cube(np.empty((73, 96)))
        cs = GeogCS(6370000)
        lat = DimCoord(np.linspace(46, 65, 73), 'latitude', units='degrees',
                       coord_system=cs)
        lon = DimCoord(np.linspace(-14, 8, 96), 'longitude', units='degrees',
                       coord_system=cs)
        self.grid.add_dim_coord(lat, 0)
        self.grid.add_dim_coord(lon, 1)

    def _regrid(self, method):
        regridder = Regridder(self.src, self.grid, method, 'mask')
        result = regridder(self.src)
        qplt.pcolor(result, antialiased=False)
        qplt.plt.gca().coastlines()

    def test_linear(self):
        self._regrid('linear')
        self.check_graphic()

    def test_nearest(self):
        self._regrid('nearest')
        self.check_graphic()
Example No. 12
 def setUp(self):
     data = np.array([1, 2, 3, 4, 5])
     coord = DimCoord([6, 7, 8, 9, 10], long_name='foo')
     self.cube = Cube(data)
     self.cube.add_dim_coord(coord, 0)
     self.lazy_cube = Cube(as_lazy_data(data))
     self.lazy_cube.add_dim_coord(coord, 0)
Example No. 13
 def test_time_mean_from_forecast_reference_time(self):
     cube = Cube(np.zeros((3, 4)))
     cube.add_aux_coord(AuxCoord(standard_name="forecast_reference_time", units="hours since epoch", points=72))
     cube.add_aux_coord(
         AuxCoord(standard_name="time", units="hours since epoch", points=72 + 36, bounds=[72 + 24, 72 + 48])
     )
     field = self.convert_cube_to_field(cube)
     self.assertEqual(field.lbft, 48)
Example No. 14
 def make_cube(self, calendar):
     n_times = 10
     cube = Cube(np.arange(n_times))
     time_coord = DimCoord(np.arange(n_times), standard_name='time',
                           units=Unit('days since 1980-12-25',
                                      calendar=calendar))
     cube.add_dim_coord(time_coord, 0)
     return cube
Example No. 15
 def test_multi_dimensional(self):
     time = AuxCoord(np.arange(12).reshape(3, 4), 'time',
                     units='hours since 2013-10-29 18:00:00')
     cube = Cube(np.arange(12).reshape(3, 4))
     cube.add_aux_coord(time, (0, 1))
     constraint = TimeConstraint(hour=12)
     with self.assertRaises(CoordinateMultiDimError):
         sub_cube = constraint.extract(cube)
Example No. 16
 def test_simple(self):
     cube = Cube(np.array([0, 1, 2, 3, 4.4]), long_name="foo")
     dim_coord = DimCoord([5, 6, 7, 8, 9], long_name="bar")
     cube.add_dim_coord(dim_coord, 0)
     expected_index = np.array([5, 6, 7, 8, 9])
     series = iris.pandas.as_series(cube)
     self.assertArrayEqual(series, cube.data)
     self.assertArrayEqual(series.index, expected_index)
Example No. 17
class Test_masked(tests.IrisTest):
    def setUp(self):
        self.cube = Cube(ma.masked_greater([1, 2, 3, 4, 5], 3))
        self.cube.add_dim_coord(DimCoord([6, 7, 8, 9, 10], long_name='foo'), 0)

    def test_ma(self):
        data = MAX.aggregate(self.cube.data, axis=0)
        self.assertArrayEqual(data, [3])
Example No. 18
def unrolled_cube():
    data = np.arange(5, dtype='f4')
    cube = Cube(data)
    cube.add_aux_coord(iris.coords.AuxCoord([5.0, 10.0, 8.0, 5.0, 3.0],
                                            'longitude', units='degrees'), 0)
    cube.add_aux_coord(iris.coords.AuxCoord([1.0, 3.0, -2.0, -1.0, -4.0],
                                            'latitude'), 0)
    return cube
Example No. 19
 def _simple_cube(self, dtype):
     data = np.arange(12, dtype=dtype).reshape(3, 4)
     points = np.arange(3, dtype=dtype)
     bounds = np.arange(6, dtype=dtype).reshape(3, 2)
     cube = Cube(data, 'air_pressure_anomaly')
     coord = DimCoord(points, bounds=bounds)
     cube.add_dim_coord(coord, 0)
     return cube
Example No. 20
 def test_time_360(self):
     cube = Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="ts")
     time_unit = cf_units.Unit("days since 2000-01-01 00:00", calendar=cf_units.CALENDAR_360_DAY)
     time_coord = DimCoord([100.1, 200.2], long_name="time", units=time_unit)
     cube.add_dim_coord(time_coord, 0)
     data_frame = iris.pandas.as_data_frame(cube)
     self.assertArrayEqual(data_frame, cube.data)
     self.assertString(str(data_frame), tests.get_result_path(("pandas", "as_dataframe", "time_360.txt")))
Example No. 21
 def test_time_360(self):
     cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="ts")
     time_unit = cf_units.Unit("days since 2000-01-01 00:00", calendar=cf_units.CALENDAR_360_DAY)
     time_coord = DimCoord([0, 100.1, 200.2, 300.3, 400.4], long_name="time", units=time_unit)
     cube.add_dim_coord(time_coord, 0)
     series = iris.pandas.as_series(cube)
     self.assertArrayEqual(series, cube.data)
     self.assertString(str(series), tests.get_result_path(("pandas", "as_series", "time_360.txt")))
Example No. 22
 def _create_cube(self, longitudes):
     # Return a Cube with circular longitude with the given values.
     data = np.arange(12).reshape((3, 4)) * 0.1
     cube = Cube(data)
     lon = DimCoord(longitudes, standard_name='longitude',
                    units='degrees', circular=True)
     cube.add_dim_coord(lon, 1)
     return cube
Example No. 23
 def test_cell_datetime_objects(self):
     # Check the scalar coordinate summary still works even when
     # iris.FUTURE.cell_datetime_objects is True.
     cube = Cube(0)
     cube.add_aux_coord(AuxCoord(42, units='hours since epoch'))
     with FUTURE.context(cell_datetime_objects=True):
         summary = cube.summary()
     self.assertIn('1970-01-02 18:00:00', summary)
Example No. 24
 def test_3d_data(self):
     time = DimCoord(np.arange(12) * 6, 'time',
                     units='hours since 2013-10-29 18:00:00')
     cube = Cube(np.arange(12 * 5 * 6).reshape(12, 5, 6))
     cube.add_dim_coord(time, 0)
     constraint = TimeConstraint(hour=12)
     sub_cube = constraint.extract(cube)
     self.assertArrayEqual(sub_cube.coord('time').points, [18, 42, 66])
Example No. 25
 def cube(self, x, y):
     data = np.arange(len(x) * len(y)).reshape(len(y), len(x))
     cube = Cube(data)
     lat = DimCoord(y, 'latitude', units='degrees')
     lon = DimCoord(x, 'longitude', units='degrees')
     cube.add_dim_coord(lat, 0)
     cube.add_dim_coord(lon, 1)
     return cube
Example No. 26
 def test_scalar_cube_coord_nomatch(self):
     # Ensure that extract is not extracting a scalar cube with scalar
     # coordinate that does not match the constraint.
     constraint = iris.Constraint(scalar_coord=1)
     cube = Cube(1, long_name='a1')
     coord = iris.coords.AuxCoord(0, long_name='scalar_coord')
     cube.add_aux_coord(coord, None)
     res = cube.extract(constraint)
     self.assertIs(res, None)
Example No. 27
 def test_nonlatlon_simple_1d(self):
     co_x = AuxCoord([1.0, 2.0, 3.0, 1.0, 2.0, 3.0], long_name='x')
     co_y = AuxCoord([10.0, 10.0, 10.0, 20.0, 20.0, 20.0], long_name='y')
     cube = Cube(np.zeros(6))
     cube.add_aux_coord(co_y, 0)
     cube.add_aux_coord(co_x, 0)
     sample_point = [('x', 2.8), ('y', 18.5)]
     result = nn_ndinds(cube, sample_point)
     self.assertEqual(result, [(5,)])
Example No. 28
 def setUp(self):
     self.data = np.arange(6.0).reshape((2, 3))
     self.lazydata = biggus.NumpyArrayAdapter(self.data)
     cube = Cube(self.lazydata)
     for i_dim, name in enumerate(('y', 'x')):
         npts = cube.shape[i_dim]
         coord = DimCoord(np.arange(npts), long_name=name)
         cube.add_dim_coord(coord, i_dim)
     self.cube = cube
Example No. 29
 def test_scalar_cube_coord_match(self):
     # Ensure that extract is able to extract a scalar cube according to
     # constrained scalar coordinate.
     constraint = iris.Constraint(scalar_coord=0)
     cube = Cube(1, long_name='a1')
     coord = iris.coords.AuxCoord(0, long_name='scalar_coord')
     cube.add_aux_coord(coord, None)
     res = cube.extract(constraint)
     self.assertIs(res, cube)
Example No. 30
 def _metadata(self, var, **attributes):
     """Re-shape outputs and add meta-data."""
     var = var.reshape(self._ishape)
     var = Cube(var,
                dim_coords_and_dims=zip(self._coords, range(var.ndim)))
     var.transpose(self._reorder)
     for attribute, value in attributes.items():
         setattr(var, attribute, value)
     return var
Example No. 31
 def test_copy_true(self):
     cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="foo")
     series = iris.pandas.as_series(cube)
     series[0] = 99
     self.assertEqual(cube.data[0], 0)
Example No. 32
 def test_copy_int32_false(self):
     cube = Cube(np.array([0, 1, 2, 3, 4], dtype=np.int32), long_name="foo")
     series = iris.pandas.as_series(cube, copy=False)
     series[0] = 99
     self.assertEqual(cube.data[0], 99)
Example No. 33
def empty_3d_cube_aps2(data, name=None, unit=None, stash=None, **kwargs):
    """
    Prepare an iris cube on the APS2 grid for testing.
    """
    if data is None:
        data = np.empty([4, 3])  # must match the 4-point latitude / 3-point longitude below

    cube = Cube(data)

    time = AuxCoord([0], 'time', units='hours since epoch')

    latitude = DimCoord([6.328125, 6.5625, 6.796875, 7.03125],
                        standard_name='latitude', units='degrees')

    longitude = DimCoord([81.211053, 81.562616, 81.914179],
                         standard_name='longitude', units='degrees')

    cube.add_dim_coord(latitude, 0)
    cube.add_dim_coord(longitude, 1)
    cube.add_aux_coord(time)

    if name:
        cube.long_name = name
    if unit:
        cube.units = unit
    if stash:
        cube.attributes['STASH'] = stash

    return cube
Example No. 34
def empty_3d_cube_aps3(data, name=None, unit=None, stash=None, **kwargs):
    """
    Prepare an iris cube on the APS3 grid for testing.
    """
    if data is None:
        data = np.empty([8, 6])  # must match the 8-point latitude / 6-point longitude below

    cube = Cube(data)

    time = AuxCoord([0], 'time', units='hours since epoch')

    latitude = DimCoord([6.26953125, 6.38671875, 6.50390625, 6.62109375,
                         6.73828125, 6.85546875, 6.97265625, 7.08984375],
                        standard_name='latitude', units='degrees')

    longitude = DimCoord([81.12304688, 81.29882812, 81.47460938,
                          81.65039062, 81.82617188, 82.00195312],
                         standard_name='longitude', units='degrees')

    cube.add_dim_coord(latitude, 0)
    cube.add_dim_coord(longitude, 1)
    cube.add_aux_coord(time)

    if name:
        cube.long_name = name
    if unit:
        cube.units = unit
    if stash:
        cube.attributes['STASH'] = stash

    return cube
Example No. 35
 def test_copy_float_false(self):
     cube = Cube(np.array([0, 1, 2, 3.3, 4]), long_name="foo")
     series = iris.pandas.as_series(cube, copy=False)
     series[0] = 99
     self.assertEqual(cube.data[0], 99)
Example No. 36
def empty_model_level_cube(data, name=None, unit=None, stash=None, **kwargs):
    """
    Create a model_level cube from input data.
    """
    if data is None:
        data = np.empty([3, 8, 6])  # the assert below requires shape (3, 8, 6)
    assert data.shape == (3, 8, 6)
    # Add a leading axis=0 so the time dim_coord has a dimension to map to
    new_data = data[np.newaxis, :]
    cube = Cube(new_data)

    # time = AuxCoord([0], 'time', units='hours since epoch')
    time = DimCoord([0], 'time', units='hours since epoch')

    # model = DimCoord([1, 2, 3], 'model_level_number',
    #                  attributes={'positive': 'up'})
    model = DimCoord([1, 2, 3], 'air_pressure',
                     attributes={'positive': 'up'})
    latitude = DimCoord([6.26953125, 6.38671875, 6.50390625, 6.62109375,
                         6.73828125, 6.85546875, 6.97265625, 7.08984375],
                        standard_name='latitude', units='degrees')

    longitude = DimCoord([81.12304688, 81.29882812, 81.47460938,
                          81.65039062, 81.82617188, 82.00195312],
                         standard_name='longitude', units='degrees')

    level_heights = np.array([20., 53.336, 100.])
    level_height = DimCoord(level_heights, long_name='level_height', units='m')
    surface = AuxCoord(topo_aps3.data, 'surface_altitude', units='m')

    sigma = AuxCoord([0.99772321, 0.99393402, 0.98864199], long_name='sigma')

    cube.add_dim_coord(time, 0)
    cube.add_dim_coord(model, 1)
    cube.add_dim_coord(latitude, 2)
    cube.add_dim_coord(longitude, 3)

    cube.add_aux_coord(level_height, 1)
    cube.add_aux_coord(sigma, 1)
    cube.add_aux_coord(surface, (2, 3))

    # Now that we have all of the necessary information, construct a
    # HybridHeight derived "altitude" coordinate.
    cube.add_aux_factory(HybridHeightFactory(level_height, sigma, surface))

    if name:
        cube.long_name = name
    if unit:
        cube.units = unit
    if stash:
        cube.attributes['STASH'] = stash

    return cube
Example No. 37
 def test_copy_false_with_cube_view(self):
     data = np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]])
     cube = Cube(data[:], long_name="foo")
     data_frame = iris.pandas.as_data_frame(cube, copy=False)
     data_frame[0][0] = 99
     self.assertEqual(cube.data[0, 0], 99)
Example No. 38
def create_new_diagnostic_cube(
    name: str,
    units: Union[Unit, str],
    template_cube: Cube,
    mandatory_attributes: Union[Dict[str, str], LimitedAttributeDict],
    optional_attributes: Optional[Union[Dict[str, str],
                                        LimitedAttributeDict]] = None,
    data: Optional[Union[MaskedArray, ndarray]] = None,
    dtype: Type = np.float32,
) -> Cube:
    """
    Creates a new diagnostic cube with suitable metadata.

    Args:
        name:
            Standard or long name for output cube
        units:
            Units for output cube
        template_cube:
            Cube from which to copy dimensional and auxiliary coordinates
        mandatory_attributes:
            Dictionary containing values for the mandatory attributes
            "title", "source" and "institution".  These are overridden by
            values in the optional_attributes dictionary, if specified.
        optional_attributes:
            Dictionary of optional attribute names and values.  If values for
            mandatory attributes are included in this dictionary they override
            the values of mandatory_attributes.
        data:
            Data array.  If not set, cube is filled with zeros using a lazy
            data object, as this will be overwritten later by the caller
            routine.
        dtype:
            Datatype for dummy cube data if "data" argument is None.

    Returns:
        Cube with correct metadata to accommodate new diagnostic field
    """
    attributes = mandatory_attributes
    if optional_attributes is not None:
        attributes.update(optional_attributes)

    error_msg = ""
    for attr in MANDATORY_ATTRIBUTES:
        if attr not in attributes:
            error_msg += "{} attribute is required\n".format(attr)
    if error_msg:
        raise ValueError(error_msg)

    if data is None:
        data = da.zeros_like(template_cube.core_data(), dtype=dtype)

    aux_coords_and_dims, dim_coords_and_dims = [[
        (coord.copy(), template_cube.coord_dims(coord))
        for coord in getattr(template_cube, coord_type)
    ] for coord_type in ("aux_coords", "dim_coords")]

    cube = iris.cube.Cube(
        data,
        units=units,
        attributes=attributes,
        dim_coords_and_dims=dim_coords_and_dims,
        aux_coords_and_dims=aux_coords_and_dims,
    )
    cube.rename(name)

    return cube
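A minimal usage sketch for the function above. The template cube and attribute values here are illustrative, not taken from the original source; it assumes the function above is in scope together with its module-level imports (iris, dask.array as da, and the MANDATORY_ATTRIBUTES list naming "title", "source" and "institution", per the docstring):

import numpy as np
from iris.coords import DimCoord
from iris.cube import Cube

# Hypothetical 2x2 template cube supplying the dimension coordinates.
template = Cube(
    np.zeros((2, 2), dtype=np.float32),
    dim_coords_and_dims=[
        (DimCoord([50.0, 51.0], standard_name="latitude", units="degrees"), 0),
        (DimCoord([-1.0, 0.0], standard_name="longitude", units="degrees"), 1),
    ],
)

# All three mandatory attributes must be supplied, or a ValueError naming
# the missing ones is raised.
mandatory = {
    "title": "Example forecast",
    "source": "Example model",
    "institution": "Example institution",
}

new_cube = create_new_diagnostic_cube("air_temperature", "K", template, mandatory)
# With data=None the cube is filled lazily with float32 zeros.
assert new_cube.name() == "air_temperature"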
Example No. 39
def convertP(year, month, ndays, remove=False):
    """
    Now need to:
    - aggregate to daily averages
    - regrid to 1by1 gridboxes
    """

    MDI = -999.

    # Set up null_cube with desired gridding format to use as a template
    # Does this have to have the same time dimensions?
    #    ndays = np.int(p_cube.data[:,0,0] / 24)

    time = DimCoord(np.arange(ndays * 24), standard_name='time', units='hours')
    latitude = DimCoord(
        np.linspace(89.5, -89.5, 180),
        #    latitude = DimCoord(np.linspace(90, -90, 181),
        standard_name='latitude',
        long_name='gridbox centre latitude',
        units='degrees_north')
    longitude = DimCoord(
        np.linspace(-179.5, 179.5, 360),
        #    longitude = DimCoord(np.linspace(0, 359, 360),
        standard_name='longitude',
        long_name='gridbox centre longitude',
        units='degrees_east')
    null_cube = Cube(np.zeros((ndays * 24, 180, 360), np.float32),
                     dim_coords_and_dims=[(time, 0), (latitude, 1),
                                          (longitude, 2)])
    print('Check null_cube for new grid')
    #    pdb.set_trace()

    ## START OF LSM************************************************
    #    # read in land_sea_mask
    #    variable = "land_sea_mask"
    #    lsm_cube = iris.load(os.path.join(DataLoc, "{}{:02d}_hourly_{}.nc".format(year, month, variable)))
    #    #pdb.set_trace()
    #    # convert from list to cube
    #    lsm_cube = lsm_cube[0]#
    #
    ## Regrid to 1by1 degree - cache the source, template, gridding type for later use - faster
    #    regridder = iris.analysis.Linear().regridder(lsm_cube, null_cube)
    #    lsm_cube_1by1 = regridder(lsm_cube)
    #    print('Check lsm_cube_1by1 for new grid')
    ##    pdb.set_trace()#
    #
    #    # remove old cube
    #    lsm_cube = 0
    #
    #    lsm_cube_1by1 = lsm_cube_1by1[0,:,:]
    ##    lsm_cube_1by1_field = lsm_cube_1by1.extract(iris.Constraint(time=0))
    #    lsm_cube_1by1.units = "1"
    #    print(lsm_cube_1by1)
    #    print('Check lsm_cube_1by1 for 2m_temperature')
    #    #pdb.set_trace()
    #
    ## output
    #    iris.save(lsm_cube_1by1, os.path.join(DataLoc, "{}{:02d}_{}.nc".format(year, month, variable)), zlib=True)
    #    print('Check lsm_cube_1by1 output')
    #    pdb.set_trace()
    ## END OF LSM************************************************************

    variable = "surface_pressure"
    p_cube = iris.load(
        os.path.join(DataLoc,
                     "{}{:02d}_hourly_{}.nc".format(year, month, variable)))
    # convert from list to cube
    p_cube = p_cube[0]

    # Regrid to 1by1 degree - cache the source, template, gridding type for later use - faster
    regridder = iris.analysis.Linear().regridder(p_cube, null_cube)
    p_cube_1by1 = regridder(p_cube)
    print('Check p_cube_1by1 for new grid')
    #    pdb.set_trace()

    # remove old cube
    p_cube = 0

    p_cube_1by1.data /= 100.  # convert Pa to hPa
    p_cube_1by1.units = "hPa"
    print('Check p_cube_1by1 for surface_pressure')
    #    pdb.set_trace()

    # No masking internally within this code...
    p_cube_1by1.fill_value = MDI  # not sure whether we're doing -999 yet if saving as integer
    p_cube_1by1.units = cf_units.Unit("hPa")
    p_cube_1by1.var_name = "p2m"
    p_cube_1by1.long_name = "2 metre surface pressure"

    # Aggregate to daily
    # add a "day" indicator to allow aggregation
    iris.coord_categorisation.add_day_of_month(p_cube_1by1,
                                               "time",
                                               name="day_of_month")

    p_cube_day = p_cube_1by1.aggregated_by(["day_of_month"],
                                           iris.analysis.MEAN)
    p_cube_1by1 = 0
    p_cube_day.remove_coord("day_of_month")
    p_cube_day.units = cf_units.Unit("hPa")
    p_cube_day.var_name = "p2m"
    print('Check p_cube for daily averages')
    #    pdb.set_trace()

    # output
    iris.save(p_cube_day,
              os.path.join(
                  DataLoc,
                  "{}{:02d}_daily_{}.nc".format(year, month, variable)),
              zlib=True)
    p_cube_day = 0
    print('Check p_cube_1by1 output')
    #    pdb.set_trace()

    # remove input files
    if remove:
        os.remove(
            os.path.join(DataLoc,
                         "{}{:02d}_hourly_{}.nc".format(year, month,
                                                        variable)))

    return  # combine
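The template-plus-cached-regridder pattern used in convertP above, reduced to a self-contained sketch with a toy source grid (all grids and values here are illustrative):

import numpy as np
import iris.analysis
from iris.coords import DimCoord
from iris.cube import Cube

def _latlon_cube(data, lats, lons):
    # Toy helper: build a lat/lon cube from point values.
    lat = DimCoord(lats, standard_name='latitude', units='degrees')
    lon = DimCoord(lons, standard_name='longitude', units='degrees')
    return Cube(data, dim_coords_and_dims=[(lat, 0), (lon, 1)])

src = _latlon_cube(np.arange(12, dtype=np.float32).reshape(3, 4),
                   [-30.0, 0.0, 30.0], [0.0, 90.0, 180.0, 270.0])
template = _latlon_cube(np.zeros((5, 8), dtype=np.float32),
                        np.linspace(-30.0, 30.0, 5),
                        np.linspace(0.0, 270.0, 8))

# Building the regridder once caches the grid analysis, so it can be
# reused cheaply for every other cube defined on the same source grid.
regridder = iris.analysis.Linear().regridder(src, template)
result = regridder(src)
print(result.shape)  # (5, 8)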
Example No. 40
 def setUp(self):
     # A (3, 2, 4) cube with a masked element.
     cube = Cube(np.ma.arange(24, dtype=np.int32).reshape((3, 2, 4)))
     cs = GeogCS(6371229)
     coord = DimCoord(points=np.array([-1, 0, 1], dtype=np.int32),
                      standard_name='latitude',
                      units='degrees',
                      coord_system=cs)
     cube.add_dim_coord(coord, 0)
     coord = DimCoord(points=np.array([-1, 0, 1, 2], dtype=np.int32),
                      standard_name='longitude',
                      units='degrees',
                      coord_system=cs)
     cube.add_dim_coord(coord, 2)
     cube.coord('latitude').guess_bounds()
     cube.coord('longitude').guess_bounds()
     cube.data[1, 1, 2] = ma.masked
     self.src_cube = cube
     # Create (7, 2, 9) grid cube.
     self.grid_cube = _resampled_grid(cube, 2.3, 2.4)
Example No. 41
    def _create_cube(self, filenames, variable):
        import numpy as np
        from cis.data_io.hdf import _read_hdf4
        from cis.data_io import hdf_vd
        from iris.cube import Cube, CubeList
        from iris.coords import DimCoord, AuxCoord
        from cis.time_util import calculate_mid_time, cis_standard_time_unit
        from cis.data_io.hdf_sd import get_metadata
        from cf_units import Unit

        variables = ['XDim:GlobalGrid', 'YDim:GlobalGrid', variable]
        logging.info("Listing coordinates: " + str(variables))

        cube_list = CubeList()
        # Read each file individually, let Iris do the merging at the end.
        for f in filenames:
            sdata, vdata = _read_hdf4(f, variables)

            lat_points = np.linspace(-90., 90.,
                                     hdf_vd.get_data(vdata['YDim:GlobalGrid']))
            lon_points = np.linspace(-180., 180.,
                                     hdf_vd.get_data(vdata['XDim:GlobalGrid']))

            lat_coord = DimCoord(lat_points,
                                 standard_name='latitude',
                                 units='degrees')
            lon_coord = DimCoord(lon_points,
                                 standard_name='longitude',
                                 units='degrees')

            # create time coordinate using the midpoint of the time delta between the start date and the end date
            start_datetime = self._get_start_date(f)
            end_datetime = self._get_end_date(f)
            mid_datetime = calculate_mid_time(start_datetime, end_datetime)
            logging.debug("Using {} as datetime for file {}".format(
                mid_datetime, f))
            time_coord = AuxCoord(mid_datetime,
                                  standard_name='time',
                                  units=cis_standard_time_unit,
                                  bounds=[start_datetime, end_datetime])

            var = sdata[variable]
            metadata = get_metadata(var)

            try:
                units = Unit(metadata.units)
            except ValueError:
                logging.warning(
                    "Unable to parse units '{}' in {} for {}.".format(
                        metadata.units, f, variable))
                units = None

            cube = Cube(_get_MODIS_SDS_data(sdata[variable]),
                        dim_coords_and_dims=[(lon_coord, 1), (lat_coord, 0)],
                        aux_coords_and_dims=[(time_coord, None)],
                        var_name=metadata._name,
                        long_name=metadata.long_name,
                        units=units)

            cube_list.append(cube)

        # Merge the cube list across the scalar time coordinates before returning a single cube.
        return cube_list.merge_cube()
Example No. 42
 def test_masked(self):
     data = np.ma.MaskedArray([0, 1, 2, 3, 4.4], mask=[0, 1, 0, 1, 0])
     cube = Cube(data, long_name="foo")
     series = iris.pandas.as_series(cube)
     self.assertArrayEqual(series, cube.data.astype("f").filled(np.nan))
Example No. 43
 def test_copy_masked_true(self):
     data = np.ma.MaskedArray([0, 1, 2, 3, 4], mask=[0, 1, 0, 1, 0])
     cube = Cube(data, long_name="foo")
     series = iris.pandas.as_series(cube)
     series[0] = 99
     self.assertEqual(cube.data[0], 0)
Example No. 44
 def test_copy_true(self):
     cube = Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]),
                 long_name="foo")
     data_frame = iris.pandas.as_data_frame(cube)
     data_frame[0][0] = 99
     self.assertEqual(cube.data[0, 0], 0)
Example No. 45
    def process(self, cube: Cube, smoothing_coefficients: CubeList) -> Cube:
        """
        Set up the smoothing_coefficient parameters and run the recursive
        filter. Smoothing coefficients can be generated using
        :class:`~.OrographicSmoothingCoefficients`
        and :func:`~improver.cli.generate_orographic_smoothing_coefficients`.
        The steps undertaken are:

        1. Split the input cube into slices determined by the co-ordinates in
           the x and y directions.
        2. Construct an array of filter parameters (smoothing_coefficients_x
           and smoothing_coefficients_y) for each cube slice that are used to
           weight the recursive filter in the x- and y-directions.
        3. Pad each cube slice with a square-neighbourhood halo and apply
           the recursive filter for the required number of iterations.
        4. Remove the halo from the cube slice and append the recursed cube
           slice to a 'recursed cube'.
        5. Merge all the cube slices in the 'recursed cube' into a 'new cube'.
        6. Modify the 'new cube' so that its scalar dimension co-ordinates are
           consistent with those in the original input cube.
        7. Return the 'new cube' which now contains the recursively filtered
           values for the original input cube.

        The smoothing_coefficient determines how much of a cell's filtered
        value comes from the cell's own current value and how much from the
        adjacent cell preceding it in the direction in which filtering is
        being applied. A larger smoothing_coefficient means a larger
        proportion of a cell's new value comes from its neighbouring cell.

        Args:
            cube:
                Cube containing the input data to which the recursive filter
                will be applied.
            smoothing_coefficients:
                A cubelist containing two cubes of smoothing_coefficient values,
                one corresponding to smoothing in the x-direction, and the other
                to smoothing in the y-direction.

        Returns:
            Cube containing the smoothed field after the recursive filter
            method has been applied.

        Raises:
            ValueError:
                If the cube contains masked data from multiple cycles or times
        """
        cube_format = next(
            cube.slices([cube.coord(axis="y"),
                         cube.coord(axis="x")]))
        coeffs_x, coeffs_y = self._validate_coefficients(
            cube_format, smoothing_coefficients)

        mask_cube = None
        if np.ma.is_masked(cube.data):
            # Assumes mask is the same for each x-y slice.  This may not be
            # true if there are several time slices in the cube - so throw
            # an error if this is so.
            for coord in TIME_COORDS:
                if cube.coords(coord) and len(cube.coord(coord).points) > 1:
                    raise ValueError(
                        "Dealing with masks from multiple time points is unsupported"
                    )

            mask_cube = cube_format.copy(data=cube_format.data.mask)
            coeffs_x, coeffs_y = self._update_coefficients_from_mask(
                coeffs_x,
                coeffs_y,
                mask_cube,
            )

        padded_coefficients_x, padded_coefficients_y = self._pad_coefficients(
            coeffs_x, coeffs_y)

        recursed_cube = iris.cube.CubeList()
        for output in cube.slices([cube.coord(axis="y"),
                                   cube.coord(axis="x")]):

            padded_cube = pad_cube_with_halo(output,
                                             2 * self.edge_width,
                                             2 * self.edge_width,
                                             pad_method="symmetric")

            new_cube = self._run_recursion(
                padded_cube,
                padded_coefficients_x,
                padded_coefficients_y,
                self.iterations,
            )
            new_cube = remove_halo_from_cube(new_cube, 2 * self.edge_width,
                                             2 * self.edge_width)

            if mask_cube is not None:
                new_cube.data = np.ma.MaskedArray(new_cube.data,
                                                  mask=mask_cube.data)

            recursed_cube.append(new_cube)

        new_cube = recursed_cube.merge_cube()
        new_cube = check_cube_coordinates(cube, new_cube)

        return new_cube
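As context for the smoothing_coefficient weighting described in the docstring above: it matches the classic first-order recursive smoother. A pure-NumPy sketch of one forward pass along a single row, illustrating the recurrence only (not improver's exact implementation):

import numpy as np

def forward_recursive_pass(row, alpha):
    # new[i] = (1 - alpha) * row[i] + alpha * new[i - 1]:
    # a larger alpha draws more of each cell's value from the
    # already-filtered cell preceding it.
    out = row.astype(float).copy()
    for i in range(1, len(out)):
        out[i] = (1.0 - alpha) * out[i] + alpha * out[i - 1]
    return out

row = np.array([0.0, 0.0, 10.0, 0.0, 0.0])
print(forward_recursive_pass(row, alpha=0.5))
# -> [0., 0., 5., 2.5, 1.25]: the spike spreads along the filtering direction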
Example No. 46
 def test_copy_float_false(self):
     cube = Cube(np.array([[0, 1, 2, 3, 4.4], [5, 6, 7, 8, 9]]),
                 long_name="foo")
     data_frame = iris.pandas.as_data_frame(cube, copy=False)
     data_frame[0][0] = 99
     self.assertEqual(cube.data[0, 0], 99)
Example No. 47
def test_curvilinear():
    """
    Test for :func:`esmf_regrid.experimental.unstructured_scheme.regrid_rectilinear_to_unstructured`.

    Tests with curvilinear target cube.
    """
    tgt = _flat_mesh_cube()
    mesh = tgt.mesh
    mesh_length = mesh.connectivity(contains_face=True).shape[0]
    n_lons = 6
    n_lats = 5
    lon_bounds = (-180, 180)
    lat_bounds = (-90, 90)
    grid = _curvilinear_cube(n_lons, n_lats, lon_bounds, lat_bounds)

    h = 2
    t = 3
    height = DimCoord(np.arange(h), standard_name="height")
    time = DimCoord(np.arange(t), standard_name="time")

    src_data = np.empty([t, n_lats, n_lons, h])
    src_data[:] = np.arange(t * h).reshape([t, h])[:, np.newaxis,
                                                   np.newaxis, :]
    cube = Cube(src_data)
    cube.add_aux_coord(grid.coord("latitude"), [1, 2])
    cube.add_aux_coord(grid.coord("longitude"), [1, 2])
    cube.add_dim_coord(time, 0)
    cube.add_dim_coord(height, 3)

    regridder = GridToMeshESMFRegridder(grid, tgt)
    result = regridder(cube)

    # Lenient check for data.
    expected_data = np.empty([t, mesh_length, h])
    expected_data[:] = np.arange(t * h).reshape(t, h)[:, np.newaxis, :]
    assert np.allclose(expected_data, result.data)

    mesh_coord_x, mesh_coord_y = mesh.to_MeshCoords("face")
    expected_cube = Cube(expected_data)
    expected_cube.add_dim_coord(time, 0)
    expected_cube.add_aux_coord(mesh_coord_x, 1)
    expected_cube.add_aux_coord(mesh_coord_y, 1)
    expected_cube.add_dim_coord(height, 2)

    # Check metadata and scalar coords.
    result.data = expected_data
    assert expected_cube == result
Example No. 48
 def test_copy_masked_false(self):
     data = np.ma.MaskedArray([0, 1, 2, 3, 4], mask=[0, 1, 0, 1, 0])
     cube = Cube(data, long_name="foo")
     with self.assertRaises(ValueError):
         _ = iris.pandas.as_series(cube, copy=False)
Example No. 49
 def setUp(self):
     self.shape = (2, 3, 4)
     self.cube = Cube(np.arange(24).reshape(self.shape))
     self.representer = CubeRepresentation(self.cube)
     self.representer.repr_html()
Example No. 50
def convert(year, month, ndays, remove=False):
    """
    Now need to:
    - convert to q, RH , e, tw, DPD
    - aggregate to daily averages
    - regrid to 1by1 gridboxes
    """

    MDI = -999.

    # Set up null_cube with desired gridding format to use as a template
    # Does this have to have the same time dimensions?
    #    ndays = np.int(p_cube.data[:,0,0] / 24)

    time = DimCoord(np.arange(ndays * 24), standard_name='time', units='hours')
    latitude = DimCoord(
        np.linspace(89.5, -89.5, 180),
        #    latitude = DimCoord(np.linspace(90, -90, 181),
        standard_name='latitude',
        long_name='gridbox centre latitude',
        units='degrees_north')
    longitude = DimCoord(
        np.linspace(-179.5, 179.5, 360),
        #    longitude = DimCoord(np.linspace(0, 359, 360),
        standard_name='longitude',
        long_name='gridbox centre longitude',
        units='degrees_east')
    null_cube = Cube(np.zeros((ndays * 24, 180, 360), np.float32),
                     dim_coords_and_dims=[(time, 0), (latitude, 1),
                                          (longitude, 2)])
    print('Check null_cube for new grid')
    #    pdb.set_trace()

    ## START OF LSM************************************************
    #    # read in land_sea_mask
    #    variable = "land_sea_mask"
    #    lsm_cube = iris.load(os.path.join(DataLoc, "{}{:02d}_hourly_{}.nc".format(year, month, variable)))
    #    #pdb.set_trace()
    #    # convert from list to cube
    #    lsm_cube = lsm_cube[0]#
    #
    ## Regrid to 1by1 degree - cache the source, template, gridding type for later use - faster
    #    regridder = iris.analysis.Linear().regridder(lsm_cube, null_cube)
    #    lsm_cube_1by1 = regridder(lsm_cube)
    #    print('Check lsm_cube_1by1 for new grid')
    ##    pdb.set_trace()#
    #
    #    # remove old cube
    #    lsm_cube = 0
    #
    #    lsm_cube_1by1 = lsm_cube_1by1[0,:,:]
    ##    lsm_cube_1by1_field = lsm_cube_1by1.extract(iris.Constraint(time=0))
    #    lsm_cube_1by1.units = "1"
    #    print(lsm_cube_1by1)
    #    print('Check lsm_cube_1by1 for 2m_temperature')
    #    #pdb.set_trace()
    #
    ## output
    #    iris.save(lsm_cube_1by1, os.path.join(DataLoc, "{}{:02d}_{}.nc".format(year, month, variable)), zlib=True)
    #    print('Check lsm_cube_1by1 output')
    #    pdb.set_trace()
    ## END OF LSM************************************************************

    # read in t, td and sp (may be VERY LARGE)
    variable = "2m_temperature"
    t_cube = iris.load(
        os.path.join(DataLoc,
                     "{}{:02d}_hourly_{}.nc".format(year, month, variable)))
    #pdb.set_trace()
    # convert from list to cube
    t_cube = t_cube[0]

    # Regrid to 1by1 degree - cache the source, template, gridding type for later use - faster
    regridder = iris.analysis.Linear().regridder(t_cube, null_cube)
    t_cube_1by1 = regridder(t_cube)
    print('Check t_cube_1by1 for new grid')
    #    pdb.set_trace()

    # remove old cube
    t_cube = 0

    t_cube_1by1.data -= 273.15  # convert to C
    t_cube_1by1.units = "degreesC"
    print('Check t_cube_1by1 for 2m_temperature')
    #pdb.set_trace()

    variable = "2m_dewpoint_temperature"
    td_cube = iris.load(
        os.path.join(DataLoc,
                     "{}{:02d}_hourly_{}.nc".format(year, month, variable)))
    # convert from list to cube
    td_cube = td_cube[0]

    # Regrid to 1by1 degree - cache the source, template, gridding type for later use - faster
    td_cube_1by1 = regridder(td_cube)
    print('Check td_cube_1by1 for new grid')
    #    pdb.set_trace()

    # remove old cube
    td_cube = 0

    td_cube_1by1.data -= 273.15  # convert to C
    td_cube_1by1.units = "degreesC"
    print('Check td_cube_1by1 for 2m_dewpoint_temperature')
    #    pdb.set_trace()

    variable = "surface_pressure"
    p_cube = iris.load(
        os.path.join(DataLoc,
                     "{}{:02d}_hourly_{}.nc".format(year, month, variable)))
    # convert from list to cube
    p_cube = p_cube[0]

    # Regrid to 1by1 degree - cache the source, template, gridding type for later use - faster
    p_cube_1by1 = regridder(p_cube)
    print('Check p_cube_1by1 for new grid')
    #    pdb.set_trace()

    # remove old cube
    p_cube = 0

    p_cube_1by1.data /= 100.  # convert Pa to hPa
    p_cube_1by1.units = "hPa"
    print('Check p_cube_1by1 for surface_pressure')
    #    pdb.set_trace()

    #    # if it contains 2 cubes where we have downloaded mask and wish to mask to land or sea....
    #    if len(p_cubelist) == 2:
    #        # extract both cubes
    #        pcube1 = p_cubelist[0]
    #        pcube2 = p_cubelist[1]#
    #
    #        masked1, = np.where(pcube1.data.mask[:, 0, 0] == True)
    #        masked2, = np.where(pcube2.data.mask[:, 0, 0] == True)
    #
    #        # use locations of masks to overwrite
    #        tp_cube = pcube1[:]
    #        tp_cube.data[masked1] = pcube2.data[masked1]
    #        tp_cube.var_name = "tp"
    #
    #    # else it's just a single cube, so easier to deal with
    #    elif len(p_cubelist) == 1:#
    #
    #        tp_cube = p_cubelist[0]
    #        tp_cube.var_name = "tp"

    # No masking internally within this code...
    # Process q
    # Copy the t_cube and then change some of the fields?
    variable = 'specific_humidity'
    q_cube = t_cube_1by1.copy()
    q_cube.fill_value = MDI  # not sure whether we're doing -999 yet if saving as integer
    q_cube.units = cf_units.Unit("g kg-2")
    q_cube.var_name = "q2m"
    q_cube.long_name = "2 metre specific humidity"

    # Populate the q data
    q_cube.data = ch.sh(td_cube_1by1.data,
                        t_cube_1by1.data,
                        p_cube_1by1.data,
                        roundit=False)
    print('Check q_cube for new data')
    #    pdb.set_trace()

    ## mask all regions which are 100% ocean
    #cube.data[lsm.data == 0] = utils.MDI
    #cube.data = np.ma.masked_where(lsm.data == 0, cube.data)
    #cube.data.fill_value = utils.MDI

    # Aggregate to daily
    # add a "day" indicator to allow aggregation
    iris.coord_categorisation.add_day_of_month(q_cube,
                                               "time",
                                               name="day_of_month")

    q_cube_day = q_cube.aggregated_by(["day_of_month"], iris.analysis.MEAN)
    q_cube = 0
    q_cube_day.remove_coord("day_of_month")
    q_cube_day.units = cf_units.Unit("g kg-2")
    print('Check q_cube for daily averages')
    #    pdb.set_trace()

    # output
    iris.save(q_cube_day,
              os.path.join(
                  DataLoc,
                  "{}{:02d}_daily_{}.nc".format(year, month, variable)),
              zlib=True)
    q_cube_day = 0
    print('Check q_cube_1by1 output')
    #    pdb.set_trace()

    # Process RH
    # Copy the t_cube and then change some of the fields?
    variable = 'relative_humidity'
    rh_cube = t_cube_1by1.copy()
    rh_cube.fill_value = MDI  # not sure whether we're doing -999 yet if saving as integer
    rh_cube.units = cf_units.Unit("%")
    rh_cube.var_name = "rh2m"
    rh_cube.long_name = "2 metre relative humidity"

    # Populate the RH data
    rh_cube.data = ch.rh(td_cube_1by1.data,
                         t_cube_1by1.data,
                         p_cube_1by1.data,
                         roundit=False)
    print('Check rh_cube for new data')
    #    pdb.set_trace()

    ## mask all regions which are 100% ocean
    #cube.data[lsm.data == 0] = utils.MDI
    #cube.data = np.ma.masked_where(lsm.data == 0, cube.data)
    #cube.data.fill_value = utils.MDI

    # Aggregate to daily
    # add a "day" indicator to allow aggregation
    iris.coord_categorisation.add_day_of_month(rh_cube,
                                               "time",
                                               name="day_of_month")

    rh_cube_day = rh_cube.aggregated_by(["day_of_month"], iris.analysis.MEAN)
    rh_cube = 0
    rh_cube_day.remove_coord("day_of_month")
    rh_cube_day.units = cf_units.Unit("%")
    print('Check rh_cube for daily averages')
    #    pdb.set_trace()

    # output
    iris.save(rh_cube_day,
              os.path.join(
                  DataLoc,
                  "{}{:02d}_daily_{}.nc".format(year, month, variable)),
              zlib=True)
    rh_cube_day = 0
    print('Check rh_cube_1by1 output')
    #pdb.set_trace()

    # Process e
    # Copy the t_cube and then change some of the fields?
    variable = 'vapour_pressure'
    e_cube = t_cube_1by1.copy()
    e_cube.fill_value = MDI  # not sure whether we're doing -999 yet if saving as integer
    e_cube.units = cf_units.Unit("hPa")
    e_cube.var_name = "e2m"
    e_cube.long_name = "2 metre vapour pressure"

    # Populate the vapour-pressure (e) data
    e_cube.data = ch.vap(td_cube_1by1.data,
                         t_cube_1by1.data,
                         p_cube_1by1.data,
                         roundit=False)
    print('Check e_cube for new data')
    #    pdb.set_trace()

    ## mask all regions which are 100% ocean
    #cube.data[lsm.data == 0] = utils.MDI
    #cube.data = np.ma.masked_where(lsm.data == 0, cube.data)
    #cube.data.fill_value = utils.MDI

    # Aggregate to daily
    # add a "day" indicator to allow aggregation
    iris.coord_categorisation.add_day_of_month(e_cube,
                                               "time",
                                               name="day_of_month")

    e_cube_day = e_cube.aggregated_by(["day_of_month"], iris.analysis.MEAN)
    e_cube = 0
    e_cube_day.remove_coord("day_of_month")
    e_cube_day.units = cf_units.Unit("hPa")
    print('Check e_cube for daily averages')
    #    pdb.set_trace()

    # output
    iris.save(e_cube_day,
              os.path.join(
                  DataLoc,
                  "{}{:02d}_daily_{}.nc".format(year, month, variable)),
              zlib=True)
    e_cube_day = 0
    print('Check e_cube_1by1 output')
    #    pdb.set_trace()

    # Process tw
    # Copy the t_cube and then change some of the fields?
    variable = 'wetbulb_temperature'
    tw_cube = t_cube_1by1.copy()
    tw_cube.fill_value = MDI  # not sure whether we're doing -999 yet if saving as integer
    tw_cube.units = cf_units.Unit("degrees C")
    tw_cube.var_name = "tw2m"
    tw_cube.long_name = "2 metre wetbulb temperature"

    # Populate the wet-bulb (tw) data
    tw_cube.data = ch.wb(td_cube_1by1.data,
                         t_cube_1by1.data,
                         p_cube_1by1.data,
                         roundit=False)
    print('Check tw_cube for new data')
    #    pdb.set_trace()

    ## mask all regions which are 100% ocean
    #cube.data[lsm.data == 0] = utils.MDI
    #cube.data = np.ma.masked_where(lsm.data == 0, cube.data)
    #cube.data.fill_value = utils.MDI

    # Aggregate to daily
    # add a "day" indicator to allow aggregation
    iris.coord_categorisation.add_day_of_month(tw_cube,
                                               "time",
                                               name="day_of_month")

    tw_cube_day = tw_cube.aggregated_by(["day_of_month"], iris.analysis.MEAN)
    tw_cube = 0
    tw_cube_day.remove_coord("day_of_month")
    tw_cube_day.units = cf_units.Unit("degrees C")
    print('Check tw_cube for daily averages')
    #    pdb.set_trace()

    # output
    iris.save(tw_cube_day,
              os.path.join(
                  DataLoc,
                  "{}{:02d}_daily_{}.nc".format(year, month, variable)),
              zlib=True)
    tw_cube_day = 0
    print('Check tw_cube_1by1 output')
    #    pdb.set_trace()

    # Process dpd
    # Copy the t_cube and then change some of the fields?
    variable = 'dewpoint_depression'
    dpd_cube = t_cube_1by1.copy()
    dpd_cube.fill_value = MDI  # not sure whether we're doing -999 yet if saving as integer
    dpd_cube.units = cf_units.Unit("degrees C")
    dpd_cube.var_name = "dpd2m"
    dpd_cube.long_name = "2 metre dewpoint depression"

    # Populate the dewpoint-depression (dpd) data
    dpd_cube.data = ch.dpd(td_cube_1by1.data, t_cube_1by1.data, roundit=False)
    print('Check dpd_cube for new data')
    #    pdb.set_trace()

    ## mask all regions which are 100% ocean
    #cube.data[lsm.data == 0] = utils.MDI
    #cube.data = np.ma.masked_where(lsm.data == 0, cube.data)
    #cube.data.fill_value = utils.MDI

    # Aggregate to daily
    # add a "day" indicator to allow aggregation
    iris.coord_categorisation.add_day_of_month(dpd_cube,
                                               "time",
                                               name="day_of_month")

    dpd_cube_day = dpd_cube.aggregated_by(["day_of_month"], iris.analysis.MEAN)
    dpd_cube = 0
    dpd_cube_day.remove_coord("day_of_month")
    dpd_cube_day.units = cf_units.Unit("degrees C")
    print('Check dpd_cube for daily averages')
    #    pdb.set_trace()

    # output
    iris.save(dpd_cube_day,
              os.path.join(
                  DataLoc,
                  "{}{:02d}_daily_{}.nc".format(year, month, variable)),
              zlib=True)
    dpd_cube_day = 0
    print('Check dpd_cube_1by1 output')
    #    pdb.set_trace()

    # Process Td
    variable = '2m_dewpoint_temperature'
    # Aggregate to daily
    # add a "day" indicator to allow aggregation
    iris.coord_categorisation.add_day_of_month(td_cube_1by1,
                                               "time",
                                               name="day_of_month")

    td_cube_day = td_cube_1by1.aggregated_by(["day_of_month"],
                                             iris.analysis.MEAN)
    td_cube_1by1 = 0
    td_cube_day.remove_coord("day_of_month")
    td_cube_day.units = cf_units.Unit("degrees C")
    td_cube_day.var_name = "td2m"
    print('Check td_cube for daily averages')
    #    pdb.set_trace()

    # output
    iris.save(td_cube_day,
              os.path.join(
                  DataLoc,
                  "{}{:02d}_daily_{}.nc".format(year, month, variable)),
              zlib=True)
    td_cube_day = 0
    print('Check td_cube_1by1 output')
    #    pdb.set_trace()

    # Process T
    variable = '2m_temperature'
    # Aggregate to daily
    # add a "day" indicator to allow aggregation
    iris.coord_categorisation.add_day_of_month(t_cube_1by1,
                                               "time",
                                               name="day_of_month")

    t_cube_day = t_cube_1by1.aggregated_by(["day_of_month"],
                                           iris.analysis.MEAN)
    t_cube_1by1 = 0
    t_cube_day.remove_coord("day_of_month")
    t_cube_day.units = cf_units.Unit("degrees C")
    t_cube_day.var_name = "t2m"
    print('Check t_cube for daily averages')
    #    pdb.set_trace()

    # output
    iris.save(t_cube_day,
              os.path.join(
                  DataLoc,
                  "{}{:02d}_daily_{}.nc".format(year, month, variable)),
              zlib=True)
    t_cube_day = 0
    print('Check t_cube_1by1 output')
    #    pdb.set_trace()

    # Process P
    variable = 'surface_pressure'
    # Aggregate to daily
    # add a "day" indicator to allow aggregation
    iris.coord_categorisation.add_day_of_month(p_cube_1by1,
                                               "time",
                                               name="day_of_month")

    p_cube_day = p_cube_1by1.aggregated_by(["day_of_month"],
                                           iris.analysis.MEAN)
    p_cube_1by1 = 0
    p_cube_day.remove_coord("day_of_month")
    p_cube_day.units = cf_units.Unit("hPa")
    p_cube_day.var_name = "p2m"
    print('Check p_cube for daily averages')
    #    pdb.set_trace()

    # output
    iris.save(p_cube_day,
              os.path.join(
                  DataLoc,
                  "{}{:02d}_daily_{}.nc".format(year, month, variable)),
              zlib=True)
    p_cube_day = 0
    print('Check p_cube_1by1 output')
    #    pdb.set_trace()

    #    # append precipitation cube to temperature one
    #    cubelist += [tp_cube]

    # remove input files
    if remove:
        for variable in [
                "2m_temperature", "2m_dewpoint_temperature", "surface_pressure"
        ]:
            #        for variable in ["2m_temperature", "2m_dewpoint_temperature", "surface_pressure", "land_sea_mask"]:
            os.remove(
                os.path.join(
                    DataLoc,
                    "{}{:02d}_hourly_{}.nc".format(year, month, variable)))

    return  # combine
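The add_day_of_month / aggregated_by pattern repeated throughout convert above, reduced to a self-contained sketch (48 hourly values averaged to two daily means; the toy cube is illustrative):

import numpy as np
import iris.analysis
import iris.coord_categorisation
from cf_units import Unit
from iris.coords import DimCoord
from iris.cube import Cube

# 48 hourly values spanning two days.
time = DimCoord(np.arange(48), standard_name="time",
                units=Unit("hours since 2000-01-01 00:00"))
hourly = Cube(np.arange(48, dtype=np.float32),
              dim_coords_and_dims=[(time, 0)])

# Tag each point with its day, average within each day, then drop the
# helper coordinate, exactly as done for each variable above.
iris.coord_categorisation.add_day_of_month(hourly, "time",
                                           name="day_of_month")
daily = hourly.aggregated_by(["day_of_month"], iris.analysis.MEAN)
daily.remove_coord("day_of_month")
print(daily.data)  # [11.5 35.5]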
Example No. 51
def test_laziness():
    """Test that regridding is lazy when source data is lazy."""
    n_lons = 12
    n_lats = 10
    h = 4
    lon_bounds = (-180, 180)
    lat_bounds = (-90, 90)

    mesh = _gridlike_mesh(n_lons, n_lats)

    src_data = np.arange(n_lats * n_lons * h).reshape([n_lats, n_lons, h])
    src_data = da.from_array(src_data, chunks=[3, 5, 2])
    src = Cube(src_data)
    grid = _grid_cube(n_lons, n_lats, lon_bounds, lat_bounds, circular=True)
    src.add_dim_coord(grid.coord("latitude"), 0)
    src.add_dim_coord(grid.coord("longitude"), 1)

    mesh_coord_x, mesh_coord_y = mesh.to_MeshCoords("face")
    tgt_data = np.zeros([n_lats * n_lons])
    tgt = Cube(tgt_data)
    tgt.add_aux_coord(mesh_coord_x, 0)
    tgt.add_aux_coord(mesh_coord_y, 0)

    rg = GridToMeshESMFRegridder(src, tgt)

    assert src.has_lazy_data()
    result = rg(src)
    assert result.has_lazy_data()
    out_chunks = result.lazy_data().chunks
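    # The 10x12 lat-lon grid collapses onto 120 mesh faces (one chunk), while
    # the untouched trailing dimension keeps its source chunking of (2, 2).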
    expected_chunks = ((120, ), (2, 2))
    assert out_chunks == expected_chunks
    assert np.allclose(result.data, src_data.reshape([-1, h]))
Exemplo n.º 52
0
 def test_cube(self):
     dtype = np.float32
     data = np.array([1, 2, 3], dtype=dtype)
     cube = Cube(data)
     self._check_call(cube, dtype)
Exemplo n.º 53
0
 def test_src_data_different_dims(self):
     src, target = self.grids()
     regridder = AreaWeightedRegridder(src, target)
     result = regridder(src)
     expected_mean, expected_std = 4.772097735195653, 2.211698479817678
     self.assertArrayShapeStats(result, (9, 8), expected_mean, expected_std)
     # New source cube with additional "levels" dimension
     # Each level has identical x-y data so the mean and std stats remain
     # identical when x, y and z dims are reordered
     levels = DimCoord(np.arange(5), "model_level_number")
     lat = src.coord("latitude")
     lon = src.coord("longitude")
     data = np.repeat(src.data[np.newaxis, ...], 5, axis=0)
     src = Cube(data)
     src.add_dim_coord(levels, 0)
     src.add_dim_coord(lat, 1)
     src.add_dim_coord(lon, 2)
     result = regridder(src)
     self.assertArrayShapeStats(result, (5, 9, 8), expected_mean,
                                expected_std)
     # Check data with dims in different order
     # Reshape src so that the coords are ordered [x, z, y],
     # the mean and std statistics should be the same
     data = np.moveaxis(src.data.copy(), 2, 0)
     src = Cube(data)
     src.add_dim_coord(lon, 0)
     src.add_dim_coord(levels, 1)
     src.add_dim_coord(lat, 2)
     result = regridder(src)
     self.assertArrayShapeStats(result, (8, 5, 9), expected_mean,
                                expected_std)
     # Check data with dims in different order
     # Reshape src so that the coords are ordered [y, x, z],
     # the mean and std statistics should be the same
     data = np.moveaxis(src.data.copy(), 2, 0)
     src = Cube(data)
     src.add_dim_coord(lat, 0)
     src.add_dim_coord(lon, 1)
     src.add_dim_coord(levels, 2)
     result = regridder(src)
     self.assertArrayShapeStats(result, (9, 8, 5), expected_mean,
                                expected_std)
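
The dimension reordering exercised above is just np.moveaxis; a quick sketch of the axis bookkeeping (shapes only, illustrative):

import numpy as np

data = np.empty((5, 9, 8))            # (z, y, x)
print(np.moveaxis(data, 2, 0).shape)  # (8, 5, 9) -- (x, z, y)
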
Exemplo n.º 54
0
class Test_post_process(tests.IrisTest):
    def setUp(self):
        shape = (2, 5)
        data = np.arange(np.prod(shape))

        self.coord_simple = DimCoord(data, "time")
        self.cube_simple = Cube(data)
        self.cube_simple.add_dim_coord(self.coord_simple, 0)

        self.coord_multi_0 = DimCoord(np.arange(shape[0]), "time")
        self.coord_multi_1 = DimCoord(np.arange(shape[1]), "height")
        self.cube_multi = Cube(data.reshape(shape))
        self.cube_multi.add_dim_coord(self.coord_multi_0, 0)
        self.cube_multi.add_dim_coord(self.coord_multi_1, 1)

    def test_missing_mandatory_kwarg(self):
        aggregator = PercentileAggregator()
        emsg = "percentile aggregator requires .* keyword argument 'percent'"
        with self.assertRaisesRegex(ValueError, emsg):
            aggregator.aggregate("dummy", axis=0)

    def test_simple_single_point(self):
        aggregator = PercentileAggregator()
        percent = 50
        kwargs = dict(percent=percent)
        data = np.empty(self.cube_simple.shape)
        coords = [self.coord_simple]
        actual = aggregator.post_process(self.cube_simple, data, coords,
                                         **kwargs)
        self.assertEqual(actual.shape, self.cube_simple.shape)
        self.assertIs(actual.data, data)
        name = "percentile_over_time"
        coord = actual.coord(name)
        expected = AuxCoord(percent, long_name=name, units="percent")
        self.assertEqual(coord, expected)

    def test_simple_multiple_points(self):
        aggregator = PercentileAggregator()
        percent = np.array([10, 20, 50, 90])
        kwargs = dict(percent=percent)
        shape = self.cube_simple.shape + percent.shape
        data = np.empty(shape)
        coords = [self.coord_simple]
        actual = aggregator.post_process(self.cube_simple, data, coords,
                                         **kwargs)
        self.assertEqual(actual.shape, percent.shape + self.cube_simple.shape)
        expected = np.rollaxis(data, -1)
        self.assertArrayEqual(actual.data, expected)
        name = "percentile_over_time"
        coord = actual.coord(name)
        expected = AuxCoord(percent, long_name=name, units="percent")
        self.assertEqual(coord, expected)

    def test_multi_single_point(self):
        aggregator = PercentileAggregator()
        percent = 70
        kwargs = dict(percent=percent)
        data = np.empty(self.cube_multi.shape)
        coords = [self.coord_multi_0]
        actual = aggregator.post_process(self.cube_multi, data, coords,
                                         **kwargs)
        self.assertEqual(actual.shape, self.cube_multi.shape)
        self.assertIs(actual.data, data)
        name = "percentile_over_time"
        coord = actual.coord(name)
        expected = AuxCoord(percent, long_name=name, units="percent")
        self.assertEqual(coord, expected)

    def test_multi_multiple_points(self):
        aggregator = PercentileAggregator()
        percent = np.array([17, 29, 81])
        kwargs = dict(percent=percent)
        shape = self.cube_multi.shape + percent.shape
        data = np.empty(shape)
        coords = [self.coord_multi_0]
        actual = aggregator.post_process(self.cube_multi, data, coords,
                                         **kwargs)
        self.assertEqual(actual.shape, percent.shape + self.cube_multi.shape)
        expected = np.rollaxis(data, -1)
        self.assertArrayEqual(actual.data, expected)
        name = "percentile_over_time"
        coord = actual.coord(name)
        expected = AuxCoord(percent, long_name=name, units="percent")
        self.assertEqual(coord, expected)
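
The shape handling tested above hinges on np.rollaxis moving the trailing percentile axis to the front; a minimal sketch (shapes are illustrative):

import numpy as np

data = np.empty((2, 5, 4))      # (time, height, percentile)
rolled = np.rollaxis(data, -1)  # percentile axis moved to the front
print(rolled.shape)  # (4, 2, 5)
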
Exemplo n.º 55
0
    def _stack_subensembles(self, forecast_subensembles: Cube) -> Cube:
        """Stacking the realization and percentile dimensions in forecast_subensemble
        into a single realization dimension. Realization and percentile are assumed to
        be the first and second dimensions respectively.

        Args:
            input_cube:
                Cube containing the forecast_subensembles.

        Returns:
            Cube containing single realization dimension in place of the realization
            and percentile dimensions in forecast_subensemble.

        Raises:
            ValueError:
                if realization and percentile are not the first and second
                dimensions.
        """
        realization_percentile_dims = (
            *forecast_subensembles.coord_dims("realization"),
            *forecast_subensembles.coord_dims("percentile"),
        )
        if realization_percentile_dims != (0, 1):
            raise ValueError("Invalid dimension coordinate ordering.")
        realization_size = len(
            forecast_subensembles.coord("realization").points)
        percentile_size = len(forecast_subensembles.coord("percentile").points)
        new_realization_coord = DimCoord(
            points=np.arange(realization_size * percentile_size,
                             dtype=np.int32),
            standard_name="realization",
            units="1",
        )
        # As we are stacking the first two dimensions, we need to subtract 1 from all
        # dimension position values.
        dim_coords_and_dims = [(new_realization_coord, 0)]
        dim_coords = forecast_subensembles.coords(dim_coords=True)
        for coord in dim_coords:
            if coord.name() not in ["realization", "percentile"]:
                dims = tuple(
                    d - 1
                    for d in forecast_subensembles.coord_dims(coord.name()))
                dim_coords_and_dims.append((coord, dims))
        aux_coords_and_dims = []
        aux_coords = forecast_subensembles.coords(dim_coords=False)
        for coord in aux_coords:
            dims = tuple(
                d - 1 for d in forecast_subensembles.coord_dims(coord.name()))
            aux_coords_and_dims.append((coord, dims))
        # Stack the first two dimensions.
        superensemble_data = np.reshape(forecast_subensembles.data, (-1, ) +
                                        forecast_subensembles.data.shape[2:])
        superensemble_cube = Cube(
            superensemble_data,
            standard_name=forecast_subensembles.standard_name,
            long_name=forecast_subensembles.long_name,
            var_name=forecast_subensembles.var_name,
            units=forecast_subensembles.units,
            dim_coords_and_dims=dim_coords_and_dims,
            aux_coords_and_dims=aux_coords_and_dims,
            attributes=forecast_subensembles.attributes,
        )
        return superensemble_cube
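
The core of the method is the reshape that fuses the leading realization and percentile axes; a hedged, numpy-only sketch (shapes are illustrative):

import numpy as np

# (realization, percentile, y, x) -> (realization * percentile, y, x)
data = np.arange(2 * 3 * 4 * 5).reshape(2, 3, 4, 5)
stacked = np.reshape(data, (-1,) + data.shape[2:])
print(stacked.shape)  # (6, 4, 5)
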
Exemplo n.º 56
0
 def setUp(self):
     self.cube = Cube(ma.masked_less([1, 2, 3, 4, 5], 3))
     self.cube.add_dim_coord(DimCoord([6, 7, 8, 9, 10], long_name='foo'), 0)
Exemplo n.º 57
0
    def process(self, input_cube: Cube) -> Cube:
        """Convert each point to a truth value based on provided threshold
        function. If the plugin has a "threshold_units"
        member, this is used to convert a copy of the input_cube into
        the units specified.

        Args:
            input_cube:
                Cube to threshold. Must have a latitude coordinate.

        Returns:
            Cube after a threshold has been applied. The data within this
            cube will contain values between 0 and 1 to indicate whether
            a given threshold has been exceeded or not.

                The cube meta-data will contain:
                * Input_cube name prepended with
                probability_of_X_above(or below)_threshold (where X is
                the diagnostic under consideration)
                * Threshold dimension coordinate with same units as input_cube
                * Threshold attribute ("greater_than",
                "greater_than_or_equal_to", "less_than", or
                less_than_or_equal_to" depending on the operator)
                * Cube units set to (1).

        Raises:
            ValueError: if a np.nan value is detected within the input cube.
        """
        if np.isnan(input_cube.data).any():
            raise ValueError("Error: NaN detected in input cube data")

        self.threshold_coord_name = input_cube.name()

        cube = input_cube.copy()
        if self.threshold_units is not None:
            cube.convert_units(self.threshold_units)

        cube.coord("latitude").convert_units("degrees")
        threshold_variant = cube.coord("latitude").points
        threshold_over_latitude = np.array(
            self.threshold_function(threshold_variant))

        # Add a scalar axis for the longitude axis so that numpy's array-
        # broadcasting knows what we want to do
        truth_value = self.comparison_operator["function"](
            cube.data,
            np.expand_dims(threshold_over_latitude, 1),
        )

        truth_value = truth_value.astype(FLOAT_DTYPE)

        if np.ma.is_masked(cube.data):
            # update unmasked points only
            mask = input_cube.data.mask
            cube.data[~mask] = truth_value[~mask]
        else:
            cube.data = truth_value

        self._add_latitude_threshold_coord(cube, threshold_over_latitude)
        cube.coord(var_name="threshold").convert_units(input_cube.units)

        self._update_metadata(cube)
        enforce_coordinate_ordering(cube, ["realization", "percentile"])

        return cube
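
The latitude-varying comparison above relies on broadcasting a (lat,) threshold array against (lat, lon) data; a minimal numpy sketch (values are illustrative):

import numpy as np

data = np.random.default_rng(0).random((3, 4))       # (lat, lon)
threshold_over_latitude = np.array([0.2, 0.5, 0.8])  # one value per latitude

# expand to (lat, 1) so the comparison broadcasts across longitude
truth = data > np.expand_dims(threshold_over_latitude, 1)
print(truth.astype(np.float32))  # (3, 4) array of 0.0 / 1.0
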
Exemplo n.º 58
0
def build_coefficients_cubelist(
    template: Cube,
    coeff_values: Union[Sequence, np.ndarray],
    forecast_predictors: CubeList,
) -> CubeList:
    """Make a cubelist of coefficients with expected metadata

    Args:
        template (iris.cube.Cube):
            Cube containing information about the time,
            forecast_reference_time, forecast_period, x coordinate and
            y coordinate that will be used within the EMOS coefficient cube.
        coeff_values (numpy.ndarray or list):
            The values of the coefficients. These values will be used as the
            cube data.
        forecast_predictors (iris.cube.CubeList):
            The forecast predictors used for constructing the coordinates
            required for the beta coefficient.

    Returns:
        iris.cube.CubeList:
            The resulting EMOS coefficients cubelist.
    """
    dim_coords_and_dims = []
    aux_coords_and_dims = []

    # add spatial and temporal coords from forecast to be calibrated
    for coord in ["forecast_period", "forecast_reference_time"]:
        aux_coords_and_dims.append((template.coord(coord).copy(), None))

    for coord in [template.coord(axis="x"), template.coord(axis="y")]:
        coord_diffs = np.diff(coord.points)
        min_bound = min(coord.points) - (coord_diffs[0] / 2)
        max_bound = max(coord.points) + (coord_diffs[-1] / 2)
        bounds = [min_bound, max_bound]
        point = np.median(bounds)
        new_coord = coord.copy(points=[point], bounds=[bounds])
        aux_coords_and_dims.append((new_coord, None))

    attributes = {
        "diagnostic_standard_name": "air_temperature",
        "distribution": "norm",
        "title": "Ensemble Model Output Statistics coefficients",
    }

    coeff_names = ["alpha", "beta", "gamma", "delta"]
    cubelist = iris.cube.CubeList([])
    for optimised_coeff, coeff_name in zip(coeff_values, coeff_names):
        modified_dim_coords_and_dims = dim_coords_and_dims.copy()
        modified_aux_coords_and_dims = aux_coords_and_dims.copy()
        coeff_units = "1"
        if coeff_name in ["alpha", "gamma"]:
            coeff_units = template.units
        if coeff_name == "beta":
            fp_names = [fp.name() for fp in forecast_predictors]
            predictor_index = iris.coords.DimCoord(
                np.array(range(len(fp_names)), dtype=np.int32),
                long_name="predictor_index",
                units="1",
            )
            modified_dim_coords_and_dims.append((predictor_index, 0))
            predictor_name = iris.coords.AuxCoord(fp_names,
                                                  long_name="predictor_name",
                                                  units="no_unit")
            modified_aux_coords_and_dims.append((predictor_name, 0))
        cube = iris.cube.Cube(
            np.atleast_1d(optimised_coeff)
            if "beta" == coeff_name else optimised_coeff,
            long_name=f"emos_coefficient_{coeff_name}",
            units=coeff_units,
            dim_coords_and_dims=modified_dim_coords_and_dims,
            aux_coords_and_dims=modified_aux_coords_and_dims,
            attributes=attributes,
        )
        cubelist.append(cube)

    return cubelist
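
The x/y coordinate handling above collapses each spatial axis to a single cell spanning its full extent; a small sketch of the bounds arithmetic (points are illustrative):

import numpy as np

points = np.array([10.0, 20.0, 30.0, 40.0])
diffs = np.diff(points)
bounds = [points.min() - diffs[0] / 2, points.max() + diffs[-1] / 2]
point = np.median(bounds)
print(point, bounds)  # 25.0, bounds [5.0, 45.0]
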
Exemplo n.º 59
0
 def test_no_dim_coord(self):
     cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="foo")
     series = iris.pandas.as_series(cube)
     expected_index = np.array([0, 1, 2, 3, 4])
     self.assertArrayEqual(series, cube.data)
     self.assertArrayEqual(series.index, expected_index)
Exemplo n.º 60
0
    def _validate_coefficients(self, cube: Cube,
                               smoothing_coefficients: CubeList) -> List[Cube]:
        """Validate the smoothing coefficients cubes.

        Args:
            cube:
                2D cube containing the input data to which the recursive
                filter will be applied.

            smoothing_coefficients:
                A cubelist containing two cubes of smoothing_coefficient values,
                one corresponding to smoothing in the x-direction, and the other
                to smoothing in the y-direction.

        Returns:
            A list of smoothing coefficients cubes ordered: [x-coeffs, y-coeffs].

        Raises:
            ValueError: If the smoothing coefficient cubes are not named
                correctly.
            ValueError: If any smoothing_coefficient cube value exceeds 0.5.
            ValueError: If the coordinate to be smoothed within a
                smoothing coefficient cube is not of the expected length.
            ValueError: If the coordinate to be smoothed within a
                smoothing coefficient cube does not have the expected points.
        """
        # Ensure cubes are in x, y order.
        smoothing_coefficients.sort(key=lambda coeff: coeff.name())
        axes = ["x", "y"]

        for axis, smoothing_coefficient in zip(axes, smoothing_coefficients):

            # Check the smoothing coefficient cube name is as expected
            expected_name = self.smoothing_coefficient_name_format.format(axis)
            if smoothing_coefficient.name() != expected_name:
                msg = (
                    "The smoothing coefficient cube name {} does not match the "
                    "expected name {}".format(smoothing_coefficient.name(),
                                              expected_name))
                raise ValueError(msg)

            # Check the smoothing coefficients do not exceed an empirically determined
            # maximum value; larger values damage conservation significantly.
            if (smoothing_coefficient.data > 0.5).any():
                raise ValueError(
                    "All smoothing_coefficient values must be less than 0.5. "
                    "A large smoothing_coefficient value leads to poor "
                    "conservation of probabilities")

            for test_axis in axes:
                coefficient_crd = smoothing_coefficient.coord(axis=test_axis)
                if test_axis == axis:
                    expected_points = (
                        cube.coord(axis=test_axis).points[1:] +
                        cube.coord(axis=test_axis).points[:-1]) / 2
                else:
                    expected_points = cube.coord(axis=test_axis).points

                if len(coefficient_crd.points) != len(
                        expected_points) or not np.allclose(
                            coefficient_crd.points, expected_points):
                    msg = (
                        f"The smoothing coefficients {test_axis} dimension does not "
                        "have the expected length or values compared with the cube "
                        "to which smoothing is being applied.\n\nSmoothing "
                        "coefficient cubes must have coordinates that are:\n"
                        "- one element shorter along the dimension being smoothed "
                        f"({axis}) than in the target cube, with points in that "
                        "dimension equal to the mean of each pair of points along "
                        "the dimension in the target cube\n- equal to the points "
                        "in the target cube along the dimension not being smoothed"
                    )
                    raise ValueError(msg)

        return smoothing_coefficients
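
The staggered-points check above expects the coefficient coordinate along the smoothed axis to sit at cell midpoints of the target cube's coordinate; a quick sketch of that expectation (points are illustrative):

import numpy as np

points = np.array([0.0, 1.0, 2.0, 3.0])     # target cube coordinate
midpoints = (points[1:] + points[:-1]) / 2  # expected coefficient coordinate
print(midpoints)  # [0.5 1.5 2.5]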