Code example #1
File: test_Cube.py  Project: SGPeckham/iris
 def test_cell_datetime_objects(self):
     # Check the scalar coordinate summary still works even when
     # iris.FUTURE.cell_datetime_objects is True.
     cube = Cube(0)
     cube.add_aux_coord(AuxCoord(42, units='hours since epoch'))
     with FUTURE.context(cell_datetime_objects=True):
         summary = cube.summary()
     self.assertIn('1970-01-02 18:00:00', summary)
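With cell_datetime_objects=True, scalar time cells are rendered as datetimes in the summary. The expected string can be sanity-checked with the standard library alone (assuming the default Gregorian calendar of the 'hours since epoch' units):

    from datetime import datetime, timedelta

    # 42 hours after 1970-01-01 00:00:00, the epoch of the coordinate units
    print(datetime(1970, 1, 1) + timedelta(hours=42))  # -> 1970-01-02 18:00:00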
Code example #2
def extract_spread(field, dirname, time):
    analys = load_cube(f'{dirname}/sprd.nc', field)

    # Trim to times before the given limit
    analys = analys.extract(Constraint(time=lambda t: t < time))

    # Keep only times after March 1st 00:00
    with FUTURE.context(cell_datetime_objects=True):
        analys = analys.extract(
            Constraint(time=lambda t: t > PartialDateTime(month=3, day=1)))

    return analys
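The t > PartialDateTime(month=3, day=1) comparison only involves the fields that were set, so it matches cells after March 1st in any year. A minimal sketch of that behaviour, assuming the iris 1.x iris.time.PartialDateTime API:

    import datetime

    from iris.time import PartialDateTime

    pdt = PartialDateTime(month=3, day=1)
    # Unset fields (year, hour, ...) are skipped during the comparison.
    print(pdt < datetime.datetime(2000, 3, 2))   # True
    print(pdt < datetime.datetime(2000, 2, 28))  # False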
Code example #3
def plot(dirname, field, vert_range, label):
    with catch_warnings():
        # SPEEDY output is not CF compliant
        simplefilter('ignore', UserWarning)

        # Load cubes
        print(f'Plotting {field}')
        analy = load_cube(f'{dirname}/mean.nc', field)
        nature = load_cube('nature.nc', field)
        sprd = load_cube(f'{dirname}/sprd.nc', field)

        # Get minimum duration of data
        time = min(
            analy.coord('time').points[-1],
            nature.coord('time').points[-1])
        analy = analy.extract(Constraint(time=lambda t: t < time))
        nature = nature.extract(Constraint(time=lambda t: t < time))
        sprd = sprd.extract(Constraint(time=lambda t: t < time))

        # Extract vertically over chosen vertical range
        lev_constraint_lambda = lambda s: vert_range[1] < s <= vert_range[0]

        # RMSE
        rmse = ((analy - nature)**2)\
                .extract(Constraint(atmosphere_sigma_coordinate=lev_constraint_lambda))\
                .collapsed(['atmosphere_sigma_coordinate', 'longitude'], MEAN)**0.5

        # Spread
        sprd = sprd.extract(Constraint(atmosphere_sigma_coordinate=lev_constraint_lambda))\
                .collapsed(['atmosphere_sigma_coordinate', 'longitude'], MEAN)

        # Compute time mean after March 1st 00:00
        with FUTURE.context(cell_datetime_objects=True):
            rmse = rmse.extract(Constraint(time=lambda t: t > PartialDateTime(month=3,day=1)))\
                    .collapsed('time', MEAN)
            sprd = sprd.extract(Constraint(time=lambda t: t > PartialDateTime(month=3,day=1)))\
                    .collapsed('time', MEAN)

        latitude_coord = rmse.coord('latitude')
        rmse_h, = plt.plot(latitude_coord.points,
                           rmse.data,
                           label=f'{label} error',
                           linestyle='-')
        sprd_h, = plt.plot(latitude_coord.points,
                           sprd.data,
                           label=f'{label} spread',
                           linestyle='--',
                           color=rmse_h.get_color())

        return [rmse_h, sprd_h]
Code example #4
def pick_times(some_cube, years, months, days, hours):
    tconstraint = Constraint(time=lambda a: _get_truth_value(a, years, months, days, hours))

    with FUTURE.context(cell_datetime_objects=True):
        extracted = some_cube.extract(tconstraint)
        if extracted is None:
            t_coord = some_cube.coord('time')
            print("No cube extracted, returning 'None'.")
            print("Is your selection within the time bounds of the original cube?")
            print(t_coord.units.num2date(t_coord.points[0]))
            print(t_coord.units.num2date(t_coord.points[-1]))
            print(t_coord.units.calendar)

        # Return the cube extracted above rather than extracting a second time.
        return extracted
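The _get_truth_value helper is not part of the snippet. A hypothetical implementation that is consistent with the call site, assuming years, months, days and hours are collections of accepted values and an empty collection means "accept any":

    def _get_truth_value(cell, years, months, days, hours):
        # With cell_datetime_objects=True, cell.point is a datetime-like object.
        point = cell.point
        return ((not years or point.year in years) and
                (not months or point.month in months) and
                (not days or point.day in days) and
                (not hours or point.hour in hours))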
Code example #5
File: test_grib2.py  Project: fionaRust/iris
 def test_gdt90_with_bitmap(self):
     with FUTURE.context(strict_grib_load=True):
         path = tests.get_data_path(('GRIB', 'umukv', 'ukv_chan9.grib2'))
         cube = load_cube(path)
         # Pay particular attention to the orientation.
         self.assertIsNot(cube.data[0, 0], ma.masked)
         self.assertIs(cube.data[-1, 0], ma.masked)
         self.assertIs(cube.data[0, -1], ma.masked)
         self.assertIs(cube.data[-1, -1], ma.masked)
         x = cube.coord('projection_x_coordinate').points
         y = cube.coord('projection_y_coordinate').points
         self.assertGreater(x[0], x[-1])  # Decreasing X coordinate
         self.assertLess(y[0], y[-1])  # Increasing Y coordinate
         # Check everything else.
         self.assertCMLApproxData(cube)
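In the iris 1.x releases these tests were written for, strict_grib_load=True selected the newer, stricter GRIB2 loading path; the flag was retired in later releases once that loader (since moved to iris-grib) became the default. The usage pattern is the same throughout these GRIB examples; the file path below is only a placeholder:

    from iris import FUTURE, load_cube

    with FUTURE.context(strict_grib_load=True):
        cube = load_cube('example.grib2')  # hypothetical path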
Code example #6
    def test_save_load(self):
        cube = stock.lat_lon_cube()
        cube.rename('atmosphere_mole_content_of_ozone')
        cube.units = Unit('Dobson')
        tcoord = DimCoord(23, 'time',
                          units=Unit('days since epoch', calendar='standard'))
        fpcoord = DimCoord(24, 'forecast_period', units=Unit('hours'))
        cube.add_aux_coord(tcoord)
        cube.add_aux_coord(fpcoord)
        cube.attributes['WMO_constituent_type'] = 0

        with self.temp_filename('test_grib_pdt40.grib2') as temp_file_path:
            save(cube, temp_file_path)
            with FUTURE.context(strict_grib_load=True):
                loaded = load_cube(temp_file_path)
            self.assertEqual(loaded.attributes, cube.attributes)
Code example #7
def extract_error(field, dirname):
    analys = load_cube(f'{dirname}/mean.nc', field)
    nature = load_cube('nature.nc', field)

    # Get minimum duration of data
    time = min(
        analys.coord('time').points[-1],
        nature.coord('time').points[-1])
    analys = analys.extract(Constraint(time=lambda t: t < time))
    nature = nature.extract(Constraint(time=lambda t: t < time))

    # Keep only times after March 1st 00:00
    with FUTURE.context(cell_datetime_objects=True):
        analys = analys.extract(
            Constraint(time=lambda t: t > PartialDateTime(month=3, day=1)))
        nature = nature.extract(
            Constraint(time=lambda t: t > PartialDateTime(month=3, day=1)))

    return analys, nature, time
Code example #8
File: test_grib2.py  Project: fionaRust/iris
 def test_reduced(self):
     path = tests.get_data_path(('GRIB', 'reduced', 'reduced_gg.grib2'))
     with FUTURE.context(strict_grib_load=True):
         cube = load_cube(path)
     self.assertCMLApproxData(cube)
Code example #9
File: test_grib2.py  Project: fionaRust/iris
    def test_save_load(self):
        # Load sample UKV data (variable-resolution rotated grid).
        path = tests.get_data_path(('PP', 'ukV1', 'ukVpmslont.pp'))
        cube = load_cube(path)

        # Extract a single 2D field, for simplicity.
        self.assertEqual(cube.ndim, 3)
        self.assertEqual(cube.coord_dims('time'), (0,))
        cube = cube[0]

        # FOR NOW: **also** fix the data so that it is square, i.e. nx=ny.
        # This is needed because of a bug in the gribapi.
        # See : https://software.ecmwf.int/issues/browse/SUP-1096
        ny, nx = cube.shape
        nn = min(nx, ny)
        cube = cube[:nn, :nn]

        # Check that it has a rotated-pole variable-spaced grid, as expected.
        x_coord = cube.coord(axis='x')
        self.assertIsInstance(x_coord.coord_system, RotatedGeogCS)
        self.assertFalse(is_regular(x_coord))

        # Write to temporary file, check grib_dump output, and load back in.
        with self.temp_filename('ukv_sample.grib2') as temp_file_path:
            save(cube, temp_file_path)

            # Get a grib_dump of the output file.
            dump_text = check_output(('grib_dump -O -wcount=1 ' +
                                      temp_file_path),
                                     shell=True).decode()

            # Check that various aspects of the saved file are as expected.
            expect_strings = (
                'editionNumber = 2',
                'gridDefinitionTemplateNumber = 5',
                'Ni = {:d}'.format(cube.shape[-1]),
                'Nj = {:d}'.format(cube.shape[-2]),
                'shapeOfTheEarth = 1',
                'scaledValueOfRadiusOfSphericalEarth = {:d}'.format(
                    int(UM_DEFAULT_EARTH_RADIUS)),
                'resolutionAndComponentFlags = 0',
                'latitudeOfSouthernPole = -37500000',
                'longitudeOfSouthernPole = 357500000',
                'angleOfRotation = 0')
            for expect in expect_strings:
                self.assertIn(expect, dump_text)

            # Load the Grib file back into a new cube.
            with FUTURE.context(strict_grib_load=True):
                cube_loaded_from_saved = load_cube(temp_file_path)
                # Also load data, before the temporary file gets deleted.
                cube_loaded_from_saved.data

        # The re-loaded result will not match the original in every respect:
        #  * cube attributes are discarded
        #  * horizontal coordinates are rounded to an integer representation
        #  * bounds on horizontal coords are lost
        # Thus the following "equivalence tests" are rather piecemeal.

        # Check those re-loaded properties which should match the original.
        for test_cube in (cube, cube_loaded_from_saved):
            self.assertEqual(test_cube.standard_name,
                             'air_pressure_at_sea_level')
            self.assertEqual(test_cube.units, 'Pa')
            self.assertEqual(test_cube.shape, (744, 744))
            self.assertEqual(test_cube.cell_methods, ())

        # Check no cube attributes on the re-loaded cube.
        # Note: this does *not* match the original, but is as expected.
        self.assertEqual(cube_loaded_from_saved.attributes, {})

        # Now remaining to check: coordinates + data...

        # Check they have all the same coordinates.
        co_names = [coord.name() for coord in cube.coords()]
        co_names_reload = [coord.name()
                           for coord in cube_loaded_from_saved.coords()]
        self.assertEqual(sorted(co_names_reload), sorted(co_names))

        # Check all the coordinates.
        for coord_name in co_names:
            try:
                co_orig = cube.coord(coord_name)
                co_load = cube_loaded_from_saved.coord(coord_name)

                # Check shape.
                self.assertEqual(co_load.shape, co_orig.shape,
                                 'Shape of re-loaded "{}" coord is {} '
                                 'instead of {}'.format(coord_name,
                                                        co_load.shape,
                                                        co_orig.shape))

                # Check coordinate points equal, within a tolerance.
                self.assertArrayAllClose(co_load.points, co_orig.points,
                                         rtol=1.0e-6)

                # Check all coords are unbounded.
                # (NOTE: this is not so for the original X and Y coordinates,
                # but Grib does not store those bounds).
                self.assertIsNone(co_load.bounds)

            except AssertionError as err:
                self.fail('Failed on coordinate "{}" : {}'.format(
                    coord_name, str(err)))

        # Check that main data array also matches.
        self.assertArrayAllClose(cube.data, cube_loaded_from_saved.data)
Code example #10
def plot(dirname, field, label_in):
    with catch_warnings():
        label = label_in

        # SPEEDY output is not CF compliant
        simplefilter('ignore', UserWarning)

        print(f'Plotting {field}')
        analy_ps = load_cube(f'{dirname}/mean.nc', field)
        nature_ps = load_cube('nature.nc', field)

        # Get minimum duration of data
        time = min(
            analy_ps.coord('time').points[-1],
            nature_ps.coord('time').points[-1])
        analy_ps = analy_ps.extract(Constraint(time=lambda t: t < time))
        nature_ps = nature_ps.extract(Constraint(time=lambda t: t < time))

        # Generate x date axis
        with FUTURE.context(cell_datetime_objects=True):
            time_axis = [x.point for x in nature_ps.coord('time').cells()]

        coords = ['latitude', 'longitude', 'atmosphere_sigma_coordinate']
        rmse = ((analy_ps - nature_ps)**2).collapsed(coords, MEAN)**0.5

        label = label_in + ' RMSE'

        # Try to compute the time mean after March 1st 00:00 (if the data doesn't
        # yet reach March 1st, extract() returns None and the AttributeError is ignored).
        try:
            with FUTURE.context(cell_datetime_objects=True):
                after_march = rmse.extract(
                    Constraint(
                        time=lambda t: t > PartialDateTime(month=3, day=1)))
                mean = float(after_march.collapsed('time', MEAN).data)
            label += f' ({mean:4.3})'
        except AttributeError:
            pass

        rmse_h, = plt.plot(time_axis, rmse.data, label=label)

        analy_cb = load_cube(f'{dirname}/sprd.nc', field)
        analy_cb = analy_cb.extract(Constraint(time=lambda t: t < time))

        sprd = analy_cb.collapsed(coords, MEAN)

        label = label_in + ' spread'

        # Try to compute the time mean after March 1st 00:00 (if the data doesn't
        # yet reach March 1st, extract() returns None and the AttributeError is ignored).
        try:
            with FUTURE.context(cell_datetime_objects=True):
                after_march = sprd.extract(
                    Constraint(
                        time=lambda t: t > PartialDateTime(month=3, day=1)))
                mean = float(after_march.collapsed('time', MEAN).data)
            label += f'  ({mean:4.3})'
        except AttributeError:
            pass

        sprd_h, = plt.plot(time_axis,
                           sprd.data,
                           linestyle='--',
                           label=label,
                           color=rmse_h.get_color())

        return [rmse_h, sprd_h]
Code example #11
def plot(dirname, label_in):
    with catch_warnings():
        label = label_in

        # SPEEDY output is not CF compliant
        simplefilter('ignore', UserWarning)

        print(f'Plotting {fields[0]}')
        analy_ps = load_cube(f'{dirname}/mean.nc', fields[0])
        nature_ps = load_cube('nature.nc', fields[0])

        # Get minimum duration of data
        time = min(
            analy_ps.coord('time').points[-1],
            nature_ps.coord('time').points[-1])
        analy_ps = analy_ps.extract(Constraint(time=lambda t: t < time))
        nature_ps = nature_ps.extract(Constraint(time=lambda t: t < time))

        # Generate x date axis
        with FUTURE.context(cell_datetime_objects=True):
            time_axis = [x.point for x in nature_ps.coord('time').cells()]

        rmse = (((analy_ps - nature_ps) / obs_errors[0])**2).collapsed(
            ['latitude', 'longitude'], SUM)

        for field, obs_error in zip(fields[1:], obs_errors[1:]):
            print(f'Plotting {field}')
            for lev in levels:
                # Build iris constraint object
                lev_con = Constraint(atmosphere_sigma_coordinate=lev)

                analy_ps = load_cube(f'{dirname}/mean.nc', field)
                nature_ps = load_cube('nature.nc', field)

                analy_ps = analy_ps.extract(
                    Constraint(time=lambda t: t < time) & lev_con)
                nature_ps = nature_ps.extract(
                    Constraint(time=lambda t: t < time) & lev_con)

                rmse += (((analy_ps - nature_ps) / obs_error)**2).collapsed(
                    ['latitude', 'longitude'], SUM)

        # Divide by the total number of fields (4 3D fields x 8 levels + 1 2D field) and gridpoints (96*48)
        rmse = rmse / (33.0 * 96.0 * 48.0)

        # Square root to get RMSE
        rmse = rmse**0.5

        label = label_in + ' RMSE'

        # Try to compute the time mean after March 1st 00:00 (if the data doesn't
        # yet reach March 1st, extract() returns None and the AttributeError is ignored).
        try:
            with FUTURE.context(cell_datetime_objects=True):
                after_march = rmse.extract(
                    Constraint(
                        time=lambda t: t > PartialDateTime(month=3, day=1)))
                mean = float(after_march.collapsed('time', MEAN).data)
            label += f' ({mean:4.3})'
        except AttributeError:
            pass

        rmse_h, = plt.plot(time_axis, rmse.data, label=label)

        for field, obs_error in zip(fields, obs_errors):
            print(f'Plotting {field}')
            analy_cb = load_cube(f'{dirname}/sprd.nc', field)
            analy_cb = analy_cb.extract(Constraint(time=lambda t: t < time))
            if field == 'Surface Pressure [Pa]':
                try:
                    sprd += (analy_cb / obs_error).collapsed(
                        ['latitude', 'longitude'], SUM)
                except NameError:
                    sprd = (analy_cb / obs_error).collapsed(
                        ['latitude', 'longitude'], SUM)
            else:
                for lev in levels:
                    analy_cb_lev = analy_cb.extract(
                        Constraint(atmosphere_sigma_coordinate=lev))
                    try:
                        sprd += (analy_cb_lev / obs_error).collapsed(
                            ['latitude', 'longitude'], SUM)
                    except NameError:
                        sprd = (analy_cb_lev / obs_error).collapsed(
                            ['latitude', 'longitude'], SUM)

        # Divide by the total number of fields (4 3D fields x 8 levels + 1 2D field) and gridpoints (96*48)
        sprd = sprd / (33.0 * 96.0 * 48.0)

        label = label_in + ' spread'

        # Try to compute the time mean after March 1st 00:00 (if the data doesn't
        # yet reach March 1st, extract() returns None and the AttributeError is ignored).
        try:
            with FUTURE.context(cell_datetime_objects=True):
                after_march = sprd.extract(
                    Constraint(
                        time=lambda t: t > PartialDateTime(month=3, day=1)))
                mean = float(after_march.collapsed('time', MEAN).data)
            label += f'  ({mean:4.3})'
        except AttributeError:
            pass

        sprd_h, = plt.plot(time_axis,
                           sprd.data,
                           linestyle='--',
                           label=label,
                           color=rmse_h.get_color())

        return [rmse_h, sprd_h]
Code example #12
File: test_grib2.py  Project: fionaRust/iris
 def test_grid_complex_spatial_differencing(self):
     path = tests.get_data_path(('GRIB', 'missing_values',
                                 'missing_values.grib2'))
     with FUTURE.context(strict_grib_load=True):
         cube = load_cube(path)
     self.assertCMLApproxData(cube)
Code example #13
File: test_grib2.py  Project: fionaRust/iris
 def test_gdt1(self):
     with FUTURE.context(strict_grib_load=True):
         path = tests.get_data_path(('GRIB', 'rotated_nae_t',
                                     'sensible_pole.grib2'))
         cube = load_cube(path)
         self.assertCMLApproxData(cube)
Code example #14
    def test_save_load(self):
        # Load sample UKV data (variable-resolution rotated grid).
        path = tests.get_data_path(('PP', 'ukV1', 'ukVpmslont.pp'))
        cube = load_cube(path)

        # Extract a single 2D field, for simplicity.
        self.assertEqual(cube.ndim, 3)
        self.assertEqual(cube.coord_dims('time'), (0,))
        cube = cube[0]

        # FOR NOW: **also** fix the data so that it is square, i.e. nx=ny.
        # This is needed because of a bug in the gribapi.
        # See : https://software.ecmwf.int/issues/browse/SUP-1096
        ny, nx = cube.shape
        nn = min(nx, ny)
        cube = cube[:nn, :nn]

        # Check that it has a rotated-pole variable-spaced grid, as expected.
        x_coord = cube.coord(axis='x')
        self.assertIsInstance(x_coord.coord_system, RotatedGeogCS)
        self.assertFalse(is_regular(x_coord))

        # Write to temporary file, check that key contents are in the file,
        # then load back in.
        with self.temp_filename('ukv_sample.grib2') as temp_file_path:
            save(cube, temp_file_path)

            # Check that various aspects of the saved file are as expected.
            expect_values = (
                (0, 'editionNumber', 2),
                (3, 'gridDefinitionTemplateNumber', 5),
                (3, 'Ni', cube.shape[-1]),
                (3, 'Nj', cube.shape[-2]),
                (3, 'shapeOfTheEarth', 1),
                (3, 'scaledValueOfRadiusOfSphericalEarth',
                 int(UM_DEFAULT_EARTH_RADIUS)),
                (3, 'resolutionAndComponentFlags', 0),
                (3, 'latitudeOfSouthernPole', -37500000),
                (3, 'longitudeOfSouthernPole', 357500000),
                (3, 'angleOfRotation', 0))
            self.assertGribMessageContents(temp_file_path, expect_values)

            # Load the Grib file back into a new cube.
            with FUTURE.context(strict_grib_load=True):
                cube_loaded_from_saved = load_cube(temp_file_path)
                # Also load data, before the temporary file gets deleted.
                cube_loaded_from_saved.data

        # The re-loaded result will not match the original in every respect:
        #  * cube attributes are discarded
        #  * horizontal coordinates are rounded to an integer representation
        #  * bounds on horizontal coords are lost
        # Thus the following "equivalence tests" are rather piecemeal.

        # Check those re-loaded properties which should match the original.
        for test_cube in (cube, cube_loaded_from_saved):
            self.assertEqual(test_cube.standard_name,
                             'air_pressure_at_sea_level')
            self.assertEqual(test_cube.units, 'Pa')
            self.assertEqual(test_cube.shape, (744, 744))
            self.assertEqual(test_cube.cell_methods, ())

        # Check no cube attributes on the re-loaded cube.
        # Note: this does *not* match the original, but is as expected.
        self.assertEqual(cube_loaded_from_saved.attributes, {})

        # Now remaining to check: coordinates + data...

        # Check they have all the same coordinates.
        co_names = [coord.name() for coord in cube.coords()]
        co_names_reload = [coord.name()
                           for coord in cube_loaded_from_saved.coords()]
        self.assertEqual(sorted(co_names_reload), sorted(co_names))

        # Check all the coordinates.
        for coord_name in co_names:
            try:
                co_orig = cube.coord(coord_name)
                co_load = cube_loaded_from_saved.coord(coord_name)

                # Check shape.
                self.assertEqual(co_load.shape, co_orig.shape,
                                 'Shape of re-loaded "{}" coord is {} '
                                 'instead of {}'.format(coord_name,
                                                        co_load.shape,
                                                        co_orig.shape))

                # Check coordinate points equal, within a tolerance.
                self.assertArrayAllClose(co_load.points, co_orig.points,
                                         rtol=1.0e-6)

                # Check all coords are unbounded.
                # (NOTE: this is not so for the original X and Y coordinates,
                # but Grib does not store those bounds).
                self.assertIsNone(co_load.bounds)

            except AssertionError as err:
                self.fail('Failed on coordinate "{}" : {}'.format(
                    coord_name, str(err)))

        # Check that main data array also matches.
        self.assertArrayAllClose(cube.data, cube_loaded_from_saved.data)
Code example #15
File: test_grib2.py  Project: fionaRust/iris
 def setUp(self):
     # Load from the test file.
     file_path = tests.get_data_path(('GRIB', 'time_processed',
                                      'time_bound.grib2'))
     with FUTURE.context(strict_grib_load=True):
         self.cube = load_cube(file_path)