def setUp(self):
    """Patch the GRIB _load_convert helpers with sentinels/mocks so the
    rotated-grid conversion under test runs without real GRIB data."""
    module = 'iris.fileformats.grib._load_convert'
    patch = []
    # Ellipsoid geometry yields sentinel (major, minor, radius) values.
    self.major = mock.sentinel.major
    self.minor = mock.sentinel.minor
    self.radius = mock.sentinel.radius
    this = '{}.ellipsoid_geometry'.format(module)
    return_value = (self.major, self.minor, self.radius)
    patch.append(mock.patch(this, return_value=return_value))
    # The ellipsoid factory returns a sentinel datum.
    this = '{}.ellipsoid'.format(module)
    self.ellipsoid = mock.sentinel.ellipsoid
    patch.append(mock.patch(this, return_value=self.ellipsoid))
    # The template 4/5 helper appends a (coord, dim) pair to the metadata
    # dictionary passed to it; emulate that side effect.
    this = '{}.grid_definition_template_4_and_5'.format(module)
    self.coord = mock.sentinel.coord
    self.dim = mock.sentinel.dim
    item = (self.coord, self.dim)

    # PEP 8 (E731): prefer a named def over an assigned lambda.
    def func(s, m, y, x, c):
        m['dim_coords_and_dims'].append(item)

    patch.append(mock.patch(this, side_effect=func))
    this = 'iris.coord_systems.RotatedGeogCS'
    self.cs = mock.sentinel.cs
    patch.append(mock.patch(this, return_value=self.cs))
    # Blank metadata payload, as produced by the loader.
    self.metadata = {'factories': [], 'references': [],
                     'standard_name': None,
                     'long_name': None, 'units': None, 'attributes': {},
                     'cell_methods': [], 'dim_coords_and_dims': [],
                     'aux_coords_and_dims': []}
    # Start every patch and guarantee it is stopped at teardown.
    for p in patch:
        p.start()
        self.addCleanup(p.stop)
 def setUp(self):
     """Patch grib._load_convert helpers with sentinels so the grid
     conversion under test needs no real GRIB data."""
     module = 'iris.fileformats.grib._load_convert'
     patch = []
     self.major = mock.sentinel.major
     self.minor = mock.sentinel.minor
     self.radius = mock.sentinel.radius
     this = '{}.ellipsoid_geometry'.format(module)
     return_value = (self.major, self.minor, self.radius)
     patch.append(mock.patch(this, return_value=return_value))
     this = '{}.ellipsoid'.format(module)
     self.ellipsoid = mock.sentinel.ellipsoid
     patch.append(mock.patch(this, return_value=self.ellipsoid))
     # The template 4/5 helper appends a (coord, dim) pair to the
     # metadata dictionary it is given; emulate that side effect.
     this = '{}.grid_definition_template_4_and_5'.format(module)
     self.coord = mock.sentinel.coord
     self.dim = mock.sentinel.dim
     item = (self.coord, self.dim)

     # PEP 8 (E731): prefer a named def over an assigned lambda.
     def func(s, m, y, x, c):
         m['dim_coords_and_dims'].append(item)

     patch.append(mock.patch(this, side_effect=func))
     this = 'iris.coord_systems.RotatedGeogCS'
     self.cs = mock.sentinel.cs
     patch.append(mock.patch(this, return_value=self.cs))
     # Blank metadata payload, as produced by the loader.
     self.metadata = {'factories': [], 'references': [],
                      'standard_name': None,
                      'long_name': None, 'units': None, 'attributes': {},
                      'cell_methods': [], 'dim_coords_and_dims': [],
                      'aux_coords_and_dims': []}
     # Ensure every started patch is stopped at teardown.
     for p in patch:
         p.start()
         self.addCleanup(p.stop)
Example #3
0
    def test_hybrid_pressure_with_duplicate_references(self):
        """A duplicated reference-surface field must trigger a warning."""
        # Fake reference surface field (surface air pressure, stash 409).
        reference = self._field_with_data(
            10,
            stash=iris.fileformats.pp.STASH(1, 0, 409),
            lbuser=[0, 0, 0, 409, 0, 0, 0])

        # Fake data field that requires the reference surface.
        level = 5678
        sig_lo, sig_mid, sig_hi = 0.85, 0.9, 0.95
        del_lo, del_mid, del_hi = 0.05, 0.1, 0.15
        main_field = self._field_with_data(lbvc=9,
                                           lblev=level,
                                           bhlev=del_mid,
                                           bhrlev=del_lo,
                                           blev=sig_mid,
                                           brlev=sig_lo,
                                           brsvd=[sig_hi, del_hi])

        # Convert both fields to cubes; the reference appears twice.
        fake_load = mock.Mock(
            return_value=iter([main_field, reference, reference]))
        msg = 'Multiple reference cubes for surface_air_pressure'
        with mock.patch('iris.fileformats.pp.load', new=fake_load), \
                mock.patch('warnings.warn') as warn:
            _, _, _ = iris.fileformats.pp.load_cubes('DUMMY')
            warn.assert_called_with(msg)
    def test_vectorise_call(self):
        """The categorisation function must be wrapped by numpy.vectorize
        before being applied to the points array; vectorizing is what
        supports multi-dimensional coordinate points."""
        # PEP 8 (E731): use a def rather than an assigned lambda.
        def fn(coord, v):
            return v ** 2

        with mock.patch('numpy.vectorize',
                        return_value=self.vectorised) as vectorise_patch:
            with mock.patch('iris.coords.AuxCoord') as aux_coord_constructor:
                add_categorised_coord(self.cube, 'foobar', self.coord, fn,
                                      units=self.units)

        # Check the constructor of AuxCoord gets called with the
        # appropriate arguments.
        # Start with the vectorised function.
        vectorise_patch.assert_called_once_with(fn)
        # Check the vectorize wrapper gets called with the appropriate args.
        self.vectorised.assert_called_once_with(self.coord, self.coord.points)
        # Check the AuxCoord constructor itself.
        aux_coord_constructor.assert_called_once_with(
            self.vectorised(self.coord, self.coord.points),
            units=self.units,
            attributes=self.coord.attributes.copy())
        # And check adding the aux coord to the cube mock.
        self.cube.add_aux_coord.assert_called_once_with(
            aux_coord_constructor(), self.cube.coord_dims(self.coord))
    def test_vectorise_call(self):
        """The categorisation function must be wrapped by numpy.vectorize
        before being applied to the points array; vectorizing is what
        supports multi-dimensional coordinate points."""
        # PEP 8 (E731): use a def rather than an assigned lambda.
        def fn(coord, v):
            return v ** 2

        with mock.patch('numpy.vectorize',
                        return_value=self.vectorised) as vectorise_patch:
            with mock.patch('iris.coords.AuxCoord') as aux_coord_constructor:
                add_categorised_coord(self.cube, 'foobar', self.coord, fn,
                                      units=self.units)

        # Check the constructor of AuxCoord gets called with the
        # appropriate arguments.
        # Start with the vectorised function.
        vectorise_patch.assert_called_once_with(fn)
        # Check the vectorize wrapper gets called with the appropriate args.
        self.vectorised.assert_called_once_with(self.coord, self.coord.points)
        # Check the AuxCoord constructor itself.
        aux_coord_constructor.assert_called_once_with(
            self.vectorised(self.coord, self.coord.points),
            units=self.units,
            attributes=self.coord.attributes.copy())
        # And check adding the aux coord to the cube mock.
        self.cube.add_aux_coord.assert_called_once_with(
            aux_coord_constructor(), self.cube.coord_dims(self.coord))
    def setUp(self):
        """Build a dummy pyke engine plus patches that keep the netCDF
        bounds-loading rules away from real file access."""
        # Create dummy pyke engine.
        self.engine = mock.Mock(
            cube=mock.Mock(),
            cf_var=mock.Mock(dimensions=('foo', 'bar')),
            filename='DUMMY',
            provides=dict(coordinates=[]))

        # Create patch for deferred loading that prevents attempted
        # file access. This assumes that self.cf_coord_var and
        # self.cf_bounds_var are defined in the test case.
        def patched__getitem__(proxy_self, keys):
            # Serve data from whichever test variable the proxy names.
            for var in (self.cf_coord_var, self.cf_bounds_var):
                if proxy_self.variable_name == var.cf_name:
                    return var[keys]
            raise RuntimeError()

        # Patch object only; the test case is expected to start it.
        self.deferred_load_patch = mock.patch(
            'iris.fileformats.netcdf.NetCDFDataProxy.__getitem__',
            new=patched__getitem__)

        # Patch the helper function that retrieves the bounds cf variable.
        # This avoids the need for setting up further mocking of cf objects.
        def get_cf_bounds_var(coord_var):
            # NOTE(review): assumes the test case sets self.cf_bounds_var
            # before this patch is exercised — confirm in subclasses.
            return self.cf_bounds_var

        self.get_cf_bounds_var_patch = mock.patch(
            'iris.fileformats._pyke_rules.compiled_krb.'
            'fc_rules_cf_fc.get_cf_bounds_var',
            new=get_cf_bounds_var)
Example #7
0
    def setUp(self):
        """Build a fake netCDF dataset for a hybrid height coordinate
        (delta/sigma/orography formula terms, all with bounds) and
        restrict CFReader to translation only.

        ``np.float`` was a deprecated alias of the builtin ``float``
        (removed in NumPy 1.24), so the builtin is used instead.
        """
        self.delta = netcdf_variable('delta',
                                     'height',
                                     float,
                                     bounds='delta_bnds')
        self.delta_bnds = netcdf_variable('delta_bnds', 'height bnds',
                                          float)
        self.sigma = netcdf_variable('sigma',
                                     'height',
                                     float,
                                     bounds='sigma_bnds')
        self.sigma_bnds = netcdf_variable('sigma_bnds', 'height bnds',
                                          float)
        self.orography = netcdf_variable('orography', 'lat lon', float)
        formula_terms = 'a: delta b: sigma orog: orography'
        standard_name = 'atmosphere_hybrid_height_coordinate'
        self.height = netcdf_variable('height',
                                      'height',
                                      float,
                                      formula_terms=formula_terms,
                                      bounds='height_bnds',
                                      standard_name=standard_name)
        # Over-specify the formula terms on the bounds variable,
        # which will be ignored by the cf loader.
        formula_terms = 'a: delta_bnds b: sigma_bnds orog: orography'
        self.height_bnds = netcdf_variable('height_bnds',
                                           'height bnds',
                                           float,
                                           formula_terms=formula_terms)
        self.lat = netcdf_variable('lat', 'lat', float)
        self.lon = netcdf_variable('lon', 'lon', float)
        # Note that, only lat and lon are explicitly associated as coordinates.
        self.temp = netcdf_variable('temp',
                                    'height lat lon',
                                    float,
                                    coordinates='lat lon')

        self.variables = dict(delta=self.delta,
                              sigma=self.sigma,
                              orography=self.orography,
                              height=self.height,
                              lat=self.lat,
                              lon=self.lon,
                              temp=self.temp,
                              delta_bnds=self.delta_bnds,
                              sigma_bnds=self.sigma_bnds,
                              height_bnds=self.height_bnds)
        ncattrs = mock.Mock(return_value=[])
        self.dataset = mock.Mock(file_format='NetCDF4',
                                 variables=self.variables,
                                 ncattrs=ncattrs)
        # Restrict the CFReader functionality to only performing translations.
        build_patch = mock.patch(
            'iris.fileformats.cf.CFReader._build_cf_groups')
        reset_patch = mock.patch('iris.fileformats.cf.CFReader._reset')
        build_patch.start()
        reset_patch.start()
        self.addCleanup(build_patch.stop)
        self.addCleanup(reset_patch.stop)
Example #8
0
    def test_hybrid_height_round_trip_no_reference(self):
        """Load a hybrid-height field with no orography reference field:
        a warning is raised, no aux factory is built, and a save
        round-trip restores the original vertical header values."""
        # Use pp.load_cubes() to convert fake PPFields into Cubes.
        # NB. Use MagicMock so that SplittableInt header items, such as
        # LBCODE, support len().
        # Make a fake data field which needs the reference surface.
        model_level = 5678
        sigma_lower, sigma, sigma_upper = 0.85, 0.9, 0.95
        delta_lower, delta, delta_upper = 0.05, 0.1, 0.15
        # NOTE: for lbvc=65 (hybrid height) sigma is carried in
        # bhlev/bhrlev and delta (level height) in blev/brlev.
        data_field = self._field_with_data(lbvc=65,
                                           lblev=model_level,
                                           bhlev=sigma,
                                           bhrlev=sigma_lower,
                                           blev=delta,
                                           brlev=delta_lower,
                                           brsvd=[delta_upper, sigma_upper])

        # Convert field to a cube.
        load = mock.Mock(return_value=iter([data_field]))
        with mock.patch('iris.fileformats.pp.load', new=load) as load, \
                mock.patch('warnings.warn') as warn:
            data_cube, = iris.fileformats.pp.load_cubes('DUMMY')

        msg = "Unable to create instance of HybridHeightFactory. " \
              "The source data contains no field(s) for 'orography'."
        warn.assert_called_once_with(msg)

        # Check the data cube is set up to use hybrid height.
        self._test_coord(data_cube,
                         model_level,
                         standard_name='model_level_number')
        self._test_coord(data_cube,
                         delta, [delta_lower, delta_upper],
                         long_name='level_height')
        self._test_coord(data_cube,
                         sigma, [sigma_lower, sigma_upper],
                         long_name='sigma')
        # Check that no aux factory is created (due to missing
        # reference surface).
        aux_factories = data_cube.aux_factories
        self.assertEqual(len(aux_factories), 0)

        # Now use the save rules to convert the Cube back into a PPField.
        data_field = iris.fileformats.pp.PPField3()
        data_field.lbfc = 0
        data_field.lbvc = 0
        data_field.brsvd = [None, None]
        data_field.lbuser = [None] * 7
        data_field = verify(data_cube, data_field)

        # Check the data field has the vertical coordinate as originally
        # specified.
        self.assertEqual(data_field.lbvc, 65)
        self.assertEqual(data_field.lblev, model_level)
        self.assertEqual(data_field.bhlev, sigma)
        self.assertEqual(data_field.bhrlev, sigma_lower)
        self.assertEqual(data_field.blev, delta)
        self.assertEqual(data_field.brlev, delta_lower)
        self.assertEqual(data_field.brsvd, [delta_upper, sigma_upper])
Example #9
0
 def test_formula_terms_ignore(self):
     """A formula-terms variable with a bad dimension is promoted,
     raising a single warning."""
     self.orography.dimensions = ['lat', 'wibble']
     with mock.patch('netCDF4.Dataset', return_value=self.dataset):
         with mock.patch('warnings.warn') as warn:
             promoted = CFReader('dummy').cf_group.promoted
             self.assertEqual(list(promoted.keys()), ['orography'])
             self.assertIs(promoted['orography'].cf_data, self.orography)
             self.assertEqual(warn.call_count, 1)
Example #10
0
 def test_formula_terms_ignore(self):
     """Promote (not drop) a formula-terms variable whose dimensions do
     not match; exactly one warning is expected."""
     self.orography.dimensions = ['lat', 'wibble']
     with mock.patch('netCDF4.Dataset', return_value=self.dataset):
         with mock.patch('warnings.warn') as warn:
             reader_group = CFReader('dummy').cf_group
             promoted = reader_group.promoted
             self.assertEqual(list(promoted.keys()), ['orography'])
             self.assertIs(promoted['orography'].cf_data, self.orography)
             self.assertEqual(warn.call_count, 1)
Example #11
0
 def test_valid_relative_path(self):
     """_dot_path honours the configured dot path when dot runs OK."""
     # Point the System.dot_path option at a path that cannot exist.
     fake_dot = 'not_a_real_path' * 10
     assert not os.path.exists(fake_dot)
     with mock.patch('iris.config.get_option', return_value=fake_dot):
         # Simulate a working installation of dot.
         with mock.patch('subprocess.check_output'):
             outcome = _dot_path()
     self.assertEqual(outcome, fake_dot)
Example #12
0
 def test_unknown(self):
     """An unrecognised staggering code warns and falls back to C-grid."""
     header = self._header(0)
     fake_grid = mock.Mock(return_value=mock.sentinel.grid)
     with mock.patch('iris.fileformats.ff.NewDynamics', fake_grid):
         with mock.patch('warnings.warn') as warn:
             result = header.grid()
     warn.assert_called_with('Staggered grid type: 0 not currently'
                             ' interpreted, assuming standard C-grid')
     self.assertIs(result, mock.sentinel.grid)
Example #13
0
 def test_valid_relative_path_broken_install(self):
     """_dot_path returns None when the dot executable fails."""
     # Point the System.dot_path option at a path that cannot exist.
     fake_dot = 'not_a_real_path' * 10
     assert not os.path.exists(fake_dot)
     with mock.patch('iris.config.get_option', return_value=fake_dot):
         # Simulate a broken installation of dot.
         failure = subprocess.CalledProcessError(-5, 'foo', 'bar')
         with mock.patch('subprocess.check_output', side_effect=failure):
             outcome = _dot_path()
     self.assertIsNone(outcome)
Example #14
0
 def test_auxiliary_ignore(self):
     """An auxiliary variable with a bad dimension is promoted instead
     of dropped, raising a single warning."""
     self.x.dimensions = ['lat', 'wibble']
     with mock.patch('netCDF4.Dataset', return_value=self.dataset):
         with mock.patch('warnings.warn') as warn:
             group = CFReader('dummy').cf_group.promoted
             expected = ['x', 'orography']
             self.assertEqual(set(group.keys()), set(expected))
             for name in expected:
                 self.assertIs(group[name].cf_data, getattr(self, name))
             self.assertEqual(warn.call_count, 1)
Example #15
0
 def test_auxiliary_ignore(self):
     """Both the bad-dimension auxiliary and the formula-term variable
     end up in the promoted group, with one warning."""
     self.x.dimensions = ['lat', 'wibble']
     with mock.patch('netCDF4.Dataset', return_value=self.dataset):
         with mock.patch('warnings.warn') as warn:
             cf_group = CFReader('dummy').cf_group
             names = ['x', 'orography']
             promoted_group = cf_group.promoted
             self.assertEqual(set(promoted_group.keys()), set(names))
             for name in names:
                 self.assertIs(promoted_group[name].cf_data,
                               getattr(self, name))
             self.assertEqual(warn.call_count, 1)
 def test_grib1(self):
     """An edition-1 message is handed to the legacy GribWrapper."""
     message = self._make_test_message([{'editionNumber': 1}])
     mfunc = 'iris.fileformats.grib.GribMessage.messages_from_filename'
     mclass = 'iris.fileformats.grib.GribWrapper'
     with mock.patch(mfunc, return_value=[message]) as mock_func:
         with mock.patch(mclass, spec=GribWrapper) as mock_wrapper:
             field = next(_load_generate(self.fname))
             mock_func.assert_called_once_with(self.fname)
             self.assertIsInstance(field, GribWrapper)
             mock_wrapper.assert_called_once_with(self.message_id,
                                                  grib_fh=self.grib_fh)
Example #17
0
 def test_no_weights(self):
     """The regridder works without weights: setup sees weights=None."""
     source = self.src_grid
     target = self.tgt_grid
     regridder = Regridder(source, target)
     with mock.patch(self.func_setup,
                     return_value=mock.sentinel.regrid_info) as patch_setup:
         with mock.patch(
                 self.func_operate,
                 return_value=self.dummy_slice_result) as patch_operate:
             result = regridder(source)
     patch_setup.assert_called_once_with(source, None, target)
Example #18
0
 def test_promoted_auxiliary_ignore(self):
     """A bad-dimension auxiliary coordinate is still promoted, together
     with the formula-term variable, raising two warnings."""
     # np.float was a deprecated alias of the builtin float (removed in
     # NumPy 1.24); use the builtin directly.
     self.wibble = netcdf_variable('wibble', 'lat wibble', float)
     self.variables['wibble'] = self.wibble
     self.orography.coordinates = 'wibble'
     with mock.patch('netCDF4.Dataset', return_value=self.dataset), \
             mock.patch('warnings.warn') as warn:
         cf_group = CFReader('dummy').cf_group.promoted
         promoted = ['wibble', 'orography']
         self.assertEqual(set(cf_group.keys()), set(promoted))
         for name in promoted:
             self.assertIs(cf_group[name].cf_data, getattr(self, name))
         self.assertEqual(warn.call_count, 2)
Example #19
0
 def test_promoted_auxiliary_ignore(self):
     """A bad-dimension auxiliary coordinate is still promoted, together
     with the formula-term variable, raising two warnings."""
     # np.float was a deprecated alias of the builtin float (removed in
     # NumPy 1.24); use the builtin directly.
     self.wibble = netcdf_variable('wibble', 'lat wibble', float)
     self.variables['wibble'] = self.wibble
     self.orography.coordinates = 'wibble'
     with mock.patch('netCDF4.Dataset', return_value=self.dataset), \
             mock.patch('warnings.warn') as warn:
         cf_group = CFReader('dummy').cf_group.promoted
         promoted = ['wibble', 'orography']
         self.assertEqual(set(cf_group.keys()), set(promoted))
         for name in promoted:
             self.assertIs(cf_group[name].cf_data, getattr(self, name))
         self.assertEqual(warn.call_count, 2)
 def test_no_weights(self):
     """Regridding with no weights supplied passes None to setup."""
     src = self.src_grid
     tgt = self.tgt_grid
     regridder = Regridder(src, tgt)
     with mock.patch(self.func_setup,
                     return_value=mock.sentinel.regrid_info) as patch_setup:
         with mock.patch(
                 self.func_operate,
                 return_value=self.dummy_slice_result) as patch_operate:
             result = regridder(src)
     patch_setup.assert_called_once_with(src, None, tgt)
Example #21
0
    def test_call_structure(self):
        """pp.load must drive _field_gen output through _interpret_fields."""
        fields_result = mock.Mock()
        patch_interpret = mock.patch('iris.fileformats.pp._interpret_fields',
                                     autospec=True, return_value=iter([]))
        patch_field_gen = mock.patch('iris.fileformats.pp._field_gen',
                                     autospec=True,
                                     return_value=fields_result)
        with patch_interpret as interpret, patch_field_gen as field_gen:
            pp.load('mock', read_data=True)

        interpret.assert_called_once_with(fields_result)
        field_gen.assert_called_once_with('mock', read_data_bytes=True)
Example #22
0
 def test_formula_terms_ignore(self):
     """Promotion of a bad formula-terms variable honours the
     netcdf_promote future flag; exactly one warning either way."""
     self.orography.dimensions = ['lat', 'wibble']
     for state in [False, True]:
         with mock.patch('netCDF4.Dataset', return_value=self.dataset), \
                 iris.FUTURE.context(netcdf_promote=state), \
                 mock.patch('warnings.warn') as warn:
             cf_group = CFReader('dummy').cf_group
             if not state:
                 # Promotion disabled: the variable is dropped.
                 self.assertEqual(len(cf_group.promoted), 0)
             else:
                 promoted = cf_group.promoted
                 self.assertEqual(list(promoted.keys()), ['orography'])
                 self.assertIs(promoted['orography'].cf_data,
                               self.orography)
         self.assertEqual(warn.call_count, 1)
Example #23
0
 def test_formula_terms_ignore(self):
     """With promotion enabled one warning is raised; with it disabled
     the variable is dropped and a second warning appears."""
     self.orography.dimensions = ['lat', 'wibble']
     for state in [False, True]:
         with mock.patch('netCDF4.Dataset', return_value=self.dataset), \
                 iris.FUTURE.context(netcdf_promote=state), \
                 mock.patch('warnings.warn') as warn:
             cf_group = CFReader('dummy').cf_group
             if not state:
                 self.assertEqual(len(cf_group.promoted), 0)
                 self.assertEqual(warn.call_count, 2)
             else:
                 promoted = cf_group.promoted
                 self.assertEqual(list(promoted.keys()), ['orography'])
                 self.assertIs(promoted['orography'].cf_data,
                               self.orography)
                 self.assertEqual(warn.call_count, 1)
 def _check(self, hours, minutes, request_warning, expect_warning=False):
     """Run data_cutoff with a patched options/warnings environment and
     verify whether the unsupported-feature warning fires."""
     target = 'iris.fileformats.grib._load_convert.options'
     with mock.patch(target) as options:
         options.warn_on_unsupported = request_warning
         with mock.patch('warnings.warn') as warn:
             # The call being tested.
             data_cutoff(hours, minutes)
     # Check the result.
     if not expect_warning:
         self.assertEqual(len(warn.mock_calls), 0)
     else:
         self.assertEqual(len(warn.mock_calls), 1)
         args, _ = warn.call_args
         self.assertIn('data cutoff', args[0])
Example #25
0
    def test_call_structure(self, _FFHeader):
        """Iterating FF2PP must route _extract_field output through
        _interpret_fields."""
        extracted = mock.Mock()
        patch_interpret = mock.patch('iris.fileformats.pp._interpret_fields',
                                     autospec=True, return_value=iter([]))
        patch_extract = mock.patch('iris.fileformats.ff.FF2PP._extract_field',
                                   autospec=True, return_value=extracted)

        instance = ff.FF2PP('mock')
        with patch_interpret as interpret, patch_extract as extract:
            list(iter(instance))

        interpret.assert_called_once_with(extracted)
        extract.assert_called_once_with(instance)
    def test_string_vectorised(self):
        """A vectorized string-returning function triggers the special
        case handling: the result is cast to a fixed-width string dtype."""
        # PEP 8 (E731): use a def rather than an assigned lambda.
        def fn(coord, v):
            return '0123456789'[:v]

        with mock.patch('numpy.vectorize',
                        return_value=self.vectorised) as vectorise_patch:
            with mock.patch('iris.coords.AuxCoord') as aux_coord_constructor:
                add_categorised_coord(self.cube, 'foobar', self.coord, fn,
                                      units=self.units)

        self.assertEqual(
            aux_coord_constructor.call_args[0][0],
            vectorise_patch(fn, otypes=[object])(self.coord, self.coord.points)
            .astype('|S64'))
Example #27
0
    def test_call_structure(self):
        """pp.load chains _field_gen output through _interpret_fields."""
        generated = mock.Mock()
        with mock.patch('iris.fileformats.pp._interpret_fields',
                        autospec=True,
                        return_value=iter([])) as interpret:
            with mock.patch('iris.fileformats.pp._field_gen',
                            autospec=True,
                            return_value=generated) as field_gen:
                pp.load('mock', read_data=True)

        interpret.assert_called_once_with(generated)
        field_gen.assert_called_once_with('mock', read_data_bytes=True)
    def test_string_vectorised(self):
        """A vectorized string-returning function triggers the special
        case handling: the result is cast to a fixed-width string dtype."""
        # PEP 8 (E731): use a def rather than an assigned lambda.
        def fn(coord, v):
            return '0123456789'[:v]

        with mock.patch('numpy.vectorize',
                        return_value=self.vectorised) as vectorise_patch:
            with mock.patch('iris.coords.AuxCoord') as aux_coord_constructor:
                add_categorised_coord(self.cube, 'foobar', self.coord, fn,
                                      units=self.units)

        self.assertEqual(
            aux_coord_constructor.call_args[0][0],
            vectorise_patch(fn, otypes=[object])(self.coord, self.coord.points)
            .astype('|S64'))
Example #29
0
 def _check(self, hours, minutes, request_warning, expect_warning=False):
     """Exercise data_cutoff and check the optional unsupported warning."""
     patch_target = 'iris.fileformats.grib._load_convert.options'
     with mock.patch(patch_target) as options, \
             mock.patch('warnings.warn') as warn:
         options.warn_on_unsupported = request_warning
         # The call being tested.
         data_cutoff(hours, minutes)
     # Exactly one warning when expected, otherwise none.
     expected_calls = 1 if expect_warning else 0
     self.assertEqual(len(warn.mock_calls), expected_calls)
     if expect_warning:
         message = warn.call_args[0][0]
         self.assertIn('data cutoff', message)
Example #30
0
 def test_33214(self):
     """BitwiseInt(33214) warns once; digits and flags are decoded."""
     with mock.patch('warnings.warn') as warn:
         value = pp.BitwiseInt(33214)
     self.assertEqual(warn.call_count, 1)
     self.assertEqual(value[0], 4)
     self.assertEqual(value.flag1, 0)
     self.assertEqual(value.flag2, 1)
Example #31
0
    def test_soil_level_round_trip(self):
        """Round-trip a soil-level PPField (lbvc=6): load to a cube, save
        back, and check the vertical-coordinate header words survive."""
        # Use pp.load_cubes() to convert a fake PPField into a Cube.
        # NB. Use MagicMock so that SplittableInt header items, such as
        # LBCODE, support len().
        soil_level = 1234
        field = mock.MagicMock(lbvc=6,
                               lblev=soil_level,
                               stash=iris.fileformats.pp.STASH(1, 0, 9),
                               lbuser=[0] * 7,
                               lbrsvd=[0] * 4,
                               brsvd=[0] * 4,
                               brlev=0)
        load = mock.Mock(return_value=iter([field]))
        with mock.patch('iris.fileformats.pp.load', new=load) as load:
            cube = next(iris.fileformats.pp.load_cubes('DUMMY'))

        self.assertIn('soil', cube.standard_name)
        self._test_coord(cube, soil_level, long_name='soil_model_level_number')

        # Now use the save rules to convert the Cube back into a PPField.
        field = iris.fileformats.pp.PPField3()
        field.lbfc = 0
        field.lbvc = 0
        field.brsvd = [None] * 4
        field.brlev = None
        run_save_rules(cube, field)

        # Check the vertical coordinate is as originally specified.
        self.assertEqual(field.lbvc, 6)
        self.assertEqual(field.lblev, soil_level)
        self.assertEqual(field.blev, soil_level)
        self.assertEqual(field.brsvd[0], 0)
        self.assertEqual(field.brlev, 0)
Example #32
0
    def setUp(self):
        """Common fixtures: a mock attribute container, a small numpy
        payload, and a patched ``netCDF4.Dataset`` so no file I/O occurs."""
        self.container = mock.Mock(name='container', attributes={})
        self.data = np.array(1, dtype='int32')

        # The started mock was never used, so do not bind it;
        # stop the patch automatically at teardown.
        patch = mock.patch('netCDF4.Dataset')
        patch.start()
        self.addCleanup(patch.stop)
Example #33
0
    def test_soil_depth_round_trip(self):
        """Round-trip a soil-depth PPField (lbvc=6 with bounds): load to
        a cube, save back, and check blev/brsvd/brlev survive."""
        # Use pp.load_cubes() to convert a fake PPField into a Cube.
        # NB. Use MagicMock so that SplittableInt header items, such as
        # LBCODE, support len().
        lower, point, upper = 1.2, 3.4, 5.6
        brsvd = [lower, 0, 0, 0]
        field = mock.MagicMock(lbvc=6, blev=point,
                               stash=iris.fileformats.pp.STASH(1, 0, 9),
                               lbuser=[0] * 7, lbrsvd=[0] * 4,
                               brsvd=brsvd, brlev=upper)
        load = mock.Mock(return_value=iter([field]))
        with mock.patch('iris.fileformats.pp.load', new=load) as load:
            cube = next(iris.fileformats.pp.load_cubes('DUMMY'))

        self.assertIn('soil', cube.standard_name)
        self._test_coord(cube, point, bounds=[lower, upper],
                         standard_name='depth')

        # Now use the save rules to convert the Cube back into a PPField.
        field = iris.fileformats.pp.PPField3()
        field.lbfc = 0
        field.lbvc = 0
        field.brlev = None
        field.brsvd = [None] * 4
        iris.fileformats.pp._ensure_save_rules_loaded()
        iris.fileformats.pp._save_rules.verify(cube, field)

        # Check the vertical coordinate is as originally specified.
        self.assertEqual(field.lbvc, 6)
        self.assertEqual(field.blev, point)
        self.assertEqual(field.brsvd[0], lower)
        self.assertEqual(field.brlev, upper)
Example #34
0
 def test_two_plots_with_independent_axes(self):
     """Two plots sliced over different dimensions: each slider change
     triggers exactly one additional render, for the matching plot."""
     c1 = Contour(self.cube, self.axes)
     levels = 5
     # Second plot sliced over model_level_number instead of time.
     other = _add_levels(self.cube, levels)[:, 0]
     c2 = Contour(other, self.axes)
     with mock.patch('cube_browser.Contour.__call__') as func:
         browser = Browser([c1, c2])
         browser.display()
         # Check the initial render.
         self.assertEqual(func.call_count, 2)
         expected = [mock.call(time=self.value),
                     mock.call(model_level_number=self.value)]
         func.assert_has_calls(expected)
         # Now simulate a 'time' slider change.
         slider = browser._slider_by_name['time']
         change = dict(owner=slider)
         browser.on_change(change)
         self.assertEqual(func.call_count, 3)
         expected.append(mock.call(time=self.value))
         func.assert_has_calls(expected)
         # Now simulate a 'model_level_number' slider change.
         slider = browser._slider_by_name['model_level_number']
         change = dict(owner=slider)
         browser.on_change(change)
         self.assertEqual(func.call_count, 4)
         expected.append(mock.call(model_level_number=self.value))
         func.assert_has_calls(expected)
Example #35
0
 def test_33214(self):
     """BitwiseInt(33214) warns on construction and decodes its first
     digit and flag bits as expected."""
     with mock.patch('warnings.warn') as warn:
         value = pp.BitwiseInt(33214)
     # Construction of the deprecated type issues exactly one warning.
     self.assertEqual(warn.call_count, 1)
     self.assertEqual(value[0], 4)
     for attribute, expected in [('flag1', 0), ('flag2', 1)]:
         self.assertEqual(getattr(value, attribute), expected)
Example #36
0
    def test_standard_operators(self):
        """All rich comparisons and membership tests on a BitwiseInt
        behave like those of the plain integer it wraps."""
        with mock.patch('warnings.warn') as warn:
            value = pp.BitwiseInt(323)
        # The deprecated constructor warns exactly once.
        self.assertEqual(warn.call_count, 1)

        # Comparisons that must hold for a wrapped value of 323.
        truthy = [value == 323, value != 324,
                  value >= 323, value > 322,
                  value <= 323, value < 324,
                  value in [323]]
        for outcome in truthy:
            self.assertTrue(outcome)

        # Comparisons that must not hold for a wrapped value of 323.
        falsy = [value == 324, value != 323,
                 value >= 324, value > 323,
                 value <= 322, value < 323,
                 value in [324]]
        for outcome in falsy:
            self.assertFalse(outcome)
Example #37
0
 def test_many(self):
     """Reading .flag2 warns once per access and reflects bit 1 of the
     wrapped value, for values 0..99."""
     for value in range(100):
         # Construct outside the patch so only the attribute access
         # contributes to the warning count.
         lbproc = _LBProc(value)
         with mock.patch('warnings.warn') as warn:
             result = lbproc.flag2
         self.assertEqual(warn.call_count, 1)
         self.assertEqual(result, bool(value & 2))
    def check_mdtol(self, mdtol=None):
        """Check that the regridder forwards `mdtol` (defaulting to 1) to
        the underlying area-weighted regrid function, regrids the cube it
        is called with (not the grid-definition cube), and returns the
        regrid result unchanged.
        """
        src_grid, target_grid = self.grids()
        if mdtol is None:
            # Omitting mdtol should be equivalent to passing mdtol=1.
            regridder = AreaWeightedRegridder(src_grid, target_grid)
            mdtol = 1
        else:
            regridder = AreaWeightedRegridder(src_grid,
                                              target_grid,
                                              mdtol=mdtol)

        # Make a new cube to regrid with different data so we can
        # distinguish between regridding the original src grid
        # definition cube and the cube passed to the regridder.
        src = src_grid.copy()
        src.data += 10

        with mock.patch(
                'iris.experimental.regrid.'
                'regrid_area_weighted_rectilinear_src_and_grid',
                return_value=mock.sentinel.result) as regrid:
            result = regridder(src)

        # The underlying regrid function is delegated to exactly once ...
        self.assertEqual(regrid.call_count, 1)
        _, args, kwargs = regrid.mock_calls[0]

        # ... with the call-time cube, the target grid, and the expected
        # mdtol keyword, and its return value is passed straight through.
        self.assertEqual(args[0], src)
        self.assertEqual(self.extract_grid(args[1]),
                         self.extract_grid(target_grid))
        self.assertEqual(kwargs, {'mdtol': mdtol})
        self.assertIs(result, mock.sentinel.result)
Example #39
0
 def _check(self, request_warning):
     """Run product_definition_template_1 and verify it appends the
     expected cell method and scalar 'realization' coordinate to the
     metadata, warning about unsupported keys only when requested.
     """
     this = 'iris.fileformats.grib._load_convert.options'
     with mock.patch(this, warn_on_unsupported=request_warning):
         metadata = deepcopy(self.metadata)
         perturbationNumber = 666
         section = {'perturbationNumber': perturbationNumber}
         forecast_reference_time = mock.sentinel.forecast_reference_time
         # The call being tested.
         product_definition_template_1(section, metadata,
                                       forecast_reference_time)
         # The metadata should gain the template's cell method plus a
         # scalar 'realization' auxiliary coordinate.
         expected = deepcopy(self.metadata)
         expected['cell_methods'].append(self.cell_method)
         realization = DimCoord(perturbationNumber,
                                standard_name='realization',
                                units='no_unit')
         expected['aux_coords_and_dims'].append((realization, None))
         self.assertEqual(metadata, expected)
         if request_warning:
             # Each unsupported item must be warned about exactly once.
             warn_msgs = [mcall[1][0] for mcall in warnings.warn.mock_calls]
             expected_msgs = ['type of ensemble', 'number of forecasts']
             for emsg in expected_msgs:
                 matches = [wmsg for wmsg in warn_msgs if emsg in wmsg]
                 self.assertEqual(len(matches), 1)
                 warn_msgs.remove(matches[0])
         else:
             # No warnings when warn_on_unsupported is off.
             self.assertEqual(len(warnings.warn.mock_calls), 0)
    def setUp(self):
        """Build mock CF coordinate/engine fixtures plus a deferred-load
        patch used by the auxiliary-coordinate building tests.
        """
        # Create coordinate cf variables and pyke engine.
        points = np.arange(6).reshape(2, 3)
        self.cf_coord_var = mock.Mock(
            dimensions=('foo', 'bar'),
            cf_name='wibble',
            standard_name=None,
            long_name='wibble',
            units='m',
            shape=points.shape,
            dtype=points.dtype,
            __getitem__=lambda self, key: points[key])

        self.engine = mock.Mock(
            cube=mock.Mock(),
            cf_var=mock.Mock(dimensions=('foo', 'bar')),
            filename='DUMMY',
            provides=dict(coordinates=[]))

        # Create patch for deferred loading that prevents attempted
        # file access. This assumes that self.cf_bounds_var is
        # defined in the test case.
        def patched__getitem__(proxy_self, keys):
            # Serve data for whichever known variable the proxy names;
            # anything else is an error in the test setup.
            # (Removed a dead `variable = None` local that was never read.)
            for var in (self.cf_coord_var, self.cf_bounds_var):
                if proxy_self.variable_name == var.cf_name:
                    return var[keys]
            raise RuntimeError()

        self.deferred_load_patch = mock.patch(
            'iris.fileformats.netcdf.NetCDFDataProxy.__getitem__',
            new=patched__getitem__)
    def test_fastest_varying_vertex_dim(self):
        """Build an auxiliary coordinate whose bounds variable has the
        vertex ('nv') dimension varying fastest (last axis), and verify
        both the built coordinate and the engine bookkeeping.
        """
        bounds = np.arange(24).reshape(2, 3, 4)
        self.cf_bounds_var = mock.Mock(
            dimensions=('foo', 'bar', 'nv'),
            cf_name='wibble_bnds',
            shape=bounds.shape,
            dtype=bounds.dtype,
            __getitem__=lambda self, key: bounds[key])

        # The coordinate built from cf_coord_var should carry these
        # bounds through unchanged.
        expected_coord = AuxCoord(
            self.cf_coord_var[:],
            long_name=self.cf_coord_var.long_name,
            var_name=self.cf_coord_var.cf_name,
            units=self.cf_coord_var.units,
            bounds=bounds)

        get_cf_bounds_var_patch = mock.patch(
            'iris.fileformats._pyke_rules.compiled_krb.'
            'fc_rules_cf_fc.get_cf_bounds_var',
            return_value=self.cf_bounds_var)

        # Asserts must lie within context manager because of deferred loading.
        with self.deferred_load_patch, get_cf_bounds_var_patch:
            build_auxiliary_coordinate(self.engine, self.cf_coord_var)

            # Test that expected coord is built and added to cube.
            self.engine.cube.add_aux_coord.assert_called_with(
                expected_coord, [0, 1])

            # Test that engine.provides container is correctly populated.
            expected_list = [(expected_coord, self.cf_coord_var.cf_name)]
            self.assertEqual(self.engine.provides['coordinates'],
                             expected_list)
Example #42
0
    def setUp(self):
        """Create a fixture attribute container and data payload, and
        stub out netCDF4.Dataset so no real file is ever opened.
        """
        self.container = mock.Mock(name='container', attributes={})
        self.data = np.array(1, dtype='int32')

        # Patch the netCDF4 Dataset for the duration of each test; only
        # the patching side-effect matters, so the mock returned by
        # start() (previously bound to an unused local) is discarded.
        patch = mock.patch('netCDF4.Dataset')
        patch.start()
        self.addCleanup(patch.stop)
Example #43
0
 def test_negative_number(self):
     """BitwiseInt must reject negative values with a ValueError while
     still emitting its single construction warning.

     The previous try/except form passed silently if no exception was
     raised at all; assertRaises makes the expected failure mandatory.
     """
     with mock.patch('warnings.warn') as warn:
         with self.assertRaises(ValueError) as context:
             pp.BitwiseInt(-5)
     # The exact error message is part of the contract under test.
     self.assertEqual(str(context.exception),
                      'Negative numbers not supported with splittable '
                      'integers object')
     self.assertEqual(warn.call_count, 1)
Example #44
0
 def test_fractional_hours(self, mock_set):
     """A fractional (25.9 hour) time increment is truncated to an
     integer when encoded, with a warning about the truncation."""
     cell_method = CellMethod("sum", "time", "25.9 hours")
     with mock.patch("warnings.warn") as warn:
         set_time_increment(cell_method, mock.sentinel.grib)
     # The truncation from 25.9 to 25 must be warned about ...
     warn.assert_called_once_with("Truncating floating point timeIncrement " "25.9 to integer value 25")
     # ... and the grib keys set with the expected unit indicator (1)
     # and the truncated increment value (25).
     mock_set.assert_any_call(mock.sentinel.grib, "indicatorOfUnitForTimeIncrement", 1)
     mock_set.assert_any_call(mock.sentinel.grib, "timeIncrement", 25)
Example #45
0
    def test_float64(self):
        """Saving >f8 data down-casts to >f4, warning once, so the saved
        bytes match a direct >f4 save."""

        def field_checksum(data):
            # Checksum of the file produced by saving `data` in a field
            # whose header items are all zeroed (irrelevant here).
            field = TestPPField()
            for name in ('dummy1', 'dummy2', 'dummy3', 'dummy4',
                         'lbtim', 'lblrec', 'lbrow', 'lbext',
                         'lbpack', 'lbuser', 'brsvd', 'bdatum'):
                setattr(field, name, 0)
            field.data = data
            with self.temp_filename('.pp') as temp_filename:
                with open(temp_filename, 'wb') as pp_file:
                    field.save(pp_file)
                return self.file_checksum(temp_filename)

        data_64 = np.linspace(0, 1, num=10, endpoint=False).reshape(2, 5)
        checksum_32 = field_checksum(data_64.astype('>f4'))
        with mock.patch('warnings.warn') as warn:
            checksum_64 = field_checksum(data_64.astype('>f8'))

        # Identical checksums prove the float64 save was down-cast.
        self.assertEqual(checksum_32, checksum_64)
        warn.assert_called_once_with(
            'Downcasting array precision from float64 to float32 for save.'
            'If float64 precision is required then please save in a '
            'different format')
Example #46
0
    def test_standard_operators(self):
        """Verify BitwiseInt supports the full set of rich comparisons
        and membership tests, matching plain-integer semantics.
        """
        with mock.patch('warnings.warn') as warn:
            t = pp.BitwiseInt(323)
        # The deprecated constructor should warn exactly once.
        self.assertEqual(warn.call_count, 1)

        # Equality / inequality.
        self.assertTrue(t == 323)
        self.assertFalse(t == 324)

        self.assertFalse(t != 323)
        self.assertTrue(t != 324)

        # Ordering comparisons against neighbouring values.
        self.assertTrue(t >= 323)
        self.assertFalse(t >= 324)

        self.assertFalse(t > 323)
        self.assertTrue(t > 322)

        self.assertTrue(t <= 323)
        self.assertFalse(t <= 322)

        self.assertFalse(t < 323)
        self.assertTrue(t < 324)

        # Membership, which relies on equality against list elements.
        self.assertTrue(t in [323])
        self.assertFalse(t in [324])
    def check_mdtol(self, mdtol=None):
        """Assert that the regridder forwards mdtol (default 1) and the
        call-time cube to the underlying area-weighted regrid function,
        returning its result unchanged."""
        src_grid, target_grid = self.grids()
        if mdtol is None:
            regridder = AreaWeightedRegridder(src_grid, target_grid)
            mdtol = 1
        else:
            regridder = AreaWeightedRegridder(src_grid, target_grid,
                                              mdtol=mdtol)

        # Regrid a copy with perturbed data so the cube handed to the
        # regridder is distinguishable from the grid-definition cube.
        source_cube = src_grid.copy()
        source_cube.data += 10

        target = ('iris.experimental.regrid.'
                  'regrid_area_weighted_rectilinear_src_and_grid')
        with mock.patch(target, return_value=mock.sentinel.result) as regrid:
            outcome = regridder(source_cube)

        # Exactly one delegation, with the expected arguments, and the
        # delegate's result passed straight through.
        self.assertEqual(regrid.call_count, 1)
        _, call_args, call_kwargs = regrid.mock_calls[0]
        self.assertEqual(call_args[0], source_cube)
        self.assertEqual(self.extract_grid(call_args[1]),
                         self.extract_grid(target_grid))
        self.assertEqual(call_kwargs, {'mdtol': mdtol})
        self.assertIs(outcome, mock.sentinel.result)
Example #48
0
 def test_two_plots_with_independent_axes(self):
     """Check that two plots sharing a Browser but sliced over different
     dimensions ('time' vs 'model_level_number') are each redrawn only
     in response to their own slider.
     """
     c1 = Contour(self.cube, self.axes)
     levels = 5
     other = _add_levels(self.cube, levels)[:, 0]
     c2 = Contour(other, self.axes)
     with mock.patch('cube_browser.Contour.__call__') as func:
         browser = Browser([c1, c2])
         browser.display()
         # Check the initial render.
         self.assertEqual(func.call_count, 2)
         expected = [
             mock.call(time=self.value),
             mock.call(model_level_number=self.value)
         ]
         func.assert_has_calls(expected)
         # Now simulate a 'time' slider change.
         slider = browser._slider_by_name['time']
         change = dict(owner=slider)
         browser.on_change(change)
         self.assertEqual(func.call_count, 3)
         expected.append(mock.call(time=self.value))
         func.assert_has_calls(expected)
         # Now simulate a 'model_level_number' slider change.
         slider = browser._slider_by_name['model_level_number']
         change = dict(owner=slider)
         browser.on_change(change)
         self.assertEqual(func.call_count, 4)
         expected.append(mock.call(model_level_number=self.value))
         func.assert_has_calls(expected)
Example #49
0
 def test_deferred_bytes(self):
     """Check that deferred array bytes from core_data are turned into
     field data backed by a correctly-configured PPDataProxy."""
     # Check that a field with deferred array bytes in core_data gets a
     # dask array.
     fname = mock.sentinel.fname
     position = mock.sentinel.position
     n_bytes = mock.sentinel.n_bytes
     newbyteorder = mock.Mock(return_value=mock.sentinel.dtype)
     dtype = mock.Mock(newbyteorder=newbyteorder)
     deferred_bytes = (fname, position, n_bytes, dtype)
     core_data = mock.MagicMock(return_value=deferred_bytes)
     field = mock.Mock(core_data=core_data)
     data_shape = (100, 120)
     land_mask = mock.Mock()
     proxy = mock.Mock(dtype=np.dtype('f4'),
                       shape=data_shape,
                       spec=pp.PPDataProxy)
     # We can't directly inspect the concrete data source underlying
     # the dask array, so instead we patch the proxy creation and check it's
     # being created and invoked correctly.
     with mock.patch('iris.fileformats.pp.PPDataProxy') as PPDataProxy:
         PPDataProxy.return_value = proxy
         pp._create_field_data(field, data_shape, land_mask)
     # The data should be assigned via field.data. As this is a mock object
     # we can check the attribute directly.
     self.assertEqual(field.data.shape, data_shape)
     self.assertEqual(field.data.dtype, np.dtype('f4'))
     # Is it making use of a correctly configured proxy?
     # NB. We know it's *using* the result of this call because
     # that's where the dtype came from above.
     PPDataProxy.assert_called_once_with(
         (data_shape), dtype, fname, position, n_bytes, field.raw_lbpack,
         field.boundary_packing, field.bmdi, land_mask)
Example #50
0
    def test_invalid_units(self):
        """An invalid units string returned by the converter ends up in
        the cube's 'invalid_units' attribute and raises one warning."""
        # Mock converter() function that returns an invalid units string
        # amongst the collection of other (absent) metadata elements.
        bad_units = 'wibble'  # Invalid unit.
        source_attributes = dict(source='test')
        metadata = iris.fileformats.rules.ConversionMetadata(
            None,               # factories
            None,               # references
            None,               # standard_name
            None,               # long_name
            bad_units,          # units
            source_attributes,  # attributes
            None,               # cell_methods
            None,               # dim_coords_and_dims
            None)               # aux_coords_and_dims
        converter = mock.Mock(return_value=metadata)

        field = mock.Mock()
        with mock.patch('warnings.warn') as warn:
            cube, factories, references = _make_cube(field, converter)

        # Check attributes dictionary is correctly populated: the
        # original attributes plus the rejected units string.
        expected_attributes = dict(source_attributes,
                                   invalid_units=bad_units)
        self.assertEqual(cube.attributes, expected_attributes)

        # Check warning was raised.
        self.assertEqual(warn.call_count, 1)
        warning_msg = warn.call_args[0][0]
        self.assertIn('invalid units', warning_msg)
Example #51
0
    def test_potential_temperature_level_round_trip(self):
        """Save+load round trip for data on 'potential temperature'
        (lbvc=19) levels preserves the level value and coordinate name.
        """
        # Check save+load for data on 'potential temperature' levels.

        # Use pp.load_cubes() to convert a fake PPField into a Cube.
        # NB. Use MagicMock so that SplittableInt header items, such as
        # LBCODE, support len().
        potm_value = 22.5
        field = mock.MagicMock(lbvc=19,
                               blev=potm_value,
                               lbuser=[0] * 7,
                               lbrsvd=[0] * 4)
        load = mock.Mock(return_value=iter([field]))
        with mock.patch('iris.fileformats.pp.load', new=load) as load:
            cube = next(iris.fileformats.pp.load_cubes('DUMMY'))

        # Loading should build the expected vertical coordinate.
        self._test_coord(cube,
                         potm_value,
                         standard_name='air_potential_temperature')

        # Now use the save rules to convert the Cube back into a PPField.
        field = iris.fileformats.pp.PPField3()
        field.lbfc = 0
        field.lbvc = 0
        run_save_rules(cube, field)

        # Check the vertical coordinate is as originally specified.
        self.assertEqual(field.lbvc, 19)
        self.assertEqual(field.blev, potm_value)
 def _check(self, request_warning):
     """Run product_definition_template_1 and verify its metadata
     updates, with warnings emitted only when requested."""
     options_target = 'iris.fileformats.grib._load_convert.options'
     with mock.patch(options_target, warn_on_unsupported=request_warning):
         metadata = deepcopy(self.metadata)
         member_number = 666
         section = {'perturbationNumber': member_number}
         frt = mock.sentinel.forecast_reference_time
         # Invoke the function under test.
         product_definition_template_1(section, metadata, frt)
         # Expect the template's cell method plus a scalar
         # 'realization' auxiliary coordinate.
         expected = deepcopy(self.metadata)
         expected['cell_methods'].append(self.cell_method)
         coord = DimCoord(member_number,
                          standard_name='realization',
                          units='no_unit')
         expected['aux_coords_and_dims'].append((coord, None))
         self.assertEqual(metadata, expected)
         if not request_warning:
             # Warnings are suppressed unless explicitly requested.
             self.assertEqual(len(warnings.warn.mock_calls), 0)
         else:
             # Each unsupported item is warned about exactly once.
             messages = [mcall[1][0]
                         for mcall in warnings.warn.mock_calls]
             for fragment in ('type of ensemble', 'number of forecasts'):
                 matching = [msg for msg in messages if fragment in msg]
                 self.assertEqual(len(matching), 1)
                 messages.remove(matching[0])
Example #53
0
    def check_non_trivial_coordinate_warning(self, field):
        """Extract a field (dataset_type 5) whose x/y coordinate arrays
        are not regularly spaced, and check the field's grid spacing is
        left unchanged while the boundary-condition warning is raised.
        """
        field.lbegin = 0
        field.lbrow = 10
        field.lbnpt = 12
        # stash m01s31i020
        field.lbuser = [None, None, 121416, 20, None, None, 1]
        orig_bdx, orig_bdy = field.bdx, field.bdy

        # Irregular spacing (1, 2, 6) is what makes the coordinates
        # 'non-trivial' and triggers the warning path.
        x = np.array([1, 2, 6])
        y = np.array([1, 2, 6])
        with self.mock_for_extract_field([field], x, y) as ff2pp:
            ff2pp._ff_header.dataset_type = 5
            with mock.patch('warnings.warn') as warn:
                list(ff2pp._extract_field())

        # Check the values are unchanged.
        self.assertEqual(field.bdy, orig_bdy)
        self.assertEqual(field.bdx, orig_bdx)

        # Check a warning was raised with a suitable message.
        warn_error_tmplt = 'Unexpected warning message: {}'
        non_trivial_coord_warn_msg = warn.call_args[0][0]
        msg = ('The x or y coordinates of your boundary condition field may '
               'be incorrect, not having taken into account the boundary '
               'size.')
        self.assertTrue(non_trivial_coord_warn_msg.startswith(msg),
                        warn_error_tmplt.format(non_trivial_coord_warn_msg))