def test_factory_defns_one_missing_term(self):
    """Cubes whose hybrid-pressure factories differ by a missing delta
    term must be reported as non-matching on 'cube.aux_factories'."""
    complete_factory = mock.MagicMock(spec=HybridPressureFactory)
    self.cube1.add_aux_factory(complete_factory)
    # Build a second factory identical in spec but with no delta term.
    incomplete_factory = mock.MagicMock(spec=HybridPressureFactory)
    incomplete_factory.delta = None
    self.cube2.add_aux_factory(incomplete_factory)
    self.check_fail("cube.aux_factories", "differ")
def test_potential_temperature_level_round_trip(self):
    # Check save+load for data on 'potential temperature' levels.
    # Use pp.load_cubes() to convert a fake PPField into a Cube.
    # NB. Use MagicMock so that SplittableInt header items, such as
    # LBCODE, support len().
    potm_value = 22.5
    mock_data = np.zeros(1)
    mock_core_data = mock.MagicMock(return_value=mock_data)
    # lbvc=19 marks the field as being on a potential-temperature level,
    # with the level value carried in blev.
    field = mock.MagicMock(lbvc=19, blev=potm_value,
                           lbuser=[0] * 7, lbrsvd=[0] * 4,
                           core_data=mock_core_data,
                           realised_dtype=mock_data.dtype)
    load = mock.Mock(return_value=iter([field]))
    with mock.patch('iris.fileformats.pp.load', new=load):
        cube = next(iris.fileformats.pp.load_cubes('DUMMY'))
    self._test_coord(cube, potm_value,
                     standard_name='air_potential_temperature')

    # Now use the save rules to convert the Cube back into a PPField.
    field = iris.fileformats.pp.PPField3()
    field.lbfc = 0
    field.lbvc = 0
    field = verify(cube, field)

    # Check the vertical coordinate is as originally specified.
    self.assertEqual(field.lbvc, 19)
    self.assertEqual(field.blev, potm_value)
def setUp(self):
    # Create dummy pyke engine.
    self.engine = mock.Mock(cube=mock.Mock(),
                            cf_var=mock.Mock(dimensions=('foo', 'bar')),
                            filename='DUMMY',
                            provides=dict(coordinates=[]))

    # Fake 1d coordinate variable over dimension 'foo'; item access is
    # served from a plain numpy array via the __getitem__ lambda.
    points = np.arange(6)
    self.cf_coord_var = mock.Mock(
        dimensions=('foo', ),
        scale_factor=1,
        add_offset=0,
        cf_name='wibble',
        cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None)),
        standard_name=None,
        long_name='wibble',
        units='days since 1970-01-01',
        calendar=None,
        shape=points.shape,
        dtype=points.dtype,
        __getitem__=lambda self, key: points[key])

    # Matching fake bounds variable with a (6, 2) bounds array.
    bounds = np.arange(12).reshape(6, 2)
    self.cf_bounds_var = mock.Mock(
        dimensions=('x', 'nv'),
        scale_factor=1,
        add_offset=0,
        cf_name='wibble_bnds',
        cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None)),
        shape=bounds.shape,
        dtype=bounds.dtype,
        __getitem__=lambda self, key: bounds[key])
    self.bounds = bounds

    # Create patch for deferred loading that prevents attempted
    # file access. This assumes that self.cf_coord_var and
    # self.cf_bounds_var are defined in the test case.
    def patched__getitem__(proxy_self, keys):
        # Resolve a proxy read against whichever fake variable it names.
        for var in (self.cf_coord_var, self.cf_bounds_var):
            if proxy_self.variable_name == var.cf_name:
                return var[keys]
        raise RuntimeError()
    self.patch('iris.fileformats.netcdf.NetCDFDataProxy.__getitem__',
               new=patched__getitem__)

    # Patch the helper function that retrieves the bounds cf variable.
    # This avoids the need for setting up further mocking of cf objects.
    self.use_climatology_bounds = False  # Set this when you need to.

    def get_cf_bounds_var(coord_var):
        return self.cf_bounds_var, self.use_climatology_bounds

    self.patch('iris.fileformats._pyke_rules.compiled_krb.'
               'fc_rules_cf_fc.get_cf_bounds_var',
               new=get_cf_bounds_var)
def _make_cf_var(self, dtype):
    """Return a mock CFVariable wrapping a mock netCDF4.Variable of *dtype*."""
    nc_variable = mock.Mock(spec=netCDF4.Variable, dtype=dtype)
    return mock.MagicMock(spec=iris.fileformats.cf.CFVariable,
                          cf_data=nc_variable,
                          cf_name='DUMMY_VAR',
                          cf_group=mock.Mock(),
                          dtype=dtype,
                          shape=mock.MagicMock())
def _make(self, chunksizes):
    """Return a mock CFVariable whose cf_data reports *chunksizes*."""
    data = mock.Mock(_FillValue=None)
    data.chunking = mock.MagicMock(return_value=chunksizes)
    variable = mock.MagicMock(spec=iris.fileformats.cf.CFVariable,
                              dtype=np.dtype('i4'),
                              cf_data=data,
                              cf_name='DUMMY_VAR',
                              shape=self.shape)
    return variable
def _make(self, attrs):
    """Return a mock CFVariable whose unused-attributes query yields *attrs*."""
    unused_attrs = mock.Mock(return_value=attrs)
    variable = mock.MagicMock(spec=iris.fileformats.cf.CFVariable,
                              dtype=np.dtype('i4'),
                              cf_data=mock.Mock(),
                              cf_name='DUMMY_VAR',
                              cf_group=mock.Mock(),
                              cf_attrs_unused=unused_attrs,
                              shape=mock.MagicMock())
    return variable
def _mock_field(**kwargs):
    """Build a mock PPField-like object with benign header defaults.

    Any default attribute may be overridden via keyword arguments.
    """
    data = np.zeros(1)
    core_data = mock.MagicMock(return_value=data)
    timestamp = dict(year=1990, month=1, day=3)
    field = mock.MagicMock(lbuser=[0] * 7,
                           lbrsvd=[0] * 4,
                           brsvd=[0] * 4,
                           brlev=0,
                           t1=mock.MagicMock(**timestamp),
                           t2=mock.MagicMock(**timestamp),
                           core_data=core_data,
                           realised_dtype=data.dtype)
    field.configure_mock(**kwargs)
    return field
def _make(self, attrs):
    """Mock CFVariable with shape (1,), no fill value, and unused *attrs*."""
    shape = (1, )
    unused_attrs = mock.Mock(return_value=attrs)
    data = mock.Mock(_FillValue=None)
    data.chunking = mock.MagicMock(return_value=shape)
    return mock.MagicMock(spec=iris.fileformats.cf.CFVariable,
                          dtype=np.dtype('i4'),
                          cf_data=data,
                          cf_name='DUMMY_VAR',
                          cf_group=mock.Mock(),
                          cf_attrs_unused=unused_attrs,
                          shape=shape)
def test_cell_methods(self):
    """Each generated cube must receive its corresponding cell method."""
    header = mock.MagicMock()
    column_headings = {'Species': [1, 2, 3],
                       'Quantity': [4, 5, 6],
                       "Units": ['m', 'm', 'm'],
                       'Z': [1, 2, 3]}
    coords = mock.MagicMock()
    data_arrays = [mock.Mock(), mock.Mock()]
    cell_methods = ["cell_method_1", "cell_method_2"]
    # Stub out cube construction and height-coord handling.
    self.patch('iris.fileformats.name_loaders._cf_height_from_name')
    self.patch('iris.cube.Cube')
    cubes = list(_generate_cubes(header, column_headings, coords,
                                 data_arrays, cell_methods))
    for cube, method in zip(cubes, cell_methods):
        cube.assert_has_calls([mock.call.add_cell_method(method)])
def test_soil_levels(self):
    """lbvc=6 with zeroed bounds headers -> unbounded soil model level."""
    model_level = 1234
    soil_field = mock.MagicMock(lbvc=6, lblev=model_level,
                                brsvd=[0, 0], brlev=0)
    self._test_for_coord(soil_field, convert,
                         self._is_soil_model_level_number_coord,
                         expected_points=[model_level],
                         expected_bounds=None)
def test_365_calendar(self):
    # A field whose calendar attribute is 365-day: the converted time
    # coordinate must carry the '365_day' calendar.
    f = mock.MagicMock(lbtim=SplittableInt(4, {
        'ia': 2,
        'ib': 1,
        'ic': 0
    }),
                       lbyr=2013, lbmon=1, lbdat=1, lbhr=12, lbmin=0,
                       lbsec=0,
                       t1=cftime.datetime(2013, 1, 1, 12, 0, 0),
                       t2=cftime.datetime(2013, 1, 2, 12, 0, 0),
                       spec=PPField3)
    # Bind the real PPField3.time_unit method onto the mock so it builds
    # a genuine unit from the mocked calendar attribute below.
    f.time_unit = six.create_bound_method(PPField3.time_unit, f)
    f.calendar = cf_units.CALENDAR_365_DAY
    (factories, references, standard_name, long_name, units, attributes,
     cell_methods, dim_coords_and_dims, aux_coords_and_dims) = convert(f)

    def is_t_coord(coord_and_dims):
        # Select only the 'time' coordinate from the aux coords.
        coord, dims = coord_and_dims
        return coord.standard_name == 'time'

    coords_and_dims = list(filter(is_t_coord, aux_coords_and_dims))
    self.assertEqual(len(coords_and_dims), 1)
    coord, dims = coords_and_dims[0]
    self.assertEqual(guess_coord_axis(coord), 'T')
    self.assertEqual(coord.units.calendar, '365_day')
def test_soil_depth_round_trip(self):
    # Use pp.load_cubes() to convert a fake PPField into a Cube.
    # NB. Use MagicMock so that SplittableInt header items, such as
    # LBCODE, support len().
    lower, point, upper = 1.2, 3.4, 5.6
    brsvd = [lower, 0, 0, 0]
    # lbvc=6 with non-zero brsvd[0]/brlev -> bounded soil depth coord.
    field = mock.MagicMock(lbvc=6, blev=point,
                           stash=iris.fileformats.pp.STASH(1, 0, 9),
                           lbuser=[0] * 7, lbrsvd=[0] * 4,
                           brsvd=brsvd, brlev=upper)
    load = mock.Mock(return_value=iter([field]))
    with mock.patch('iris.fileformats.pp.load', new=load) as load:
        cube = next(iris.fileformats.pp.load_cubes('DUMMY'))
    self.assertIn('soil', cube.standard_name)
    self._test_coord(cube, point, bounds=[lower, upper],
                     standard_name='depth')

    # Now use the save rules to convert the Cube back into a PPField.
    field = iris.fileformats.pp.PPField3()
    field.lbfc = 0
    field.lbvc = 0
    field.brlev = None
    field.brsvd = [None] * 4
    iris.fileformats.pp._ensure_save_rules_loaded()
    iris.fileformats.pp._save_rules.verify(cube, field)

    # Check the vertical coordinate is as originally specified.
    self.assertEqual(field.lbvc, 6)
    self.assertEqual(field.blev, point)
    self.assertEqual(field.brsvd[0], lower)
    self.assertEqual(field.brlev, upper)
def test_cached(self):
    # Make sure attribute access to the underlying netCDF4.Variable
    # is cached.
    name = 'foo'
    nc_var = mock.MagicMock()
    cf_var = cf.CFAncillaryDataVariable(name, nc_var)
    # Construction alone performs exactly one ncattrs() enquiry.
    self.assertEqual(nc_var.ncattrs.call_count, 1)

    # Accessing a netCDF attribute should result in no further calls
    # to nc_var.ncattrs() and the creation of an attribute on the
    # cf_var.
    # NB. Can't use hasattr() because that triggers the attribute
    # to be created!
    self.assertTrue('coordinates' not in cf_var.__dict__)
    _ = cf_var.coordinates
    self.assertEqual(nc_var.ncattrs.call_count, 1)
    self.assertTrue('coordinates' in cf_var.__dict__)

    # Trying again results in no change.
    _ = cf_var.coordinates
    self.assertEqual(nc_var.ncattrs.call_count, 1)
    self.assertTrue('coordinates' in cf_var.__dict__)

    # Trying another attribute results in just a new attribute.
    self.assertTrue('standard_name' not in cf_var.__dict__)
    _ = cf_var.standard_name
    self.assertEqual(nc_var.ncattrs.call_count, 1)
    self.assertTrue('standard_name' in cf_var.__dict__)
def test_soil_level_round_trip(self):
    # Use pp.load_cubes() to convert a fake PPField into a Cube.
    # NB. Use MagicMock so that SplittableInt header items, such as
    # LBCODE, support len().
    soil_level = 1234
    # lbvc=6 with zeroed brsvd/brlev -> unbounded soil model level.
    field = mock.MagicMock(lbvc=6, lblev=soil_level,
                           stash=iris.fileformats.pp.STASH(1, 0, 9),
                           lbuser=[0] * 7, lbrsvd=[0] * 4,
                           brsvd=[0] * 4, brlev=0)
    load = mock.Mock(return_value=iter([field]))
    with mock.patch('iris.fileformats.pp.load', new=load) as load:
        cube = next(iris.fileformats.pp.load_cubes('DUMMY'))
    self.assertIn('soil', cube.standard_name)
    self._test_coord(cube, soil_level,
                     long_name='soil_model_level_number')

    # Now use the save rules to convert the Cube back into a PPField.
    field = iris.fileformats.pp.PPField3()
    field.lbfc = 0
    field.lbvc = 0
    field.brsvd = [None] * 4
    field.brlev = None
    run_save_rules(cube, field)

    # Check the vertical coordinate is as originally specified.
    self.assertEqual(field.lbvc, 6)
    self.assertEqual(field.lblev, soil_level)
    self.assertEqual(field.blev, soil_level)
    self.assertEqual(field.brsvd[0], 0)
    self.assertEqual(field.brlev, 0)
def test_deferred_bytes(self):
    # Check that a field with deferred array bytes in core_data gets a
    # dask array.
    fname = mock.sentinel.fname
    position = mock.sentinel.position
    n_bytes = mock.sentinel.n_bytes
    # Mock dtype whose newbyteorder() yields a sentinel; this lets us
    # confirm below that the proxy was built from this exact dtype.
    newbyteorder = mock.Mock(return_value=mock.sentinel.dtype)
    dtype = mock.Mock(newbyteorder=newbyteorder)
    deferred_bytes = (fname, position, n_bytes, dtype)
    core_data = mock.MagicMock(return_value=deferred_bytes)
    field = mock.Mock(core_data=core_data)
    data_shape = (100, 120)
    land_mask = mock.Mock()
    proxy = mock.Mock(dtype=np.dtype('f4'), shape=data_shape,
                      spec=pp.PPDataProxy)
    # We can't directly inspect the concrete data source underlying
    # the dask array, so instead we patch the proxy creation and check it's
    # being created and invoked correctly.
    with mock.patch('iris.fileformats.pp.PPDataProxy') as PPDataProxy:
        PPDataProxy.return_value = proxy
        pp._create_field_data(field, data_shape, land_mask)
    # The data should be assigned via field.data. As this is a mock object
    # we can check the attribute directly.
    self.assertEqual(field.data.shape, data_shape)
    self.assertEqual(field.data.dtype, np.dtype('f4'))
    # Is it making use of a correctly configured proxy?
    # NB. We know it's *using* the result of this call because
    # that's where the dtype came from above.
    PPDataProxy.assert_called_once_with(
        (data_shape), dtype, fname, position, n_bytes,
        field.raw_lbpack, field.boundary_packing, field.bmdi, land_mask)
def setUp(self):
    """Create a minimal stand-in for a real PPField3."""
    pp_field = mock.MagicMock(spec=pp.PPField3)
    # Minimal content required by the pp.save operation.
    pp_field.HEADER_DEFN = pp.PPField3.HEADER_DEFN
    pp_field.data = np.zeros((1, 1))
    pp_field.save = asave
    self.pp_field = pp_field
def test_datetimelike(self):
    """Comparing a Cell against a timetuple-bearing object must route
    through Cell.__eq__."""
    datetime_like = mock.Mock(timetuple=mock.Mock())
    cell = mock.MagicMock(spec=Cell,
                          point=datetime.datetime(2010, 3, 21),
                          bound=None)
    _ = cell == datetime_like
    cell.__eq__.assert_called_once_with(datetime_like)
def test_time_mean(self):
    # lbproc = 128 -> mean
    # lbtim.ib = 2 -> simple t1 to t2 interval.
    field = mock.MagicMock(lbproc=128,
                           lbtim=mock.Mock(ia=0, ib=2, ic=3))
    result = _all_other_rules(field)[CELL_METHODS_INDEX]
    self.assertEqual(result, [CellMethod('mean', 'time')])
def setUp(self):
    # Create coordinate cf variables and pyke engine.
    points = np.arange(6).reshape(2, 3)
    cf_data = mock.Mock(_FillValue=None)
    cf_data.chunking = mock.MagicMock(return_value=points.shape)
    # Fake 2d coordinate variable over ('foo', 'bar'); item access is
    # served from the local numpy 'points' array.
    self.cf_coord_var = mock.Mock(
        spec=CFVariable,
        dimensions=('foo', 'bar'),
        cf_name='wibble',
        cf_data=cf_data,
        standard_name=None,
        long_name='wibble',
        units='m',
        shape=points.shape,
        dtype=points.dtype,
        __getitem__=lambda self, key: points[key])

    self.engine = mock.Mock(cube=mock.Mock(),
                            cf_var=mock.Mock(dimensions=('foo', 'bar')),
                            filename='DUMMY',
                            provides=dict(coordinates=[]))

    # Deferred-load patch: serve proxy reads from the fake coordinate
    # variable instead of attempting real file access.
    def patched__getitem__(proxy_self, keys):
        if proxy_self.variable_name == self.cf_coord_var.cf_name:
            return self.cf_coord_var[keys]
        raise RuntimeError()
    self.deferred_load_patch = mock.patch(
        'iris.fileformats.netcdf.NetCDFDataProxy.__getitem__',
        new=patched__getitem__)
def test_soil_depth(self):
    """lbvc=6 with non-zero bounds headers -> bounded soil depth coord."""
    lower, point, upper = 1.2, 3.4, 5.6
    depth_field = mock.MagicMock(lbvc=6, blev=point,
                                 brsvd=[lower, 0], brlev=upper)
    self._test_for_coord(depth_field, convert,
                         self._is_soil_depth_coord,
                         expected_points=[point],
                         expected_bounds=[[lower, upper]])
def test_potential_temperature_levels(self):
    """lbvc=19 -> unbounded potential-temperature level coordinate."""
    theta_value = 27.32
    field = mock.MagicMock(lbvc=19, blev=theta_value)
    self._test_for_coord(field, convert,
                         TestLBVC._is_potm_level_coord,
                         expected_points=np.array([theta_value]),
                         expected_bounds=None)
def test_hourly_mean_over_multiple_years(self):
    # lbproc = 128 -> mean; lbtim.ia = 1, ib = 3 -> hourly interval
    # within years, then a mean over years.
    field = mock.MagicMock(lbproc=128,
                           lbtim=mock.Mock(ia=1, ib=3, ic=3))
    result = _all_other_rules(field)[CELL_METHODS_INDEX]
    self.assertEqual(result,
                     [CellMethod('mean within years', 'time', '1 hour'),
                      CellMethod('mean over years', 'time')])
def test_cross_section_height_bdy_zero(self):
    """Cross-section lbcode with bdy=0 -> height coord built from the
    field's y points and y_bounds."""
    lbcode = SplittableInt(19902, {'iy': slice(0, 2), 'ix': slice(2, 4)})
    height_points = np.array([10, 20, 30, 40])
    height_bounds = np.array([[0, 15], [15, 25], [25, 35], [35, 45]])
    field = mock.MagicMock(lbcode=lbcode, bdy=0,
                           y=height_points, y_bounds=height_bounds)
    self._test_for_coord(field, convert,
                         TestLBCODE._is_cross_section_height_coord,
                         expected_points=height_points,
                         expected_bounds=height_bounds)
def test_hybrid_pressure_model_level_number(self):
    """lbvc=9 -> unbounded model_level_number coordinate from lblev."""
    model_level = 5678
    field = mock.MagicMock(lbvc=9, lblev=model_level,
                           blev=20, brlev=23,
                           bhlev=42, bhrlev=45,
                           brsvd=[17, 40])
    self._test_for_coord(field, convert,
                         TestLBVC._is_model_level_number_coord,
                         expected_points=[model_level],
                         expected_bounds=None)
def test_time_mean_over_multiple_years(self):
    # lbtim.ib = 3 -> interval within a year, over multiple years.
    field = mock.MagicMock(lbproc=128,
                           lbtim=mock.Mock(ia=0, ib=3, ic=3))
    result = _all_other_rules(field)[CELL_METHODS_INDEX]
    self.assertEqual(result,
                     [CellMethod('mean within years', 'time'),
                      CellMethod('mean over years', 'time')])
def test_no_std_name(self):
    """An unrecognised STASH/lbfc pair yields no standard name or units."""
    lbuser = [1, 0, 0, 0, 0, 0, 0]
    lbfc = 0
    stash = STASH(lbuser[6], lbuser[3] // 1000, lbuser[3] % 1000)
    field = mock.MagicMock(lbuser=lbuser, lbfc=lbfc, stash=stash)
    # convert() returns a 9-tuple; only name and units matter here.
    result = convert(field)
    standard_name, units = result[2], result[4]
    self.assertIsNone(standard_name)
    self.assertIsNone(units)
def _check_bounds_setting(self, climatological=False):
    # Generic test that can run with or without a climatological coord.
    cube = stock.climatology_3d()
    coord = cube.coord('time').copy()
    # Over-write original value from stock.climatology_3d with test value.
    coord.climatological = climatological

    # Set up expected strings.
    if climatological:
        property_name = 'climatology'
        varname_extra = 'climatology'
    else:
        property_name = 'bounds'
        varname_extra = 'bnds'
    boundsvar_name = 'time_' + varname_extra

    # Set up arguments for testing _create_cf_bounds.
    saver = mock.MagicMock(spec=Saver)
    # NOTE: 'saver' must have spec=Saver to fake isinstance(saver, Saver),
    # so it can pass as 'self' in the call to _create_cf_bounds.
    # Mock a '_dataset' property; not automatic because 'spec=Saver'.
    saver._dataset = mock.MagicMock()
    # Mock the '_ensure_valid_dtype' method to return an object with a
    # suitable 'shape' and 'dtype'.
    saver._ensure_valid_dtype.return_value = mock.Mock(
        shape=coord.bounds.shape, dtype=coord.bounds.dtype)
    var = mock.MagicMock(spec=nc.Variable)

    # Make the main call.
    Saver._create_cf_bounds(saver, coord, var, 'time')

    # Test the call of _setncattr in _create_cf_bounds.
    setncattr_call = mock.call(property_name,
                               boundsvar_name.encode(encoding='ascii'))
    self.assertEqual(setncattr_call, var.setncattr.call_args)

    # Test the call of createVariable in _create_cf_bounds.
    dataset = saver._dataset
    expected_dimensions = var.dimensions + ('bnds', )
    create_var_call = mock.call(boundsvar_name, coord.bounds.dtype,
                                expected_dimensions)
    self.assertEqual(create_var_call, dataset.createVariable.call_args)
def test_realization(self):
    """lbrsvd[3] (ensemble member number) -> scalar realization coord."""
    lbrsvd = [0, 0, 0, 71]
    field = mock.MagicMock(lbrsvd=lbrsvd)
    self._test_for_coord(field, convert, TestLBRSVD._is_realization,
                         expected_points=np.array([71]),
                         expected_bounds=None)
def field_with_data(scale=1):
    """Return a mock PPField carrying a 30x40 arange dataset times *scale*.

    The field reports a regular lon/lat grid via its header attributes
    and exposes longitude/latitude coordinate names with no coord system.
    """
    n_cols, n_rows = 40, 30
    values = np.arange(n_cols * n_rows).reshape(n_rows, n_cols) * scale
    field = mock.MagicMock(_data=values,
                           lbcode=[1],
                           lbnpt=n_cols, lbrow=n_rows,
                           bzx=350, bdx=1.5,
                           bzy=40, bdy=1.5,
                           lbuser=[0] * 7, lbrsvd=[0] * 4)
    field._x_coord_name = lambda: 'longitude'
    field._y_coord_name = lambda: 'latitude'
    field.coord_system = lambda: None
    return field
def test_fc_cf_air_temp(self):
    """lbfc=16 must map to CF standard name 'air_temperature' in kelvin."""
    lbuser = [1, 0, 0, 0, 0, 0, 0]
    lbfc = 16
    stash = STASH(lbuser[6], lbuser[3] // 1000, lbuser[3] % 1000)
    field = mock.MagicMock(lbuser=lbuser, lbfc=lbfc, stash=stash)
    # convert() returns a 9-tuple; only name and units matter here.
    result = convert(field)
    standard_name, units = result[2], result[4]
    self.assertEqual(standard_name, 'air_temperature')
    self.assertEqual(units, 'K')