def test_cross_reference(self):
    # Exercise the factory-creation path that resolves a
    # cross-reference between two loaded fields.
    param_cube = stock.realistic_4d_no_derived()
    orog_coord = param_cube.coord('surface_altitude')
    param_cube.remove_coord(orog_coord)
    orog_cube = param_cube[0, 0, :, :]
    orog_cube.data = orog_coord.points
    orog_cube.rename('surface_altitude')
    orog_cube.units = orog_coord.units
    orog_cube.attributes = orog_coord.attributes

    # Sanity-check: the hybrid-height pieces must NOT be present yet,
    # since the assertions below rely on the factory adding them.
    assert len(param_cube.aux_factories) == 0
    assert not param_cube.coords('surface_altitude')

    # The fake PPFields which will be supplied to our converter.
    pressure_field = Mock()
    pressure_field.data = param_cube.data
    orography_field = Mock()
    orography_field.data = orog_cube.data

    def field_generator(filename):
        return [pressure_field, orography_field]

    # A fake conversion function returning:
    #   1) a parameter cube needing an "orography" reference,
    #   2) an "orography" cube supplying that reference.
    def converter(field):
        if field is pressure_field:
            cube = param_cube
            factories = [Factory(HybridHeightFactory,
                                 [Reference('orography')])]
            references = []
        else:
            cube = orog_cube
            factories = []
            references = [ReferenceTarget('orography', None)]
        dim_coords_and_dims = [(coord, cube.coord_dims(coord)[0])
                               for coord in cube.dim_coords]
        aux_coords_and_dims = [(coord, cube.coord_dims(coord))
                               for coord in cube.aux_coords]
        return (factories, references, cube.standard_name,
                cube.long_name, cube.units, cube.attributes,
                cube.cell_methods, dim_coords_and_dims,
                aux_coords_and_dims)

    # Finish by making a fake Loader.
    fake_loader = Loader(field_generator, {}, converter, None)
    cubes = load_cubes(['fake_filename'], None, fake_loader)

    # Check the result is a generator containing two Cubes.
    self.assertIsInstance(cubes, types.GeneratorType)
    cubes = list(cubes)
    self.assertEqual(len(cubes), 2)

    # The parameter cube must now carry the aux-factory and the
    # derived 'surface_altitude' coordinate it produces.
    self.assertEqual(len(cubes[1].aux_factories), 1)
    self.assertEqual(len(cubes[1].coords('surface_altitude')), 1)
def test_normal_with_explicit_none(self):
    # Constructing a Loader with just the three positional
    # arguments must not trigger any deprecation warning.
    sentinels = (mock.sentinel.GEN_FUNC,
                 mock.sentinel.GEN_FUNC_KWARGS,
                 mock.sentinel.CONVERTER)
    with mock.patch('warnings.warn') as warn:
        result = Loader(*sentinels)
    self.assertEqual(warn.call_count, 0)
    # Each argument must be stored unmodified on the Loader.
    self.assertIs(result.field_generator, sentinels[0])
    self.assertIs(result.field_generator_kwargs, sentinels[1])
    self.assertIs(result.converter, sentinels[2])
def test_simple_factory(self):
    # Exercise factory creation driven purely by simple dict
    # arguments (no cross-references involved).

    # Minimal fake object that passes as lazy data.
    lazy_array = mock.Mock(compute=None, dtype=np.dtype("f4"))

    # The fake PPField handed to our converter.
    fake_field = mock.Mock(
        core_data=mock.Mock(return_value=lazy_array),
        realised_dtype=np.dtype("f4"),
        bmdi=None,
    )

    def field_generator(filename):
        return [fake_field]

    # A fake conversion returning a parameter cube that needs a
    # simple factory construction.
    built_factory = mock.Mock()
    factory_spec = mock.Mock()
    factory_spec.args = [{"name": "foo"}]

    def fake_factory_class(*args):
        # Record the construction arguments for later inspection.
        built_factory.fake_args = args
        return built_factory

    factory_spec.factory_class = fake_factory_class

    def converter(field):
        return ConversionMetadata([factory_spec], [], "", "", "", {},
                                  [], [], [])

    # Finish by making a fake Loader.
    fake_loader = Loader(field_generator, {}, converter)
    cubes = load_cubes(["fake_filename"], None, fake_loader)

    # Check the result is a generator with a single entry.
    self.assertIsInstance(cubes, types.GeneratorType)
    try:
        # Suppress the normal Cube.coord() and Cube.add_aux_factory()
        # methods while the generator is realised.
        saved_coord = Cube.coord
        saved_add_aux_factory = Cube.add_aux_factory
        Cube.coord = lambda self, **args: args

        def fake_add_aux_factory(self, aux_factory):
            self.fake_aux_factory = aux_factory

        Cube.add_aux_factory = fake_add_aux_factory
        cubes = list(cubes)
    finally:
        Cube.coord = saved_coord
        Cube.add_aux_factory = saved_add_aux_factory
    self.assertEqual(len(cubes), 1)

    # The cube must have gained an aux-factory, itself created
    # with exactly our dict argument.
    self.assertTrue(hasattr(cubes[0], "fake_aux_factory"))
    self.assertIs(cubes[0].fake_aux_factory, built_factory)
    self.assertTrue(hasattr(built_factory, "fake_args"))
    self.assertEqual(built_factory.fake_args, ({"name": "foo"},))
def test_cross_reference(self):
    # Exercise the rules-based factory creation that resolves a
    # cross-reference between two fields.
    param_cube = stock.realistic_4d_no_derived()
    orog_coord = param_cube.coord('surface_altitude')
    param_cube.remove_coord(orog_coord)
    orog_cube = param_cube[0, 0, :, :]
    orog_cube.data = orog_coord.points
    orog_cube.rename('surface_altitude')
    orog_cube.units = orog_coord.units
    orog_cube.attributes = orog_coord.attributes

    # Sanity-check: no hybrid-height pieces yet — the assertions at
    # the end rely on the factory adding them.
    assert len(param_cube.aux_factories) == 0
    assert not param_cube.coords('surface_altitude')

    pressure_field = Mock()
    orography_field = Mock()

    def field_generator(filename):
        return [pressure_field, orography_field]

    # A fake rule set returning:
    #   1) a parameter cube needing an "orography" reference,
    #   2) an "orography" cube.
    height_factory = Factory(HybridHeightFactory, [Reference('orography')])
    pressure_result = RuleResult(param_cube, Mock(), [height_factory])
    orography_result = RuleResult(orog_cube, Mock(), [])
    rules = Mock()

    def rules_result(field):
        if field is pressure_field:
            return pressure_result
        return orography_result

    rules.result = rules_result

    # A fake cross-reference rule set: only the orography field
    # matches, and its rule yields the reference target.
    target = ReferenceTarget('orography', None)
    orography_xref_rule = Mock()
    orography_xref_rule.run_actions = lambda cube, field: (target,)
    xref_rules = Mock()

    def matching_rules(field):
        if field is orography_field:
            return [orography_xref_rule]
        return []

    xref_rules.matching_rules = matching_rules

    # Finish by making a fake Loader.
    fake_loader = Loader(field_generator, {}, rules, xref_rules, 'FAKE_PP')
    cubes = load_cubes(['fake_filename'], None, fake_loader)

    # Check the result is a generator containing both of our cubes,
    # with the reference cube delivered first.
    self.assertIsInstance(cubes, types.GeneratorType)
    cubes = list(cubes)
    self.assertEqual(len(cubes), 2)
    self.assertIs(cubes[0], orog_cube)
    self.assertIs(cubes[1], param_cube)

    # The parameter cube must now carry the aux-factory and the
    # derived 'surface_altitude' coordinate it produces.
    self.assertEqual(len(param_cube.aux_factories), 1)
    self.assertEqual(len(param_cube.coords('surface_altitude')), 1)
def test(self):
    # load_cubes must delegate straight to the generic rules-based
    # loader, passing a Loader built from the grib generate/convert
    # functions, and hand back its result unchanged.
    files = mock.sentinel.FILES
    callback = mock.sentinel.CALLBACK
    expected = mock.sentinel.RESULT
    with mock.patch('iris.fileformats.rules.load_cubes') as patched_load:
        patched_load.return_value = expected
        actual = load_cubes(files, callback)
    expected_loader = Loader(iris_grib._load_generate,
                             {},
                             iris_grib._load_convert.convert)
    patched_load.assert_called_once_with(files, callback, expected_loader)
    self.assertIs(actual, expected)
def test_deprecated_custom_rules(self):
    # Passing the fourth (legacy custom rules) argument must raise
    # exactly one deprecation warning, while still storing every
    # argument on the Loader unchanged.
    with mock.patch('warnings.warn') as warn:
        result = Loader(mock.sentinel.GEN_FUNC,
                        mock.sentinel.GEN_FUNC_KWARGS,
                        mock.sentinel.CONVERTER,
                        mock.sentinel.CUSTOM_RULES)
    self.assertEqual(warn.call_count, 1)
    self.assertEqual(warn.call_args[0][0],
                     'The `legacy_custom_rules` attribute is deprecated.')
    self.assertIs(result.field_generator, mock.sentinel.GEN_FUNC)
    self.assertIs(result.field_generator_kwargs,
                  mock.sentinel.GEN_FUNC_KWARGS)
    self.assertIs(result.converter, mock.sentinel.CONVERTER)
    self.assertIs(result.legacy_custom_rules, mock.sentinel.CUSTOM_RULES)
def test_simple_factory(self):
    # Exercise factory creation driven purely by simple dict
    # arguments (no cross-references involved).

    # The fake PPField handed to our converter.
    fake_field = Mock()
    fake_field.data = None

    def field_generator(filename):
        return [fake_field]

    # A fake conversion returning a parameter cube that needs a
    # simple factory construction.
    built_factory = Mock()
    factory_spec = Mock()
    factory_spec.args = [{'name': 'foo'}]

    def fake_factory_class(*args):
        # Record the construction arguments for later inspection.
        built_factory.fake_args = args
        return built_factory

    factory_spec.factory_class = fake_factory_class

    def converter(field):
        return ConversionMetadata([factory_spec], [], '', '', '', {},
                                  [], [], [])

    # Finish by making a fake Loader.
    fake_loader = Loader(field_generator, {}, converter, None)
    cubes = load_cubes(['fake_filename'], None, fake_loader)

    # Check the result is a generator with a single entry.
    self.assertIsInstance(cubes, types.GeneratorType)
    try:
        # Suppress the normal Cube.coord() and Cube.add_aux_factory()
        # methods while the generator is realised.
        saved_coord = Cube.coord
        saved_add_aux_factory = Cube.add_aux_factory
        Cube.coord = lambda self, **args: args

        def fake_add_aux_factory(self, aux_factory):
            self.fake_aux_factory = aux_factory

        Cube.add_aux_factory = fake_add_aux_factory
        cubes = list(cubes)
    finally:
        Cube.coord = saved_coord
        Cube.add_aux_factory = saved_add_aux_factory
    self.assertEqual(len(cubes), 1)

    # The cube must have gained an aux-factory, itself created
    # with exactly our dict argument.
    self.assertTrue(hasattr(cubes[0], 'fake_aux_factory'))
    self.assertIs(cubes[0].fake_aux_factory, built_factory)
    self.assertTrue(hasattr(built_factory, 'fake_args'))
    self.assertEqual(built_factory.fake_args, ({'name': 'foo'},))
def test_simple_factory(self):
    # Exercise rules-based factory creation driven purely by simple
    # dict arguments (no cross-references involved).
    fake_field = Mock()

    def field_generator(filename):
        return [fake_field]

    # A fake rule set returning a parameter cube that needs a simple
    # factory construction; the cube records the factory added to it.
    src_cube = Mock()
    src_cube.coord = lambda **args: args

    def fake_add_aux_factory(aux_factory):
        src_cube.fake_aux_factory = aux_factory

    src_cube.add_aux_factory = fake_add_aux_factory
    built_factory = Mock()
    factory_spec = Mock()
    factory_spec.args = [{'name': 'foo'}]

    def fake_factory_class(*args):
        # Record the construction arguments for later inspection.
        built_factory.fake_args = args
        return built_factory

    factory_spec.factory_class = fake_factory_class
    rule_result = RuleResult(src_cube, Mock(), [factory_spec])
    rules = Mock()
    rules.result = lambda field: rule_result

    # A fake cross-reference rule set that never matches.
    xref_rules = Mock()
    xref_rules.matching_rules = lambda field: []

    # Finish by making a fake Loader.
    fake_loader = Loader(field_generator, {}, rules, xref_rules, 'FAKE_PP')
    cubes = load_cubes(['fake_filename'], None, fake_loader)

    # Check the result is a generator with our "cube" as the only
    # entry.
    self.assertIsInstance(cubes, types.GeneratorType)
    cubes = list(cubes)
    self.assertEqual(len(cubes), 1)
    self.assertIs(cubes[0], src_cube)

    # The cube must have gained an aux-factory, itself created
    # with exactly our dict argument.
    self.assertTrue(hasattr(src_cube, 'fake_aux_factory'))
    self.assertIs(src_cube.fake_aux_factory, built_factory)
    self.assertTrue(hasattr(built_factory, 'fake_args'))
    self.assertEqual(built_factory.fake_args, ({'name': 'foo'},))
def load(filenames, callback=None):
    """
    Load structured FieldsFiles and PP files.

    Args:

    * filenames:
        One or more filenames.

    Kwargs:

    * callback:
        A modifier/filter function. Please see the module documentation
        for :mod:`iris`.

        .. note::

            Unlike the standard :func:`iris.load` operation, the callback
            is applied to the final result cubes, not individual input
            fields.

    Returns:
        An :class:`iris.cube.CubeList`.

    This is a streamlined load operation, to be used only on fieldsfiles
    or PP files whose fields repeat regularly over the same vertical
    levels and times. The results aim to be equivalent to those generated
    by :func:`iris.load`, but the operation is substantially faster for
    input that is structured.

    The structured input files should conform to the following
    requirements:

    * the file must contain fields for all possible combinations of the
      vertical levels and time points found in the file.

    * the fields must occur in a regular repeating order within the file.
      (For example: a sequence of fields for NV vertical levels, repeated
      for NP different forecast periods, repeated for NT different
      forecast times).

    * all other metadata must be identical across all fields of the same
      phenomenon.

    Each group of fields with the same values of LBUSER4, LBUSER7 and
    LBPROC is identified as a separate phenomenon: These groups are
    processed independently and returned as separate result cubes.

    .. note::

        Each input file is loaded independently. Thus a single result
        cube can not combine data from multiple input files.

    .. note::

        The resulting time-related coordinates ('time', 'forecast_time'
        and 'forecast_period') may be mapped to shared cube dimensions
        and in some cases can also be multidimensional. However, the
        vertical level information *must* have a simple one-dimensional
        structure, independent of the time points, otherwise an error
        will be raised.

    .. note::

        Where input data does *not* have a fully regular arrangement, the
        corresponding result cube will have a single anonymous extra
        dimension which indexes over all the input fields. This can
        happen if, for example, some fields are missing; or have slightly
        different metadata; or appear out of order in the file.

    .. warning::

        Any non-regular metadata variation in the input should be
        strictly avoided, as not all irregularities are detected, which
        can cause erroneous results.

    """
    # Emit the deprecation notice before doing any work, so callers are
    # warned even if the load subsequently fails.
    warn_deprecated(
        "The module 'iris.experimental.fieldsfile' is deprecated. "
        "Please use the 'iris.fileformats.um.structured_um_loading' facility "
        "as a replacement."
        "\nA call to 'iris.experimental.fieldsfile.load' can be replaced with "
        "'iris.load_raw', within a 'structured_um_loading' context.")
    # Delegate to the generic rules-based loader, using the structured
    # collation generator and converter.
    structured_loader = Loader(_collations_from_filename, {},
                               _convert_collation, None)
    return CubeList(load_cubes(filenames, callback, structured_loader, None))