コード例 #1
0
 def test_pp_with_stash_constraint(self):
     """Loading with a single STASH attribute constraint yields 38 cubes."""
     paths = [tests.get_data_path(("PP", "globClim1", "dec_subset.pp"))]
     stash_constraint = iris.AttributeConstraint(STASH="m01s00i004")
     constraints = pp._convert_constraints(stash_constraint)
     loader = iris.fileformats.rules.Loader(pp.load, {}, convert)
     result = list(load_cubes(paths, None, loader, constraints))
     self.assertEqual(len(result), 38)
コード例 #2
0
 def test_pp_no_constraint(self):
     """Loading without any constraint returns every cube (152) in the file."""
     paths = [tests.get_data_path(('PP', 'globClim1', 'dec_subset.pp'))]
     constraints = pp._convert_constraints(None)
     loader = iris.fileformats.rules.Loader(
         pp.load, {}, convert, pp._load_rules)
     result = list(load_cubes(paths, None, loader, constraints))
     self.assertEqual(len(result), 152)
コード例 #3
0
ファイル: test_rules.py プロジェクト: RachelNorth/iris
    def test_simple_factory(self):
        """Check that load_cubes builds and attaches an aux factory whose
        arguments are plain dicts (no cross-references involved)."""
        # Test the creation process for a factory definition which only
        # uses simple dict arguments.

        # The fake PPField which will be supplied to our converter.
        field = Mock()
        field.data = None
        field_generator = lambda filename: [field]
        # A fake conversion function returning:
        #   1) A parameter cube needing a simple factory construction.
        aux_factory = Mock()
        factory = Mock()
        factory.args = [{'name': 'foo'}]
        # Record the constructor args on aux_factory, then return
        # aux_factory itself (setattr returns None, so `or` yields it).
        factory.factory_class = lambda *args: \
            setattr(aux_factory, 'fake_args', args) or aux_factory
        def converter(cube, field):
            # Suppress the normal Cube.coord() method.
            cube.coord = lambda **args: args
            # Capture whatever factory gets attached to the cube.
            cube.add_aux_factory = lambda aux_factory: \
                setattr(cube, 'fake_aux_factory', aux_factory)
            return ([factory], [])
        # Finish by making a fake Loader
        fake_loader = Loader(field_generator, {}, converter, None)
        cubes = load_cubes(['fake_filename'], None, fake_loader)

        # Check the result is a generator with a single entry.
        self.assertIsInstance(cubes, types.GeneratorType)
        cubes = list(cubes)
        self.assertEqual(len(cubes), 1)
        # Check the "cube" has an "aux_factory" added, which itself
        # must have been created with the correct arguments.
        self.assertTrue(hasattr(cubes[0], 'fake_aux_factory'))
        self.assertIs(cubes[0].fake_aux_factory, aux_factory)
        self.assertTrue(hasattr(aux_factory, 'fake_args'))
        self.assertEqual(aux_factory.fake_args, ({'name': 'foo'},))
コード例 #4
0
 def test_pp_with_stash_constraint(self):
     """A single STASH attribute constraint selects 38 cubes from the file."""
     paths = [tests.get_data_path(("PP", "globClim1", "dec_subset.pp"))]
     stash_constraint = iris.AttributeConstraint(STASH="m01s00i004")
     constraints = pp._convert_constraints(stash_constraint)
     loader = iris.fileformats.rules.Loader(pp.load, {}, convert)
     result = list(load_cubes(paths, None, loader, constraints))
     self.assertEqual(len(result), 38)
コード例 #5
0
ファイル: test_rules.py プロジェクト: MahatmaCane/iris
    def test_cross_reference(self):
        """Check that a factory needing an 'orography' cross-reference is
        resolved against a separately-loaded reference-target cube."""
        # Test the creation process for a factory definition which uses
        # a cross-reference.

        # Build a parameter cube with its surface_altitude coord stripped,
        # plus a standalone orography cube carrying the same data.
        param_cube = stock.realistic_4d_no_derived()
        orog_coord = param_cube.coord('surface_altitude')
        param_cube.remove_coord(orog_coord)

        orog_cube = param_cube[0, 0, :, :]
        orog_cube.data = orog_coord.points
        orog_cube.rename('surface_altitude')
        orog_cube.units = orog_coord.units
        orog_cube.attributes = orog_coord.attributes

        # We're going to test for the presence of the hybrid height
        # stuff later, so let's make sure it's not already there!
        assert len(param_cube.aux_factories) == 0
        assert not param_cube.coords('surface_altitude')

        # The fake PPFields which will be supplied to our converter.
        press_field = Mock()
        press_field.data = param_cube.data
        orog_field = Mock()
        orog_field.data = orog_cube.data
        field_generator = lambda filename: [press_field, orog_field]
        # A fake rule set returning:
        #   1) A parameter cube needing an "orography" reference
        #   2) An "orography" cube

        def converter(field):
            if field is press_field:
                src = param_cube
                factories = [Factory(HybridHeightFactory,
                                     [Reference('orography')])]
                references = []
            else:
                src = orog_cube
                factories = []
                references = [ReferenceTarget('orography', None)]
            dim_coords_and_dims = [(coord, src.coord_dims(coord)[0])
                                   for coord in src.dim_coords]
            aux_coords_and_dims = [(coord, src.coord_dims(coord))
                                   for coord in src.aux_coords]
            return ConversionMetadata(factories, references, src.standard_name,
                                      src.long_name, src.units, src.attributes,
                                      src.cell_methods, dim_coords_and_dims,
                                      aux_coords_and_dims)
        # Finish by making a fake Loader
        fake_loader = Loader(field_generator, {}, converter, None)
        cubes = load_cubes(['fake_filename'], None, fake_loader)

        # Check the result is a generator containing two Cubes.
        self.assertIsInstance(cubes, types.GeneratorType)
        cubes = list(cubes)
        self.assertEqual(len(cubes), 2)
        # Check the "cube" has an "aux_factory" added, which itself
        # must have been created with the correct arguments.
        self.assertEqual(len(cubes[1].aux_factories), 1)
        self.assertEqual(len(cubes[1].coords('surface_altitude')), 1)
コード例 #6
0
 def test_pp_with_stash_constraints(self):
     """A list of two STASH constraints selects 76 cubes in total."""
     paths = [tests.get_data_path(('PP', 'globClim1', 'dec_subset.pp'))]
     con_a = iris.AttributeConstraint(STASH='m01s00i004')
     con_b = iris.AttributeConstraint(STASH='m01s00i010')
     constraints = pp._convert_constraints([con_a, con_b])
     loader = iris.fileformats.rules.Loader(pp.load, {}, convert)
     result = list(load_cubes(paths, None, loader, constraints))
     self.assertEqual(len(result), 76)
コード例 #7
0
 def test_pp_with_stash_constraints(self):
     """Loading with two STASH constraints yields 76 cubes."""
     paths = [tests.get_data_path(('PP', 'globClim1', 'dec_subset.pp'))]
     first = iris.AttributeConstraint(STASH='m01s00i004')
     second = iris.AttributeConstraint(STASH='m01s00i010')
     constraints = pp._convert_constraints([first, second])
     loader = iris.fileformats.rules.Loader(pp.load, {}, convert)
     result = list(load_cubes(paths, None, loader, constraints))
     self.assertEqual(len(result), 76)
コード例 #8
0
ファイル: test_rules.py プロジェクト: payton1004/iris
    def test_cross_reference(self):
        """Check that a factory requiring an 'orography' reference is
        satisfied by a cube flagged as the matching reference target."""
        # Test the creation process for a factory definition which uses
        # a cross-reference.

        # A parameter cube with surface_altitude removed, plus a separate
        # orography cube holding that coordinate's data.
        param_cube = stock.realistic_4d_no_derived()
        orog_coord = param_cube.coord('surface_altitude')
        param_cube.remove_coord(orog_coord)

        orog_cube = param_cube[0, 0, :, :]
        orog_cube.data = orog_coord.points
        orog_cube.rename('surface_altitude')
        orog_cube.units = orog_coord.units
        orog_cube.attributes = orog_coord.attributes

        # We're going to test for the presence of the hybrid height
        # stuff later, so let's make sure it's not already there!
        assert len(param_cube.aux_factories) == 0
        assert not param_cube.coords('surface_altitude')

        # The fake PPFields which will be supplied to our converter.
        press_field = Mock()
        press_field.data = param_cube.data
        orog_field = Mock()
        orog_field.data = orog_cube.data
        field_generator = lambda filename: [press_field, orog_field]
        # A fake rule set returning:
        #   1) A parameter cube needing an "orography" reference
        #   2) An "orography" cube
        def converter(field):
            if field is press_field:
                src = param_cube
                factories = [Factory(HybridHeightFactory,
                                     [Reference('orography')])]
                references = []
            else:
                src = orog_cube
                factories = []
                references = [ReferenceTarget('orography', None)]
            dim_coords_and_dims = [(coord, src.coord_dims(coord)[0])
                                   for coord in src.dim_coords]
            aux_coords_and_dims = [(coord, src.coord_dims(coord))
                                   for coord in src.aux_coords]
            # NOTE: this (older) converter API returns a plain tuple of the
            # conversion fields rather than a ConversionMetadata namedtuple.
            return (factories, references, src.standard_name, src.long_name,
                    src.units, src.attributes, src.cell_methods,
                    dim_coords_and_dims, aux_coords_and_dims)
        # Finish by making a fake Loader
        fake_loader = Loader(field_generator, {}, converter, None)
        cubes = load_cubes(['fake_filename'], None, fake_loader)

        # Check the result is a generator containing two Cubes.
        self.assertIsInstance(cubes, types.GeneratorType)
        cubes = list(cubes)
        self.assertEqual(len(cubes), 2)
        # Check the "cube" has an "aux_factory" added, which itself
        # must have been created with the correct arguments.
        self.assertEqual(len(cubes[1].aux_factories), 1)
        self.assertEqual(len(cubes[1].coords('surface_altitude')), 1)
コード例 #9
0
    def test_simple_factory(self):
        """Check that load_cubes builds an aux factory from simple dict
        arguments, temporarily patching Cube methods to observe it."""
        # Test the creation process for a factory definition which only
        # uses simple dict arguments.

        # Make a minimal fake data object that passes as lazy data.
        core_data_array = mock.Mock(compute=None, dtype=np.dtype("f4"))
        # Make a fake PPField which will be supplied to our converter.
        field = mock.Mock(
            core_data=mock.Mock(return_value=core_data_array),
            realised_dtype=np.dtype("f4"),
            bmdi=None,
        )

        def field_generator(filename):
            return [field]

        # A fake conversion function returning:
        #   1) A parameter cube needing a simple factory construction.
        aux_factory = mock.Mock()
        factory = mock.Mock()
        factory.args = [{"name": "foo"}]
        # Record the constructor args on aux_factory, then return it
        # (setattr yields None, so `or` evaluates to aux_factory).
        factory.factory_class = (lambda *args: setattr(
            aux_factory, "fake_args", args) or aux_factory)

        def converter(field):
            return ConversionMetadata([factory], [], "", "", "", {}, [], [],
                                      [])

        # Finish by making a fake Loader
        fake_loader = Loader(field_generator, {}, converter)
        cubes = load_cubes(["fake_filename"], None, fake_loader)

        # Check the result is a generator with a single entry.
        self.assertIsInstance(cubes, types.GeneratorType)
        try:
            # Suppress the normal Cube.coord() and Cube.add_aux_factory()
            # methods.  NOTE: this patches the Cube class itself, so the
            # try/finally restore below is essential.
            coord_method = Cube.coord
            add_aux_factory_method = Cube.add_aux_factory
            Cube.coord = lambda self, **args: args
            Cube.add_aux_factory = lambda self, aux_factory: setattr(
                self, "fake_aux_factory", aux_factory)

            cubes = list(cubes)
        finally:
            Cube.coord = coord_method
            Cube.add_aux_factory = add_aux_factory_method
        self.assertEqual(len(cubes), 1)
        # Check the "cube" has an "aux_factory" added, which itself
        # must have been created with the correct arguments.
        self.assertTrue(hasattr(cubes[0], "fake_aux_factory"))
        self.assertIs(cubes[0].fake_aux_factory, aux_factory)
        self.assertTrue(hasattr(aux_factory, "fake_args"))
        self.assertEqual(aux_factory.fake_args, ({"name": "foo"}, ))
コード例 #10
0
ファイル: test_rules.py プロジェクト: ahill818/iris
    def test_cross_reference(self):
        """Check the (rules-based) loader resolves an 'orography'
        cross-reference between two fake fields."""
        # Test the creation process for a factory definition which uses
        # a cross-reference.

        param_cube = stock.realistic_4d_no_derived()
        orog_coord = param_cube.coord('surface_altitude')
        param_cube.remove_coord(orog_coord)

        orog_cube = param_cube[0, 0, :, :]
        orog_cube.data = orog_coord.points
        orog_cube.rename('surface_altitude')
        orog_cube.units = orog_coord.units
        orog_cube.attributes = orog_coord.attributes

        # We're going to test for the presence of the hybrid height
        # stuff later, so let's make sure it's not already there!
        assert len(param_cube.aux_factories) == 0
        assert not param_cube.coords('surface_altitude')

        press_field = Mock()
        orog_field = Mock()
        field_generator = lambda filename: [press_field, orog_field]
        # A fake rule set returning:
        #   1) A parameter cube needing an "orography" reference
        #   2) An "orography" cube
        factory = Factory(HybridHeightFactory, [Reference('orography')])
        press_rule_result = RuleResult(param_cube, Mock(), [factory])
        orog_rule_result= RuleResult(orog_cube, Mock(), [])
        rules = Mock()
        rules.result = lambda field: \
            press_rule_result if field is press_field else orog_rule_result
        # A fake cross-reference rule set: only the orography field matches
        # a rule, and that rule yields the reference target.
        ref = ReferenceTarget('orography', None)
        orog_xref_rule = Mock()
        orog_xref_rule.run_actions = lambda cube, field: (ref,)
        xref_rules = Mock()
        xref_rules.matching_rules = lambda field: \
            [orog_xref_rule] if field is orog_field else []
        # Finish by making a fake Loader
        name = 'FAKE_PP'
        fake_loader = Loader(field_generator, {}, rules, xref_rules, name)
        cubes = load_cubes(['fake_filename'], None, fake_loader)
        # Check the result is a generator containing both of our cubes.
        self.assertIsInstance(cubes, types.GeneratorType)
        cubes = list(cubes)
        self.assertEqual(len(cubes), 2)
        self.assertIs(cubes[0], orog_cube)
        self.assertIs(cubes[1], param_cube)
        # Check the "cube" has an "aux_factory" added, which itself
        # must have been created with the correct arguments.
        self.assertEqual(len(param_cube.aux_factories), 1)
        self.assertEqual(len(param_cube.coords('surface_altitude')), 1)
コード例 #11
0
ファイル: test_rules.py プロジェクト: nleush/iris
    def test_cross_reference(self):
        """Check the rules-based loader resolves an 'orography'
        cross-reference, yielding the reference cube first."""
        # Test the creation process for a factory definition which uses
        # a cross-reference.

        param_cube = stock.realistic_4d_no_derived()
        orog_coord = param_cube.coord('surface_altitude')
        param_cube.remove_coord(orog_coord)

        orog_cube = param_cube[0, 0, :, :]
        orog_cube.data = orog_coord.points
        orog_cube.rename('surface_altitude')
        orog_cube.units = orog_coord.units
        orog_cube.attributes = orog_coord.attributes

        # We're going to test for the presence of the hybrid height
        # stuff later, so let's make sure it's not already there!
        assert len(param_cube.aux_factories) == 0
        assert not param_cube.coords('surface_altitude')

        press_field = Mock()
        orog_field = Mock()
        field_generator = lambda filename: [press_field, orog_field]
        # A fake rule set returning:
        #   1) A parameter cube needing an "orography" reference
        #   2) An "orography" cube
        factory = Factory(HybridHeightFactory, [Reference('orography')])
        press_rule_result = RuleResult(param_cube, Mock(), [factory])
        orog_rule_result = RuleResult(orog_cube, Mock(), [])
        rules = Mock()
        rules.result = lambda field: \
            press_rule_result if field is press_field else orog_rule_result
        # A fake cross-reference rule set: only the orography field matches,
        # and its rule produces the reference target.
        ref = ReferenceTarget('orography', None)
        orog_xref_rule = Mock()
        orog_xref_rule.run_actions = lambda cube, field: (ref, )
        xref_rules = Mock()
        xref_rules.matching_rules = lambda field: \
            [orog_xref_rule] if field is orog_field else []
        # Finish by making a fake Loader
        name = 'FAKE_PP'
        fake_loader = Loader(field_generator, {}, rules, xref_rules, name)
        cubes = load_cubes(['fake_filename'], None, fake_loader)
        # Check the result is a generator containing both of our cubes.
        self.assertIsInstance(cubes, types.GeneratorType)
        cubes = list(cubes)
        self.assertEqual(len(cubes), 2)
        self.assertIs(cubes[0], orog_cube)
        self.assertIs(cubes[1], param_cube)
        # Check the "cube" has an "aux_factory" added, which itself
        # must have been created with the correct arguments.
        self.assertEqual(len(param_cube.aux_factories), 1)
        self.assertEqual(len(param_cube.coords('surface_altitude')), 1)
コード例 #12
0
ファイル: test_rules.py プロジェクト: SciTools/iris
    def test_simple_factory(self):
        """Check load_cubes builds an aux factory from plain dict args,
        patching Cube methods temporarily to capture the result."""
        # Test the creation process for a factory definition which only
        # uses simple dict arguments.

        # Make a minimal fake data object that passes as lazy data.
        core_data_array = mock.Mock(compute=None, dtype=np.dtype('f4'))
        # Make a fake PPField which will be supplied to our converter.
        field = mock.Mock(core_data=mock.Mock(return_value=core_data_array),
                          realised_dtype=np.dtype('f4'),
                          bmdi=None)

        def field_generator(filename):
            return [field]

        # A fake conversion function returning:
        #   1) A parameter cube needing a simple factory construction.
        aux_factory = mock.Mock()
        factory = mock.Mock()
        factory.args = [{'name': 'foo'}]
        # Record the constructor args on aux_factory, then return it
        # (setattr yields None, so `or` evaluates to aux_factory).
        factory.factory_class = lambda *args: \
            setattr(aux_factory, 'fake_args', args) or aux_factory

        def converter(field):
            return ConversionMetadata([factory], [], '', '', '', {}, [], [],
                                      [])
        # Finish by making a fake Loader
        fake_loader = Loader(field_generator, {}, converter)
        cubes = load_cubes(['fake_filename'], None, fake_loader)

        # Check the result is a generator with a single entry.
        self.assertIsInstance(cubes, types.GeneratorType)
        try:
            # Suppress the normal Cube.coord() and Cube.add_aux_factory()
            # methods.  NOTE: this patches the Cube class itself, so the
            # try/finally restore below is essential.
            coord_method = Cube.coord
            add_aux_factory_method = Cube.add_aux_factory
            Cube.coord = lambda self, **args: args
            Cube.add_aux_factory = lambda self, aux_factory: \
                setattr(self, 'fake_aux_factory', aux_factory)

            cubes = list(cubes)
        finally:
            Cube.coord = coord_method
            Cube.add_aux_factory = add_aux_factory_method
        self.assertEqual(len(cubes), 1)
        # Check the "cube" has an "aux_factory" added, which itself
        # must have been created with the correct arguments.
        self.assertTrue(hasattr(cubes[0], 'fake_aux_factory'))
        self.assertIs(cubes[0].fake_aux_factory, aux_factory)
        self.assertTrue(hasattr(aux_factory, 'fake_args'))
        self.assertEqual(aux_factory.fake_args, ({'name': 'foo'},))
コード例 #13
0
    def test_simple_factory(self):
        """Check load_cubes builds an aux factory from plain dict args,
        using a null-data fake field and temporary Cube patches."""
        # Test the creation process for a factory definition which only
        # uses simple dict arguments.

        # The fake PPField which will be supplied to our converter.
        field = Mock()
        field.data = None

        def field_generator(filename):
            return [field]

        # A fake conversion function returning:
        #   1) A parameter cube needing a simple factory construction.
        aux_factory = Mock()
        factory = Mock()
        factory.args = [{'name': 'foo'}]
        # Record the constructor args on aux_factory, then return it
        # (setattr yields None, so `or` evaluates to aux_factory).
        factory.factory_class = lambda *args: \
            setattr(aux_factory, 'fake_args', args) or aux_factory

        def converter(field):
            return ConversionMetadata([factory], [], '', '', '', {}, [], [],
                                      [])
        # Finish by making a fake Loader
        fake_loader = Loader(field_generator, {}, converter, None)
        cubes = load_cubes(['fake_filename'], None, fake_loader)

        # Check the result is a generator with a single entry.
        self.assertIsInstance(cubes, types.GeneratorType)
        try:
            # Suppress the normal Cube.coord() and Cube.add_aux_factory()
            # methods.  NOTE: this patches the Cube class itself, so the
            # try/finally restore below is essential.
            coord_method = Cube.coord
            add_aux_factory_method = Cube.add_aux_factory
            Cube.coord = lambda self, **args: args
            Cube.add_aux_factory = lambda self, aux_factory: \
                setattr(self, 'fake_aux_factory', aux_factory)

            cubes = list(cubes)
        finally:
            Cube.coord = coord_method
            Cube.add_aux_factory = add_aux_factory_method
        self.assertEqual(len(cubes), 1)
        # Check the "cube" has an "aux_factory" added, which itself
        # must have been created with the correct arguments.
        self.assertTrue(hasattr(cubes[0], 'fake_aux_factory'))
        self.assertIs(cubes[0].fake_aux_factory, aux_factory)
        self.assertTrue(hasattr(aux_factory, 'fake_args'))
        self.assertEqual(aux_factory.fake_args, ({'name': 'foo'},))
コード例 #14
0
ファイル: __init__.py プロジェクト: bjlittle/iris-grib
def load_cubes(filenames, callback=None, auto_regularise=True):
    """
    Returns a generator of cubes from the given list of filenames.

    Args:

    * filenames (string/list):
        One or more GRIB filenames to load from.

    Kwargs:

    * callback (callable function):
        Function which can be passed on to :func:`iris.io.run_callback`.

    * auto_regularise (*True* | *False*):
        If *True*, any cube defined on a reduced grid will be interpolated
        to an equivalent regular grid. If *False*, any cube defined on a
        reduced grid will be loaded on the raw reduced grid with no shape
        information. If `iris.FUTURE.strict_grib_load` is `True` then this
        keyword has no effect, raw grids are always used. If the older GRIB
        loader is in use then the default behaviour is to interpolate cubes
        on a reduced grid to an equivalent regular grid.

        .. deprecated:: 1.8. Please use strict_grib_load and regrid instead.


    """
    if iris.FUTURE.strict_grib_load:
        # New-style loading: iterate GRIB messages directly and convert
        # with the strict converter; auto_regularise is ignored here.
        grib_loader = iris_rules.Loader(
            GribMessage.messages_from_filename,
            {},
            old_load_convert)
    else:
        if auto_regularise is not None:
            # The old loader supports the auto_regularise keyword, but in
            # deprecation mode, so warning if it is found.
            # NOTE(review): the default is True (not None), so this warning
            # fires even when the caller never supplied the keyword —
            # confirm that is the intended deprecation behaviour.
            msg = ('the`auto_regularise` kwarg is deprecated and '
                   'will be removed in a future release. Resampling '
                   'quasi-regular grids on load will no longer be '
                   'available.  Resampling should be done on the '
                   'loaded cube instead using Cube.regrid.')
            warn_deprecated(msg)

        # Old-style loading via the grib field generator, forwarding the
        # auto_regularise setting to it.
        grib_loader = iris_rules.Loader(
            grib_generator, {'auto_regularise': auto_regularise},
            new_load_convert)
    return iris_rules.load_cubes(filenames, callback, grib_loader)
コード例 #15
0
def load_cubes(filenames, callback=None):
    """
    Yield cubes loaded from the given GRIB filenames.

    Args:

    * filenames (string/list):
        One or more GRIB filenames to load from.

    Kwargs:

    * callback (callable function):
        Function which can be passed on to :func:`iris.io.run_callback`.

    """
    import iris.fileformats.rules as iris_rules
    loader = iris_rules.Loader(_load_generate, {}, load_convert)
    return iris_rules.load_cubes(filenames, callback, loader)
コード例 #16
0
ファイル: __init__.py プロジェクト: lbdreyer/iris-grib
def load_cubes(filenames, callback=None):
    """
    Yield cubes loaded from the given GRIB filenames.

    Args:

    * filenames (string/list):
        One or more GRIB filenames to load from.

    Kwargs:

    * callback (callable function):
        Function which can be passed on to :func:`iris.io.run_callback`.

    """
    # Delegate to the generic rules-based loading machinery.
    return iris_rules.load_cubes(
        filenames, callback,
        iris_rules.Loader(_load_generate, {}, load_convert))
コード例 #17
0
ファイル: test_rules.py プロジェクト: ahill818/iris
 def test_simple_factory(self):
     """Check the rules-based loader attaches an aux factory that is
     constructed from simple dict arguments only."""
     field = Mock()
     field_generator = lambda filename: [field]
     # A fake rule set returning:
     #   1) A parameter cube needing a simple factory construction.
     src_cube = Mock()
     # Bypass the real coord lookup, and capture any attached factory.
     src_cube.coord = lambda **args: args
     src_cube.add_aux_factory = lambda aux_factory: \
         setattr(src_cube, 'fake_aux_factory', aux_factory)
     aux_factory = Mock()
     factory = Mock()
     factory.args = [{'name': 'foo'}]
     # Record the construction args, then hand back the fake factory
     # (setattr returns None, so `or` evaluates to aux_factory).
     factory.factory_class = lambda *args: \
         setattr(aux_factory, 'fake_args', args) or aux_factory
     rule_result = RuleResult(src_cube, Mock(), [factory])
     rules = Mock()
     rules.result = lambda field: rule_result
     # A cross-reference rule set that never matches anything.
     xref_rules = Mock()
     xref_rules.matching_rules = lambda field: []
     # Assemble the fake Loader and run the load.
     fake_loader = Loader(field_generator, {}, rules, xref_rules, 'FAKE_PP')
     cubes = load_cubes(['fake_filename'], None, fake_loader)
     # The result must be a generator whose only entry is our "cube".
     self.assertIsInstance(cubes, types.GeneratorType)
     cubes = list(cubes)
     self.assertEqual(len(cubes), 1)
     self.assertIs(cubes[0], src_cube)
     # The "cube" must have gained an "aux_factory", created with the
     # expected arguments.
     self.assertTrue(hasattr(src_cube, 'fake_aux_factory'))
     self.assertIs(src_cube.fake_aux_factory, aux_factory)
     self.assertTrue(hasattr(aux_factory, 'fake_args'))
     self.assertEqual(aux_factory.fake_args, ({'name': 'foo'},))
コード例 #18
0
ファイル: test_rules.py プロジェクト: nleush/iris
 def test_simple_factory(self):
     """Check the rules-based loader attaches an aux factory built from
     plain dict arguments, with no cross-references involved."""
     field = Mock()
     field_generator = lambda filename: [field]
     # A fake rule set returning:
     #   1) A parameter cube needing a simple factory construction.
     src_cube = Mock()
     # Bypass the real coord lookup, and capture any attached factory.
     src_cube.coord = lambda **args: args
     src_cube.add_aux_factory = lambda aux_factory: \
         setattr(src_cube, 'fake_aux_factory', aux_factory)
     aux_factory = Mock()
     factory = Mock()
     factory.args = [{'name': 'foo'}]
     # Record the construction args, then hand back the fake factory
     # (setattr returns None, so `or` evaluates to aux_factory).
     factory.factory_class = lambda *args: \
         setattr(aux_factory, 'fake_args', args) or aux_factory
     rule_result = RuleResult(src_cube, Mock(), [factory])
     rules = Mock()
     rules.result = lambda field: rule_result
     # A cross-reference rule set that never matches anything.
     xref_rules = Mock()
     xref_rules.matching_rules = lambda field: []
     # Assemble the fake Loader and run the load.
     fake_loader = Loader(field_generator, {}, rules, xref_rules, 'FAKE_PP')
     cubes = load_cubes(['fake_filename'], None, fake_loader)
     # The result must be a generator whose only entry is our "cube".
     self.assertIsInstance(cubes, types.GeneratorType)
     cubes = list(cubes)
     self.assertEqual(len(cubes), 1)
     self.assertIs(cubes[0], src_cube)
     # The "cube" must have gained an "aux_factory", created with the
     # expected arguments.
     self.assertTrue(hasattr(src_cube, 'fake_aux_factory'))
     self.assertIs(src_cube.fake_aux_factory, aux_factory)
     self.assertTrue(hasattr(aux_factory, 'fake_args'))
     self.assertEqual(aux_factory.fake_args, ({'name': 'foo'}, ))
コード例 #19
0
ファイル: __init__.py プロジェクト: cpelley/iris
def load_cubes(filenames, callback=None):
    """
    Yield Iris cubes loaded from the given GRIB files.

    Args:

    * filenames:
        One or more GRIB filenames to load from.

    Kwargs:

    * callback:
        Function which can be passed on to :func:`iris.io.run_callback`.

    Returns:
        A generator containing Iris cubes loaded from the GRIB files.

    """
    # Imported locally, matching the original module layout.
    import iris.fileformats.rules as iris_rules
    loader = iris_rules.Loader(_load_generate, {}, load_convert)
    return iris_rules.load_cubes(filenames, callback, loader)
コード例 #20
0
def load(filenames, callback=None):
    """
    Load structured FieldsFiles and PP files.

    Args:

    * filenames:
        One or more filenames.


    Kwargs:

    * callback:
        A modifier/filter function. Please see the module documentation
        for :mod:`iris`.

        .. note::

            Unlike the standard :func:`iris.load` operation, the callback is
            applied to the final result cubes, not individual input fields.

    Returns:
        An :class:`iris.cube.CubeList`.


    This is a streamlined load operation, to be used only on fieldsfiles or PP
    files whose fields repeat regularly over the same vertical levels and
    times. The results aim to be equivalent to those generated by
    :func:`iris.load`, but the operation is substantially faster for input that
    is structured.

    The structured input files should conform to the following requirements:

    *  the file must contain fields for all possible combinations of the
       vertical levels and time points found in the file.

    *  the fields must occur in a regular repeating order within the file.

       (For example: a sequence of fields for NV vertical levels, repeated
       for NP different forecast periods, repeated for NT different forecast
       times).

    *  all other metadata must be identical across all fields of the same
       phenomenon.

    Each group of fields with the same values of LBUSER4, LBUSER7 and LBPROC
    is identified as a separate phenomenon:  These groups are processed
    independently and returned as separate result cubes.

    .. note::

        Each input file is loaded independently.  Thus a single result cube can
        not combine data from multiple input files.

    .. note::

        The resulting time-related coordinates ('time', 'forecast_time' and
        'forecast_period') may be mapped to shared cube dimensions and in some
        cases can also be multidimensional.  However, the vertical level
        information *must* have a simple one-dimensional structure, independent
        of the time points, otherwise an error will be raised.

    .. note::

        Where input data does *not* have a fully regular arrangement, the
        corresponding result cube will have a single anonymous extra dimension
        which indexes over all the input fields.

        This can happen if, for example, some fields are missing; or have
        slightly different metadata; or appear out of order in the file.

    .. warning::

        Any non-regular metadata variation in the input should be strictly
        avoided, as not all irregularities are detected, which can cause
        erroneous results.


    """
    # This entire module is deprecated: warn on every call.
    warn_deprecated(
        "The module 'iris.experimental.fieldsfile' is deprecated. "
        "Please use the 'iris.fileformats.um.structured_um_loading' facility "
        "as a replacement."
        "\nA call to 'iris.experimental.fieldsfile.load' can be replaced with "
        "'iris.load_raw', within a 'structured_um_loading' context.")
    # Structured loading: fields are grouped into collations per file, then
    # converted to cubes via the generic rules machinery.
    loader = Loader(_collations_from_filename, {}, _convert_collation, None)
    return CubeList(load_cubes(filenames, callback, loader, None))
コード例 #21
0
ファイル: fieldsfile.py プロジェクト: carlocafaro89/iris
def load(filenames, callback=None):
    """
    Load structured FieldsFiles.

    Args:

    * filenames:
        One or more filenames.


    Kwargs:

    * callback:
        A modifier/filter function. Please see the module documentation
        for :mod:`iris`.

        .. note::

            Unlike the standard :func:`iris.load` operation, the callback is
            applied to the final result cubes, not individual input fields.

    Returns:
        An :class:`iris.cube.CubeList`.


    This is a streamlined load operation, to be used only on fieldsfiles whose
    fields repeat regularly over the same vertical levels and times.
    The results are equivalent to those from :func:`iris.load`, but operation
    is substantially faster for suitable input.

    The input files should conform to the following requirements:

    *  the file must contain fields for all possible combinations of the
       vertical levels and time points found in the file.

    *  the fields must occur in a regular repeating order within the file.

       (For example: a sequence of fields for NV vertical levels, repeated
       for NP different forecast periods, repeated for NT different forecast
       times).

    *  all other metadata must be identical across all fields of the same
       phenomenon.

    Each group of fields with the same values of LBUSER4, LBUSER7 and LBPROC
    is identified as a separate phenomenon:  These groups are processed
    independently and returned as separate result cubes.

    .. note::

        Each input file is loaded independently.  Thus a single result cube can
        not combine data from multiple input files.

    .. note::

        The resulting time-related coordinates ('time', 'forecast_time' and
        'forecast_period') may be mapped to shared cube dimensions and in some
        cases can also be multidimensional.  However, the vertical level
        information *must* have a simple one-dimensional structure, independent
        of the time points, otherwise an error will be raised.

    .. note::

        Where input data does *not* have a fully regular arrangement, the
        corresponding result cube will have a single anonymous extra dimension
        which indexes over all the input fields.

        This can happen if, for example, some fields are missing; or have
        slightly different metadata; or appear out of order in the file.

    .. warning::

        Any non-regular metadata variation in the input should be strictly
        avoided, as not all irregularities are detected, which can cause
        erroneous results.

    """
    # Structured loading: fields are grouped into collations per file, then
    # converted to cubes via the generic rules machinery.
    loader = Loader(_collations_from_filename, {}, _convert_collation, None)
    return CubeList(load_cubes(filenames, callback, loader, None))