def get_cube(url, name_list, bbox=None, time=None, units=None, callback=None,
             constraint=None):
    """Only `url` and `name_list` are mandatory.  The kw args are:
    `bbox`, `callback`, `time`, `units`, `constraint`."""

    cubes = iris.load_raw(url, callback=callback)

    cubes = CubeList([cube for cube in cubes
                      if cube.standard_name in name_list])
    if not cubes:
        raise ValueError('Cube does not contain {!r}'.format(name_list))
    cube = cubes.merge_cube()

    if constraint:
        cube = cube.extract(constraint)
        if not cube:
            raise ValueError('No cube using {!r}'.format(constraint))
    if bbox:
        cube = subset(cube, bbox)
        if not cube:
            raise ValueError('No cube using {!r}'.format(bbox))
    if time:
        if isinstance(time, datetime):
            start, stop = time, None
        elif isinstance(time, tuple):
            start, stop = time[0], time[1]
        else:
            raise ValueError('Time must be start or (start, stop).'
                             '  Got {!r}'.format(time))
        cube = time_slice(cube, start, stop)
    if units:
        if cube.units != units:
            cube.convert_units(units)
    return cube
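A minimal usage sketch for the function above. The URL, variable name and bounding box are illustrative placeholders, and the helpers referenced in the snippet (`subset`, `time_slice`) are assumed to be importable from the surrounding module.

# Hypothetical call to get_cube; the URL and standard_name are placeholders.
from datetime import datetime

url = 'example_model_output.nc'                  # assumed local or OPeNDAP source
name_list = ['sea_water_potential_temperature']  # assumed standard_name in the file
bbox = [-87.4, 24.25, -74.7, 36.70]              # [lon_min, lat_min, lon_max, lat_max]

cube = get_cube(url, name_list,
                bbox=bbox,
                time=(datetime(2014, 7, 1), datetime(2014, 7, 7)),
                units='celsius')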
Example #2
def get_cube(url, name_list=None, bbox=None, callback=None,
             time=None, units=None, constraint=None):
    cubes = iris.load_raw(url, callback=callback)
    if constraint:
        cubes = cubes.extract(constraint)
    if name_list:
        cubes = CubeList([cube for cube in cubes
                          if cube.standard_name in name_list])
        if not cubes:
            raise ValueError('Cube does not contain {!r}'.format(name_list))
    # Merge whatever remains (all loaded cubes if no `name_list` was given).
    cube = cubes.merge_cube()
    if bbox:
        cube = intersection(cube, bbox)
    if time:
        if isinstance(time, datetime):
            start, stop = time, None
        elif isinstance(time, tuple):
            start, stop = time[0], time[1]
        else:
            raise ValueError('Time must be start or (start, stop).'
                             '  Got {!r}'.format(time))
        cube = time_slice(cube, start, stop)
    if units:
        if cube.units != units:
            cube.convert_units(units)
    return cube
Example #3
def _add_levels(cube, levels=13):
    clist = CubeList()
    for level in range(levels):
        mln = DimCoord(level, standard_name='model_level_number')
        other = cube.copy()
        other.add_aux_coord(mln)
        clist.append(other)
    return clist.merge_cube()
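A small self-contained sketch of what `_add_levels` produces: each copy of the input cube gets a scalar `model_level_number` coordinate, and the merge promotes that scalar into a new leading dimension. The cube below is a dummy built only for illustration.

import numpy as np
from iris.cube import Cube

cube_2d = Cube(np.zeros((2, 2)), long_name='dummy_field')
stacked = _add_levels(cube_2d, levels=3)
print(stacked.shape)                               # (3, 2, 2)
print(stacked.coord('model_level_number').points)  # [0 1 2]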
Example #4
 def test_realise_data(self):
     # Simply check that calling CubeList.realise_data calls
     # _lazy_data.co_realise_cubes.
     mock_cubes_list = [mock.Mock(ident=count) for count in range(3)]
     test_cubelist = CubeList(mock_cubes_list)
     call_patch = self.patch('iris._lazy_data.co_realise_cubes')
     test_cubelist.realise_data()
     # Check it was called once, passing cubes as *args.
     self.assertEqual(call_patch.call_args_list,
                      [mock.call(*mock_cubes_list)])
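A hedged sketch of how `CubeList.realise_data` is typically used outside the test: realise the lazy data of several cubes in one combined pass. The file path is a placeholder.

import iris

cubes = iris.load_raw('example_file.nc')  # placeholder path; data starts lazy
cubes.realise_data()                      # compute all cube.data arrays together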
Example #5
class Test_xml(tests.IrisTest):
    def setUp(self):
        self.cubes = CubeList([Cube(np.arange(3)),
                               Cube(np.arange(3))])

    def test_byteorder_default(self):
        self.assertIn('byteorder', self.cubes.xml())

    def test_byteorder_false(self):
        self.assertNotIn('byteorder', self.cubes.xml(byteorder=False))

    def test_byteorder_true(self):
        self.assertIn('byteorder', self.cubes.xml(byteorder=True))
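The behaviour exercised above, as a standalone sketch: `CubeList.xml` returns an XML description of the cubes, and the `byteorder` keyword controls whether byte-order attributes appear in it.

import numpy as np
from iris.cube import Cube, CubeList

cubes = CubeList([Cube(np.arange(3)), Cube(np.arange(3))])
xml_no_byteorder = cubes.xml(byteorder=False)
print('byteorder' in xml_no_byteorder)  # False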
Example #6
    def _create_cube(self, filenames, variable):
        import numpy as np
        from cis.data_io.hdf import _read_hdf4
        from cis.data_io import hdf_vd
        from iris.cube import Cube, CubeList
        from iris.coords import DimCoord, AuxCoord
        from cis.time_util import calculate_mid_time, cis_standard_time_unit
        from cis.data_io.hdf_sd import get_metadata
        from cf_units import Unit

        variables = ['XDim:GlobalGrid', 'YDim:GlobalGrid', variable]
        logging.info("Listing coordinates: " + str(variables))

        cube_list = CubeList()
        # Read each file individually, let Iris do the merging at the end.
        for f in filenames:
            sdata, vdata = _read_hdf4(f, variables)

            lat_points = np.linspace(-90., 90., hdf_vd.get_data(vdata['YDim:GlobalGrid']))
            lon_points = np.linspace(-180., 180., hdf_vd.get_data(vdata['XDim:GlobalGrid']))

            lat_coord = DimCoord(lat_points, standard_name='latitude', units='degrees')
            lon_coord = DimCoord(lon_points, standard_name='longitude', units='degrees')

            # create time coordinate using the midpoint of the time delta between the start date and the end date
            start_datetime = self._get_start_date(f)
            end_datetime = self._get_end_date(f)
            mid_datetime = calculate_mid_time(start_datetime, end_datetime)
            logging.debug("Using {} as datetime for file {}".format(mid_datetime, f))
            time_coord = AuxCoord(mid_datetime, standard_name='time', units=cis_standard_time_unit,
                                  bounds=[start_datetime, end_datetime])

            var = sdata[variable]
            metadata = get_metadata(var)

            try:
                units = Unit(metadata.units)
            except ValueError:
                logging.warning("Unable to parse units '{}' in {} for {}.".format(metadata.units, f, variable))
                units = None

            cube = Cube(_get_MODIS_SDS_data(sdata[variable]),
                        dim_coords_and_dims=[(lon_coord, 1), (lat_coord, 0)],
                        aux_coords_and_dims=[(time_coord, None)],
                        var_name=metadata._name, long_name=metadata.long_name, units=units)

            cube_list.append(cube)

        # Merge the cube list across the scalar time coordinates before returning a single cube.
        return cube_list.merge_cube()
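The merge pattern used above, reduced to a minimal self-contained sketch: cubes that are identical apart from a scalar time coordinate merge into a single cube with time promoted to a dimension. Names and units below are illustrative only.

import numpy as np
from iris.cube import Cube, CubeList
from iris.coords import AuxCoord

cube_list = CubeList()
for day in (0, 1, 2):
    cube = Cube(np.zeros((2, 3)), long_name='example_field')
    cube.add_aux_coord(AuxCoord(day, standard_name='time',
                                units='days since 2000-01-01'))
    cube_list.append(cube)

merged = cube_list.merge_cube()  # shape (3, 2, 3); 'time' becomes a dim coord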
Example #7
 def test_orthogonal_with_realization(self):
     # => fp: 2; rt: 2; t: 2; realization: 2
     triples = ((0, 10, 1),
                (0, 10, 2),
                (0, 11, 1),
                (0, 11, 2),
                (1, 10, 1),
                (1, 10, 2),
                (1, 11, 1),
                (1, 11, 2))
     en1_cubes = [self._make_cube(*triple, realization=1) for
                  triple in triples]
     en2_cubes = [self._make_cube(*triple, realization=2) for
                  triple in triples]
     cubes = CubeList(en1_cubes) + CubeList(en2_cubes)
     cube, = cubes.merge()
     self.assertCML(cube, checksum=False)
Example #8
 def test_combination_with_realization(self):
     # => fp, rt, t: 8; realization: 2
     triples = ((0, 10, 1),
                (0, 10, 2),
                (0, 11, 1),
                (0, 11, 3),  # This '3' breaks the pattern.
                (1, 10, 1),
                (1, 10, 2),
                (1, 11, 1),
                (1, 11, 2))
     en1_cubes = [self._make_cube(*triple, realization=1) for
                  triple in triples]
     en2_cubes = [self._make_cube(*triple, realization=2) for
                  triple in triples]
     cubes = CubeList(en1_cubes) + CubeList(en2_cubes)
     cube, = cubes.merge()
     self.assertCML(cube, checksum=False)
Example #9
 def test_combination_with_extra_triple(self):
     # => fp, rt, t, realization: 17
     triples = ((0, 10, 1),
                (0, 10, 2),
                (0, 11, 1),
                (0, 11, 2),
                (1, 10, 1),
                (1, 10, 2),
                (1, 11, 1),
                (1, 11, 2))
     en1_cubes = [self._make_cube(*triple, realization=1) for
                  triple in triples]
     # Add extra time triple on the end.
     en2_cubes = [self._make_cube(*triple, realization=2) for
                  triple in triples + ((1, 11, 3),)]
     cubes = CubeList(en1_cubes) + CubeList(en2_cubes)
     cube, = cubes.merge()
     self.assertCML(cube, checksum=False)
Example #10
 def test_combination_with_extra_realization(self):
     # => fp, rt, t, realization: 17
     triples = ((0, 10, 1),
                (0, 10, 2),
                (0, 11, 1),
                (0, 11, 2),
                (1, 10, 1),
                (1, 10, 2),
                (1, 11, 1),
                (1, 11, 2))
     en1_cubes = [self._make_cube(*triple, realization=1) for
                  triple in triples]
     en2_cubes = [self._make_cube(*triple, realization=2) for
                  triple in triples]
     # Add an extra cube that is a duplicate of one of the time triples
     # but with a different realisation.
     en3_cubes = [self._make_cube(0, 10, 2, realization=3)]
     cubes = CubeList(en1_cubes) + CubeList(en2_cubes) + CubeList(en3_cubes)
     cube, = cubes.merge()
     self.assertCML(cube, checksum=False)
Example #11
class Test_extract(tests.IrisTest):
    def setUp(self):
        self.scalar_cubes = CubeList()
        for i in range(5):
            for letter in 'abcd':
                self.scalar_cubes.append(Cube(i, long_name=letter))

    def test_scalar_cube_name_constraint(self):
        # Test the name based extraction of a CubeList containing scalar cubes.
        res = self.scalar_cubes.extract('a')
        expected = CubeList([Cube(i, long_name='a') for i in range(5)])
        self.assertEqual(res, expected)

    def test_scalar_cube_data_constraint(self):
        # Test the extraction of a CubeList containing scalar cubes
        # when using a cube_func.
        val = 2
        constraint = iris.Constraint(cube_func=lambda c: c.data == val)
        res = self.scalar_cubes.extract(constraint)
        expected = CubeList([Cube(val, long_name=letter) for letter in 'abcd'])
        self.assertEqual(res, expected)
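The same extraction behaviour outside the unittest harness, as a short sketch; the scalar cube values and names are illustrative.

import iris
from iris.cube import Cube, CubeList

cubes = CubeList([Cube(i, long_name=letter)
                  for i in range(3) for letter in 'ab'])
named = cubes.extract('a')                                        # cubes named 'a'
twos = cubes.extract(iris.Constraint(cube_func=lambda c: c.data == 2))
print(len(named), len(twos))                                      # 3 2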
Example #12
 def test_FAIL_phenomena_nostash(self):
     # If we remove the 'STASH' attributes, certain phenomena can still be
     # successfully encoded+decoded by standard load using LBFC values.
     # Structured loading gets this wrong, because it does not use LBFC in
     # characterising phenomena.
     flds = self.fields(c_t='1122', phn='0101')
     for fld in flds:
         del fld.attributes['STASH']
     file = self.save_fieldcubes(flds)
     results = iris.load(file)
     if not self.do_fast_loads:
         # This is what we'd LIKE to get (what iris.load gives).
         expected = CubeList(flds).merge()
     else:
         # At present, we get a cube incorrectly combined together over all
         # 4 timepoints, with the same phenomenon for all (!wrong!).
         # It's a bit tricky to arrange the existing data like that.
         # Do it by hacking the time values to allow a merge, and then fixing
         # up the time coordinate afterwards.
         old_t1, old_t2 = (fld.coord('time').points[0]
                           for fld in (flds[0], flds[2]))
         for i_fld, fld in enumerate(flds):
             # Hack the phenomena to all look like the first one.
             fld.rename('air_temperature')
             fld.units = 'K'
             # Hack the time points so the 4 cubes can merge into one.
             fld.coord('time').points = [old_t1 + i_fld]
         one_cube = CubeList(flds).merge_cube()
         # Replace time dim with an anonymous dim.
         co_t_fake = one_cube.coord('time')
         one_cube.remove_coord(co_t_fake)
         # Reconstruct + add back the expected auxiliary time coord.
         co_t_new = AuxCoord([old_t1, old_t1, old_t2, old_t2],
                             standard_name='time', units=co_t_fake.units)
         one_cube.add_aux_coord(co_t_new, 0)
         expected = [one_cube]
     self.assertEqual(results, expected)
 def test_empty_cube_list(self):
     """Tests that an error is raised if there is an empty list."""
     msg = "Expected 4"
     with self.assertRaisesRegex(ValueError, msg):
         PhaseChangeLevel(phase_change="snow-sleet").process(CubeList([]))
Example #14
 def test_empty_cube_list(self):
     """Tests that an error is raised if there is an empty list."""
     msg = "Expected 3"
     with self.assertRaisesRegex(ValueError, msg):
         WetBulbTemperature().process(CubeList([]))
Example #15
def run_spotdata(diagnostics,
                 ancillary_data,
                 sites,
                 config_constants,
                 use_multiprocessing=False):
    """
    A routine that calls the components of the spotdata code. This includes
    building site data into a suitable format, finding grid neighbours to
    those sites with the chosen method, and then extracting data with the
    chosen method. The final results are written out to new irregularly
    gridded iris.cube.Cubes.

    Args:
        diagnostics (dict):
            Dictionary containing the information regarding the methods that
            will be applied for a specific diagnostic, as well as the data
            following loading in a cube, and any additional data required to
            be able to compute the methods requested.

            For example::

              {
                  "temperature": {
                      "diagnostic_name": "air_temperature",
                      "extrema": True,
                      "filepath": "temperature_at_screen_level",
                      "interpolation_method": "use_nearest",
                      "neighbour_finding": {
                          "land_constraint": False,
                          "method": "fast_nearest_neighbour",
                          "vertical_bias": None
                      "data": iris.cube.CubeList
                      "additional_data" : iris.cube.CubeList
                      }
                  }
              }

        ancillary_data (dict):
            Dictionary containing named ancillary data; the key gives the name
            and the item is the iris.cube.Cube of data.

        sites (dict):
            Contains:

            latitudes (list of ints/floats or None):
                A list of latitudes for a custom set of sites. The order
                should correspond to the subsequent longitudes and altitudes
                variables used to construct each site.

            longitudes (list of ints/floats or None):
                A list of longitudes for a custom set of sites.

            altitudes (list of ints/floats or None):
                A list of altitudes for a custom set of sites.

            site_ids (list of ints or None):
                A list of site_ids to associate with the above
                constructed sites. This must be ordered the same as the
                latitudes/longitudes/altitudes lists.

        config_constants (dict):
            Dictionary defining constants to be used in methods that have
            tolerances that may be set. e.g. maximum vertical extrapolation/
            interpolation of temperatures using a temperature lapse rate
            method.

        use_multiprocessing (boolean):
            A switch determining whether to use multiprocessing in the data
            extraction step.

    Returns:
        (tuple): tuple containing:
            **resulting_cube** (iris.cube.Cube or None):
                Cube after extracting the diagnostic requested using the
                desired extraction method.
                None is returned if the "resulting_cubes" is an empty CubeList
                after processing.
            **extrema_cubes** (iris.cube.CubeList or None):
                CubeList containing extrema values, if the 'extrema' diagnostic
                is requested.
                None is returned if the value for diagnostic_dict["extrema"]
                is False, so that the extrema calculation is not required.
    """
    # Read in constants to use; if not available, defaults will be used.
    neighbour_kwargs = {}
    if config_constants is not None:
        no_neighbours = config_constants.get('no_neighbours')
        if no_neighbours is not None:
            neighbour_kwargs['no_neighbours'] = no_neighbours

    # Add configuration constants to ancillaries (may be None if unset).
    ancillary_data['config_constants'] = config_constants

    # Set up site-grid point neighbour list using default method. Other IGPS
    # methods will use this as a starting point so it must always be done.
    # Assumes orography file is on the same grid as the diagnostic data.
    neighbours = {}
    default_neighbours = {
        'method': 'fast_nearest_neighbour',
        'vertical_bias': None,
        'land_constraint': False
    }
    default_hash = construct_neighbour_hash(default_neighbours)
    neighbours[default_hash] = PointSelection(**default_neighbours).process(
        ancillary_data['orography'],
        sites,
        ancillary_data=ancillary_data,
        **neighbour_kwargs)

    # Set up site-grid point neighbour lists for all IGPS methods being used.
    for key in diagnostics.keys():
        neighbour_finding = diagnostics[key]['neighbour_finding']
        neighbour_hash = construct_neighbour_hash(neighbour_finding)
        # Check if defined neighbour method results already exist.
        if neighbour_hash not in neighbours.keys():
            # If not, find neighbours with new method.
            neighbours[neighbour_hash] = (PointSelection(
                **neighbour_finding).process(
                    ancillary_data['orography'],
                    sites,
                    ancillary_data=ancillary_data,
                    default_neighbours=neighbours[default_hash],
                    **neighbour_kwargs))

    if use_multiprocessing:
        # Process diagnostics in parallel worker processes if multiprocessing
        # is selected. Determine the number of diagnostics to establish the
        # multiprocessing pool size.
        n_diagnostic_processes = min(len(diagnostics.keys()), mp.cpu_count())

        # Establish multiprocessing pool - each diagnostic is processed in its
        # own worker process.
        diagnostic_pool = mp.Pool(processes=n_diagnostic_processes)

        diagnostic_keys = diagnostics.keys()

        result = (diagnostic_pool.map_async(
            partial(process_diagnostic, diagnostics, neighbours, sites,
                    ancillary_data), diagnostic_keys))
        diagnostic_pool.close()
        diagnostic_pool.join()
        resulting_cubes = CubeList()
        extrema_cubes = CubeList()
        for diagnostic_result in result.get():
            resulting_cubes.append(diagnostic_result[0])
            extrema_cubes.append(diagnostic_result[1:])
    else:
        # Process diagnostics serially on one thread.
        resulting_cubes = CubeList()
        extrema_cubes = CubeList()
        for key in diagnostics.keys():
            resulting_cube, extrema_cubelist = (process_diagnostic(
                diagnostics, neighbours, sites, ancillary_data, key))
            resulting_cubes.append(resulting_cube)
            extrema_cubes.append(extrema_cubelist)
    return resulting_cubes, extrema_cubes
Example #16
class AtmosFlow:
    """
    Atmospheric Flow

    Used to calculate meteorological parameters from the given cubes.
    Derived quantities are stored as cached properties to save computational
    time.

    Quantities that involve horizontal derivatives are only valid in
    Cartesian coordinates, e.g. on the output from a LAM model with constant
    grid spacing. Use `prepare_cube_on_model_levels` to prepare cubes for
    `AtmosFlow` on a Cartesian grid.

    Attributes
    ----------
    cubes: iris.cube.CubeList
        list of cubes representing meteorological parameters
    main_cubes: iris.cube.CubeList
        list of non-scalar cubes
    wind_cmpnt: iris.cube.CubeList
        list of u,v,w-wind components
    {x,y,z}coord: iris.coord.Coord
        Coordinates in the respective dimensions
    pres: iris.cube.Cube
        Pressure created from a coordinate, if possible
    lats: iris.cube.Cube
        latitudes
    fcor: iris.cube.Cube
        Coriolis parameter (taken at 45N by default)
    d{x,y}: iris.cube.Cube
        Grid spacing (if cartesian=True)
    """
    def __init__(self, cartesian=True, **kw_vars):
        """
        Parameters
        ----------
        cartesian: bool (default True)
            Cartesian coord system flag
        **kw_vars: dict of iris cubes
            meteorological parameters

        Examples
        --------
        Initialise an `AtmosFlow` object with 3 wind components
        >>> AF = AtmosFlow(u=u_cart, v=v_cart, w=w_cart)
        and calculate relative vorticity:
        >>> rv = AF.rel_vort
        """
        self.__dict__.update(kw_vars)
        self.cartesian = cartesian

        self.cubes = CubeList(filter(iscube, self.__dict__.values()))
        self.main_cubes = CubeList(filter(iscube_and_not_scalar,
                                          self.__dict__.values()))
        self.wind_cmpnt = CubeList(filter(None,
                                          [getattr(self, 'u', None),
                                           getattr(self, 'v', None),
                                           getattr(self, 'w', None)]))
        thecube = self.main_cubes[0]

        check_coords(self.main_cubes)

        # Get the x/y dim coords and the vertical coordinate of the cube
        self.xcoord = thecube.coord(axis='X', dim_coords=True)
        self.ycoord = thecube.coord(axis='Y', dim_coords=True)
        self.zcoord = thecube.coord(axis='Z')
        if self.zcoord.units.is_convertible('Pa'):
            # Check if the vertical coordinate is pressure
            self.zmode = 'pcoord'
            for cube in self.main_cubes:
                if self.zcoord in cube.dim_coords:
                    clean_pressure_coord(cube)
            self.pres = coords.pres_coord_to_cube(thecube)
            self.cubes.append(self.pres)

        if not hasattr(self, 'lats'):
            try:
                _, lats = grid.unrotate_lonlat_grids(thecube)
            except (ValueError, AttributeError):
                lats = np.array([45.])
            self.lats = Cube(lats,
                             units='degrees',
                             standard_name='latitude')
        self.fcor = mcalc.coriolis_parameter(self.lats)
        self.fcor.convert_units('s-1')

        if self.cartesian:
            for ax, rot_name in zip(('x',  'y'),
                                    ('grid_longitude', 'grid_latitude')):
                for cube in self.cubes:
                    if rot_name in [i.name() for i in cube.coords(axis=ax)]:
                        cube.remove_coord(rot_name)

            try:
                _dx = thecube.attributes['um_res'].to_flt('m')
            except KeyError:
                _dx = 1.
            self.dx = Cube(_dx, units='m')
            self.dy = Cube(_dx, units='m')

        # Non-spherical coords?
        # self.horiz_cs = thecube.coord(axis='x', dim_coords=True).coord_system
        self.horiz_cs = thecube.coord_system()
        self._spherical_coords = isinstance(self.horiz_cs,
                                            (iris.coord_systems.GeogCS,
                                             iris.coord_systems.RotatedGeogCS))
        # todo: interface for spherical coordinates switch?
        # assert not self._spherical_coords,\
        #     'Only non-spherical coordinates are allowed ...'
        if self.cartesian and self._spherical_coords:
            warnings.warn('Cubes are in spherical coordinates!'
                          '\n Use `replace_lonlat_dimcoord_with_cart function`'
                          ' to change coordinates!')

    def __repr__(self):
        msg = "arke `Atmospheric Flow` containing of:\n"
        msg += "\n".join(tuple(i.name() for i in self.cubes))
        return msg

    def d_dx(self, name=None, alias=None):
        r"""
        Derivative of a cube along the x-axis
        .. math::
            \frac{\partial }{\partial x}
        """
        if name is None and isinstance(alias, str):
            v = getattr(self, alias)
        else:
            v = self.cubes.extract_strict(name)
        return cube_deriv(v, self.xcoord)

    def d_dy(self, name=None, alias=None):
        r"""
        Derivative of a cube along the y-axis
        .. math::
            \frac{\partial }{\partial y}
        """
        if name is None and isinstance(alias, str):
            v = getattr(self, alias)
        else:
            v = self.cubes.extract_strict(name)
        return cube_deriv(v, self.ycoord)

    def hgradmag(self, name=None, alias=None):
        r"""
        Magnitude of the horizontal gradient of a cube
        .. math::
            \sqrt{(\frac{\partial }{\partial x})^2
                 +(\frac{\partial }{\partial y})^2}
        """
        dvdx2 = self.d_dx(name=name, alias=alias) ** 2
        dvdy2 = self.d_dy(name=name, alias=alias) ** 2
        return (dvdx2 + dvdy2) ** 0.5

    @cached_property
    def wspd(self):
        r"""
        Calculate wind speed (magnitude)
        .. math::
            \sqrt{u^2 + v^2 + w^2}
        """
        res = 0
        for cmpnt in self.wind_cmpnt:
            res += cmpnt**2
        res = res**0.5
        res.rename('wind_speed')
        return res

    @cached_property
    def tke(self):
        r"""
        Calculate total kinetic energy
        .. math::
            0.5(u^2 + v^2 + w^2)
        """
        res = 0
        for cmpnt in self.wind_cmpnt:
            res += cmpnt**2
        res = 0.5 * res  # * self.density
        res.convert_units('m2 s-2')
        res.rename('total_kinetic_energy')
        return res

    @cached_property
    def du_dx(self):
        r"""
        Derivative of u-wind along the x-axis
        .. math::
            u_x = \frac{\partial u}{\partial x}
        """
        return cube_deriv(self.u, self.xcoord)

    @cached_property
    def du_dy(self):
        r"""
        Derivative of u-wind along the y-axis
        .. math::
            u_y = \frac{\partial u}{\partial y}
        """
        return cube_deriv(self.u, self.ycoord)

    @cached_property
    def du_dz(self):
        r"""
        Derivative of u-wind along the z-axis
        .. math::
            u_z = \frac{\partial u}{\partial z}
        """
        return cube_deriv(self.u, self.zcoord)

    @cached_property
    def dv_dx(self):
        r"""
        Derivative of v-wind along the x-axis
        .. math::
            v_x = \frac{\partial v}{\partial x}
        """
        return cube_deriv(self.v, self.xcoord)

    @cached_property
    def dv_dy(self):
        r"""
        Derivative of v-wind along the y-axis
        .. math::
            v_y = \frac{\partial v}{\partial y}
        """
        return cube_deriv(self.v, self.ycoord)

    @cached_property
    def dv_dz(self):
        r"""
        Derivative of v-wind along the z-axis
        .. math::
            v_z = \frac{\partial v}{\partial z}
        """
        return cube_deriv(self.v, self.zcoord)

    @cached_property
    def dw_dx(self):
        r"""
        Derivative of w-wind along the x-axis
        .. math::
            w_x = \frac{\partial w}{\partial x}
        """
        return cube_deriv(self.w, self.xcoord)

    @cached_property
    def dw_dy(self):
        r"""
        Derivative of w-wind along the y-axis
        .. math::
            w_y = \frac{\partial w}{\partial y}
        """
        return cube_deriv(self.w, self.ycoord)

    @cached_property
    def dw_dz(self):
        r"""
        Derivative of w-wind along the z-axis
        .. math::
            w_z = \frac{\partial w}{\partial z}
        """
        return cube_deriv(self.w, self.zcoord)

    @cached_property
    def rel_vort(self):
        r"""
        Calculate the vertical component of the vorticity vector
        .. math::
            \zeta = v_x - u_y
        """
        res = self.dv_dx - self.du_dy
        res.rename('atmosphere_relative_vorticity')
        res.convert_units('s-1')
        return res

    @cached_property
    def div_h(self):
        r"""
        Calculate the horizontal divergence
        .. math::
            D_h = u_x + v_y
        """
        res = self.du_dx + self.dv_dy
        res.rename('divergence_of_wind')
        res.convert_units('s-1')
        return res

    @cached_property
    def rel_vort_hadv(self):
        r"""
        Calculate the horizontal advection of relative vorticity
        .. math::
            \vec v\cdot \nabla \zeta
        """
        res = (self.u*cube_deriv(self.rel_vort, self.xcoord) +
               self.v*cube_deriv(self.rel_vort, self.ycoord))
        res.rename('horizontal_advection_of_atmosphere_relative_vorticity')
        res.convert_units('s-2')
        return res

    @cached_property
    def rel_vort_vadv(self):
        r"""
        Calculate the vertical advection of relative vorticity
        .. math::
            w\frac{\partial \zeta}{\partial z}
        """
        res = self.w*cube_deriv(self.rel_vort, self.zcoord)
        res.rename('vertical_advection_of_atmosphere_relative_vorticity')
        res.convert_units('s-2')
        return res

    @cached_property
    def rel_vort_stretch(self):
        r"""
        Stretching term
        .. math::
            \nabla\cdot\vec v (\zeta+f)
        """
        res = self.div_h * (self.rel_vort + self.fcor.data)
        res.rename('stretching_term_of_atmosphere_relative_vorticity_budget')
        res.convert_units('s-2')
        return res

    @cached_property
    def rel_vort_tilt(self):
        r"""
        Tilting (twisting) term
        .. math::
            \vec k \cdot \nabla w\times\frac{\partial\vec v}{\partial z} =
            \frac{\partial w}{\partial x}*\frac{\partial v}{\partial z} -
            \frac{\partial w}{\partial y}*\frac{\partial u}{\partial z}
        """
        res = self.dw_dx * self.dv_dz - self.dw_dy * self.du_dz
        res.rename('tilting_term_of_atmosphere_relative_vorticity_budget')
        res.convert_units('s-2')
        return res

    @cached_property
    def dfm_stretch(self):
        r"""
        Stretching deformation
        .. math::
            Def = u_x - v_y
        """
        res = self.du_dx - self.dv_dy
        res.rename('stretching_deformation_2d')
        res.convert_units('s-1')
        return res

    @cached_property
    def dfm_shear(self):
        r"""
        Shearing deformation
        .. math::
            Def' = u_y + v_x
        """
        res = self.du_dy + self.dv_dx
        res.rename('shearing_deformation_2d')
        res.convert_units('s-1')
        return res

    @cached_property
    def kvn(self):
        r"""
        Kinematic vorticity number

        .. math::
            W_k=\frac{||\Omega||}{||S||}=
            \frac{\sqrt{\zeta^2}}{\sqrt{D_h^2 + Def^2 + Def'^2}}
        where
        .. math::
            \zeta=v_x - u_y
            D_h = u_x + v_y
            Def = u_x - v_y
            Def' = u_y + v_x

        Reference:
            http://dx.doi.org/10.3402/tellusa.v68.29464
        """
        numerator = self.rel_vort
        denominator = (self.div_h**2 +
                       self.dfm_stretch**2 +
                       self.dfm_shear**2)**0.5
        res = numerator/denominator
        res.rename('kinematic_vorticity_number_2d')
        return res

    @cached_property
    def density(self):
        r"""
        Air density

        .. math::
            \rho = \frac{p}{R_d T}
        """
        p = self.cubes.extract_strict('air_pressure')
        rd = AuxCoord(mconst.Rd.data, units=mconst.Rd.units)
        try:
            temp = self.cubes.extract_strict('air_temperature')
        except iris.exceptions.ConstraintMismatchError:
            temp = self.temp
        res = p / (temp * rd)
        res.rename('air_density')
        res.convert_units('kg m-3')
        self.cubes.append(res)
        return res

    @cached_property
    def theta(self):
        r"""
        Air potential temperature

        If temperature is given:
        .. math::
            \theta = T (p_0/p)^{R_d/c_p}
        """
        try:
            th = self.cubes.extract_strict('air_potential_temperature')
        except iris.exceptions.ConstraintMismatchError:
            p = self.cubes.extract_strict('air_pressure')
            temp = self.cubes.extract_strict('air_temperature')
            th = mcalc.potential_temperature(p, temp)
            th.rename('air_potential_temperature')
            th.convert_units('K')
            self.cubes.append(th)
        return th

    @cached_property
    def temp(self):
        r"""
        Air temperature

        If potential temperature is given:
        .. math::
            T = \theta (p/p_0)^{R_d/c_p}
        """
        try:
            t = self.cubes.extract_strict('air_temperature')
        except iris.exceptions.ConstraintMismatchError:
            p0 = AuxCoord(mconst.P0.data, units=mconst.P0.units)
            kappa = mconst.kappa.data
            p = self.cubes.extract_strict('air_pressure')
            t = self.theta * (p / p0) ** kappa
            t.rename('air_temperature')
            t.convert_units('K')
            self.cubes.append(t)
        return t

    @cached_property
    def mixr(self):
        r"""
        Water vapour mixing ratio

        """
        try:
            mixr = self.cubes.extract_strict('mixing_ratio')
        except iris.exceptions.ConstraintMismatchError:
            spechum = self.cubes.extract_strict('specific_humidity')
            mixr = mcalc.specific_humidity_to_mixing_ratio(spechum)
            self.cubes.append(mixr)
        return mixr

    @cached_property
    def thetae(self):
        r"""
        Equivalent potential temperature

        .. math::
            \theta_e = \theta e^\frac{L_v r_s}{C_{pd} T}
        """
        try:
            th = self.cubes.extract_strict('equivalent_potential_temperature')
        except iris.exceptions.ConstraintMismatchError:
            p = self.cubes.extract_strict('air_pressure')
            temp = self.cubes.extract_strict('air_temperature')
            spechum = self.cubes.extract_strict('specific_humidity')
            mixr = mcalc.specific_humidity_to_mixing_ratio(spechum)
            e = mcalc.vapor_pressure(p, mixr)
            dew = mcalc.dewpoint(e)
            dew.convert_units('K')
            th = mcalc.equivalent_potential_temperature(p, temp, dew)
            th.rename('equivalent_potential_temperature')
            th.convert_units('K')
            self.cubes.append(th)
        return th

    @cached_property
    def relh(self):
        r""" Relative humdity """
        try:
            rh = self.cubes.extract_strict('relative_humidity')
        except iris.exceptions.ConstraintMismatchError:
            p = self.cubes.extract_strict('air_pressure')
            temp = self.cubes.extract_strict('air_temperature')
            spechum = self.cubes.extract_strict('specific_humidity')
            rh = mcalc.specific_to_relative_humidity(p, temp, spechum)
            rh.rename('relative_humidity')
            rh.convert_units('1')
            self.cubes.append(rh)
        return rh

    @cached_property
    def specific_volume(self):
        r"""
        Air Specific Volume

        .. math::
            \alpha = \rho^{-1}
        """
        res = self.density ** (-1)
        res.rename('air_specific_volume')
        self.main_cubes.append(res)
        return res

    @cached_property
    def dp_dx(self):
        r"""
        Derivative of pressure along the x-axis
        .. math::
            p_x = \frac{\partial p}{\partial x}
        """
        return cube_deriv(self.pres, self.xcoord)

    @cached_property
    def dp_dy(self):
        r"""
        Derivative of pressure along the y-axis
        .. math::
            p_y = \frac{\partial p}{\partial y}
        """
        return cube_deriv(self.pres, self.ycoord)

    @cached_property
    def dsv_dx(self):
        r"""
        Derivative of specific volume along the x-axis
        .. math::
            \alpha_x = \frac{\partial\alpha}{\partial x}
        """
        return cube_deriv(self.specific_volume, self.xcoord)

    @cached_property
    def dsv_dy(self):
        r"""
        Derivative of specific volume along the y-axis
        .. math::
            \alpha_y = \frac{\partial\alpha}{\partial y}
        """
        return cube_deriv(self.specific_volume, self.ycoord)

    @cached_property
    def baroclinic_term(self):
        r"""
        Baroclinic term of relative vorticity budget

        .. math::
            \frac{\partial p}{\partial x}\frac{\partial\alpha}{\partial y}
            - \frac{\partial p}{\partial y}\frac{\partial\alpha}{\partial x}
        """
        res = (self.dp_dx * self.dsv_dy
               - self.dp_dy * self.dsv_dx)
        res.rename('baroclinic_term_of_atmosphere_relative_vorticity_budget')
        res.convert_units('s-1')
        return res

    @cached_property
    def brunt_vaisala_squared(self):
        r"""
        Brunt–Väisälä frequency squared

        (pressure coordinates)
        .. math::
            N^2 = -\frac{g^2 \rho}{\theta}\frac{\partial \theta}{\partial p}
        """
        dthdp = cube_deriv(self.theta, self.zcoord)
        g2 = -1 * mconst.g**2
        g2 = AuxCoord(g2.data, units=g2.units)
        res = self.density / self.theta * dthdp * g2
        res.rename('square_of_brunt_vaisala_frequency_in_air')
        res.convert_units('s-2')
        return res

    @cached_property
    def eady_growth_rate(self):
        r"""
        Eady growth rate

        (pressure coordinates)
        .. math::
            EGR = 0.31 * \frac{f}{N}\frac{\partial V}{\partial z}
                = 0.31 * \frac{-\rho g f}{N}\frac{\partial V}{\partial p}
        """
        factor = -1 * mconst.egr_factor * self.fcor * mconst.g
        dvdp = cube_deriv(self.wspd, self.zcoord)
        dvdp.data = abs(dvdp.data)
        res = (self.density * factor.data * AuxCoord(1, units=factor.units)
               * dvdp / self.brunt_vaisala_squared**0.5)
        res.rename('eady_growth_rate')
        res.convert_units('s-1')
        return res

    @cached_property
    def kinematic_frontogenesis(self):
        r"""
        2D Kinematic frontogenesis from MetPy package

        .. math::
            F=\frac{1}{2}\left|\nabla \theta\right|[D\cos(2\beta)-\delta]
        """
        res = mcalc.frontogenesis(self.theta, self.u, self.v,
                                  self.dx, self.dy, dim_order='yx')
        res.rename('kinematic_frontogenesis')
        res.convert_units('K m-1 s-1')
        return res
def load(filenames, callback=None):
    """
    Load structured FieldsFiles and PP files.

    Args:

    * filenames:
        One or more filenames.


    Kwargs:

    * callback:
        A modifier/filter function. Please see the module documentation
        for :mod:`iris`.

        .. note::

            Unlike the standard :func:`iris.load` operation, the callback is
            applied to the final result cubes, not individual input fields.

    Returns:
        An :class:`iris.cube.CubeList`.


    This is a streamlined load operation, to be used only on fieldsfiles or PP
    files whose fields repeat regularly over the same vertical levels and
    times. The results aim to be equivalent to those generated by
    :func:`iris.load`, but the operation is substantially faster for input that
    is structured.

    The structured input files should conform to the following requirements:

    *  the file must contain fields for all possible combinations of the
       vertical levels and time points found in the file.

    *  the fields must occur in a regular repeating order within the file.

       (For example: a sequence of fields for NV vertical levels, repeated
       for NP different forecast periods, repeated for NT different forecast
       times).

    *  all other metadata must be identical across all fields of the same
       phenomenon.

    Each group of fields with the same values of LBUSER4, LBUSER7 and LBPROC
    is identified as a separate phenomenon.  These groups are processed
    independently and returned as separate result cubes.

    .. note::

        Each input file is loaded independently.  Thus a single result cube can
        not combine data from multiple input files.

    .. note::

        The resulting time-related coordinates ('time', 'forecast_time' and
        'forecast_period') may be mapped to shared cube dimensions and in some
        cases can also be multidimensional.  However, the vertical level
        information *must* have a simple one-dimensional structure, independent
        of the time points, otherwise an error will be raised.

    .. note::

        Where input data does *not* have a fully regular arrangement, the
        corresponding result cube will have a single anonymous extra dimension
        which indexes over all the input fields.

        This can happen if, for example, some fields are missing; or have
        slightly different metadata; or appear out of order in the file.

    .. warning::

        Any non-regular metadata variation in the input should be strictly
        avoided, as not all irregularities are detected, which can cause
        erroneous results.


    """
    warn_deprecated(
        "The module 'iris.experimental.fieldsfile' is deprecated. "
        "Please use the 'iris.fileformats.um.structured_um_loading' facility "
        "as a replacement."
        "\nA call to 'iris.experimental.fieldsfile.load' can be replaced with "
        "'iris.load_raw', within a 'structured_um_loading' context.")
    loader = Loader(_collations_from_filename, {}, _convert_collation, None)
    return CubeList(load_cubes(filenames, callback, loader, None))
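A sketch of the replacement recommended by the deprecation warning above: call `iris.load_raw` inside the `structured_um_loading` context manager. The file name is a placeholder.

import iris
from iris.fileformats.um import structured_um_loading

with structured_um_loading():
    cubes = iris.load_raw('example_fieldsfile.pp')  # placeholder path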
Example #18
def plot_hydrometeors_composite_integrated(Hydrometeors_Composite,
                                           maxvalue=None,
                                           aggregate_min=None,
                                           mp=None,
                                           xlim=None,
                                           ylim=None,
                                           xlim_profile=None,
                                           ylim_integrated=None,
                                           title=None,
                                           figsize=(20 / 2.54, 10 / 2.54),
                                           height_ratios=[1.8, 1],
                                           width_ratios=[4, 1]):

    from mpdiag import hydrometeors_colors
    from iris.analysis import MEAN, SUM
    from iris.coord_categorisation import add_categorised_coord
    from iris.cube import CubeList

    from copy import deepcopy
    dict_hydrometeors_colors, dict_hydrometeors_names = hydrometeors_colors(
        microphysics_scheme=mp)

    if xlim is None:
        xlim = [
            Hydrometeors_Composite[0].coord('time').points[0],
            Hydrometeors_Composite[0].coord('time').points[-1]
        ]
    if ylim is None:
        ylim = [
            Hydrometeors_Composite[0].coord('geopotential_height').points[0] /
            1000,
            Hydrometeors_Composite[0].coord('geopotential_height').points[-1] /
            1000
        ]

    fig, ax = plt.subplots(
        nrows=2,
        ncols=2,
        #sharex='col', sharey='row',
        gridspec_kw={
            'height_ratios': height_ratios,
            'width_ratios': width_ratios
        },
        figsize=figsize)

    fig.subplots_adjust(left=0.1,
                        right=0.95,
                        bottom=0.1,
                        top=0.93,
                        wspace=0.1,
                        hspace=0.2)

    Hydrometeors_Composite_copy = deepcopy(Hydrometeors_Composite)

    if aggregate_min is not None:

        def get_min(coord, value):
            minutes = value
            return np.floor(
                minutes / aggregate_min) * aggregate_min + aggregate_min / 2

        hydrometeors_aggregated = CubeList()
        for cube in Hydrometeors_Composite_copy:
            if aggregate_min == 5:
                add_categorised_coord(cube, 'time_aggregated', 'time', get_min)
                hydrometeors_aggregated.append(
                    cube.aggregated_by(['time_aggregated'], MEAN))

        hydrometeors_piecharts = hydrometeors_aggregated
    else:
        hydrometeors_piecharts = Hydrometeors_Composite_copy

    plot_hydrometeors_color_time(hydrometeors_piecharts,
                                 Aux=None,
                                 axes=ax[0, 0],
                                 microphysics_scheme=mp,
                                 scaling='linear',
                                 minvalue=0,
                                 maxvalue=maxvalue,
                                 vscale=maxvalue,
                                 piecharts_rasterized=False,
                                 legend_piecharts=True,
                                 fontsize_legend=6,
                                 legend_piecharts_pos=(1.05, -0.25),
                                 legend_overlay=False,
                                 legend_overlay_pos=(1, -0.6),
                                 overlay=False,
                                 xlabel=False,
                                 ylabel=False,
                                 xlim=xlim,
                                 ylim=ylim,
                                 scale=True,
                                 unit_scale='kg m$^{-1}$ s$^{-1}$',
                                 fontsize_scale=6,
                                 x_shift=0)

    ax[0, 0].plot([0, 0], [0, 20000], color='grey', ls='-')
    for cube in Hydrometeors_Composite:
        color = dict_hydrometeors_colors[cube.name()]
        ax[0, 1].plot(cube.collapsed(('time'), MEAN).data,
                      cube.coord('geopotential_height').points / 1000,
                      color=color)
        ax[1, 0].plot(cube.coord('time').points,
                      cube.collapsed(('geopotential_height'), SUM).data,
                      color=color)
        #ax1[1,0].set_ylim(0,1000)
#     ax[1,0].plot([0,0],[0,2e10],color='grey',ls='-')
    ax[1, 1].axis('off')

    ax[0, 0].set_ylabel('altitude (km)')
    ax[1, 0].set_xlabel('time (min)')
    ax[1, 0].set_xlim(xlim)
    ax[1, 0].set_ylim(ylim_integrated)
    ax[0, 1].set_ylim(ylim)
    ax[0, 1].set_xlim(xlim_profile)

    ax[1, 0].set_ylabel('integrated (kg $^{-1}$)')
    ax[1, 0].ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
    ax[0, 1].ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
    ax[0, 0].xaxis.set_tick_params(labelbottom=False)
    ax[0, 1].yaxis.set_tick_params(labelleft=False)
    ax[0, 1].set_xlabel('integrated (kg m$^{-1}$)', labelpad=10)

    if title:
        ax[0, 0].set_title(title, loc='left')

    return fig
class Test_create_template_slice(IrisTest):
    """Test create_template_slice method"""
    def setUp(self):
        """
        Set up a basic input cube. The input cube has 2 thresholds and 3
        forecast_reference_times.
        """
        thresholds = [10, 20]
        data = np.ones((2, 2, 3), dtype=np.float32)
        cycle1 = set_up_probability_cube(
            data,
            thresholds,
            spatial_grid="equalarea",
            time=datetime(2017, 11, 10, 4, 0),
            frt=datetime(2017, 11, 10, 0, 0),
        )
        cycle2 = set_up_probability_cube(
            data,
            thresholds,
            spatial_grid="equalarea",
            time=datetime(2017, 11, 10, 4, 0),
            frt=datetime(2017, 11, 10, 1, 0),
        )
        cycle3 = set_up_probability_cube(
            data,
            thresholds,
            spatial_grid="equalarea",
            time=datetime(2017, 11, 10, 4, 0),
            frt=datetime(2017, 11, 10, 2, 0),
        )
        self.cube_to_collapse = CubeList([cycle1, cycle2, cycle3]).merge_cube()
        self.cube_to_collapse = squeeze(self.cube_to_collapse)
        self.cube_to_collapse.rename("weights")
        # This input array has 3 forecast reference times and 2 thresholds.
        # The two thresholds have the same weights.
        self.cube_to_collapse.data = np.array(
            [[[[1, 0, 1], [1, 1, 1]], [[1, 0, 1], [1, 1, 1]]],
             [[[0, 0, 1], [0, 1, 1]], [[0, 0, 1], [0, 1, 1]]],
             [[[1, 1, 1], [1, 1, 1]], [[1, 1, 1], [1, 1, 1]]]],
            dtype=np.float32)
        self.cube_to_collapse.data = np.ma.masked_equal(
            self.cube_to_collapse.data, 0)
        self.plugin = SpatiallyVaryingWeightsFromMask()

    def test_multi_dim_blend_coord_fail(self):
        """Test error is raised when we have a multi-dimensional blend_coord"""
        # Add a surface altitude coordinate which covers x and y dimensions.
        altitudes = np.array([[10, 20, 30], [20, 30, 10]])
        altitudes_coord = AuxCoord(altitudes,
                                   standard_name="surface_altitude",
                                   units="m")
        self.cube_to_collapse.add_aux_coord(altitudes_coord, data_dims=(2, 3))
        message = ("Blend coordinate must only be across one dimension.")
        with self.assertRaisesRegex(ValueError, message):
            self.plugin.create_template_slice(self.cube_to_collapse,
                                              "surface_altitude")

    def test_varying_mask_fail(self):
        """Test error is raised when mask varies along collapsing dim"""
        # Check fails when blending along threshold coordinate, as mask
        # varies along this coordinate.
        threshold_coord = find_threshold_coordinate(self.cube_to_collapse)
        message = (
            "The mask on the input cube can only vary along the blend_coord")
        with self.assertRaisesRegex(ValueError, message):
            self.plugin.create_template_slice(self.cube_to_collapse,
                                              threshold_coord.name())

    def test_scalar_blend_coord_fail(self):
        """Test error is raised when blend_coord is scalar"""
        message = ("Blend coordinate must only be across one dimension.")
        with self.assertRaisesRegex(ValueError, message):
            self.plugin.create_template_slice(self.cube_to_collapse[0],
                                              "forecast_reference_time")

    def test_basic(self):
        """Test a correct template slice is returned for simple case"""
        expected = self.cube_to_collapse.copy()[:, 0, :, :]
        result = self.plugin.create_template_slice(self.cube_to_collapse,
                                                   "forecast_reference_time")
        self.assertEqual(expected.metadata, result.metadata)
        self.assertArrayAlmostEqual(expected.data, result.data)

    def test_basic_no_change(self):
        """Test a correct template slice is returned for a case where
           no slicing is needed"""
        input_cube = self.cube_to_collapse.copy()[:, 0, :, :]
        expected = input_cube.copy()
        result = self.plugin.create_template_slice(self.cube_to_collapse,
                                                   "forecast_reference_time")
        self.assertEqual(expected.metadata, result.metadata)
        self.assertArrayAlmostEqual(expected.data, result.data)

    def test_aux_blending_coord(self):
        """Test a correct template slice is returned when blending_coord is
           an AuxCoord"""
        expected = self.cube_to_collapse.copy()[:, 0, :, :]
        result = self.plugin.create_template_slice(self.cube_to_collapse,
                                                   "forecast_period")
        self.assertEqual(expected.metadata, result.metadata)
        self.assertArrayAlmostEqual(expected.data, result.data)
Example #20
    def process(self, cubelist: CubeList) -> Cube:
        """
        Produce Nowcast of lightning probability.

        Args:
            cubelist:
                Contains cubes of
                    * First-guess lightning probability
                    * Nowcast precipitation probability
                      (required thresholds: > 0.5, 7. and 35. mm hr-1)
                    * Nowcast lightning rate
                    * (optional) Analysis of vertically integrated ice (VII)
                      from radar, thresholded into probability slices
                      at self.ice_thresholds.
                Where thresholds are listed, only these threshold values
                will be used.

        Returns:
            Output cube containing Nowcast lightning probability.
            This cube will have the same dimensions as the input
            Nowcast precipitation probability after the threshold coord
            has been removed.

        Raises:
            iris.exceptions.ConstraintMismatchError:
                If cubelist does not contain the expected cubes.
        """
        first_guess_lightning_cube = cubelist.extract(
            "probability_of_rate_of_lightning_above_threshold", strict=True)
        lightning_rate_cube = cubelist.extract("rate_of_lightning",
                                               strict=True)
        lightning_rate_cube.convert_units("min^-1")  # Ensure units are correct
        prob_precip_cube = cubelist.extract(
            "probability_of_lwe_precipitation_rate_above_threshold",
            strict=True)
        # Now find prob_vii_cube. Can't use strict=True here as cube may not be
        # present, so will use a normal extract and then merge_cube if needed.
        prob_vii_cube = cubelist.extract(
            "probability_of_vertical_integral_of_ice_above_threshold")
        if prob_vii_cube:
            prob_vii_cube = prob_vii_cube.merge_cube()
        precip_threshold_coord = find_threshold_coordinate(prob_precip_cube)
        precip_threshold_coord.convert_units("mm hr-1")
        precip_slice = prob_precip_cube.extract(
            iris.Constraint(
                coord_values={
                    precip_threshold_coord: lambda t: isclose(t.point, 0.5)
                }))
        if not isinstance(precip_slice, iris.cube.Cube):
            raise ConstraintMismatchError(
                "Cannot find prob(precip > 0.5 mm hr-1) cube in cubelist.")
        template_cube = self._update_metadata(precip_slice)
        new_cube = self._modify_first_guess(
            template_cube,
            first_guess_lightning_cube,
            lightning_rate_cube,
            prob_precip_cube,
            prob_vii_cube,
        )
        # Adjust data so that lightning probability does not decrease too
        # rapidly with distance.
        self.neighbourhood(new_cube)
        return new_cube
class Test_multiply_weights(IrisTest):
    """Test multiply_weights method"""
    def setUp(self):
        """
        Set up a basic weights cube with 2 thresholds to multiply with
        a cube with one_dimensional weights.
        """
        thresholds = [10, 20]
        data = np.ones((2, 2, 3), dtype=np.float32)
        cycle1 = set_up_probability_cube(
            data,
            thresholds,
            spatial_grid="equalarea",
            time=datetime(2017, 11, 10, 4, 0),
            frt=datetime(2017, 11, 10, 0, 0),
        )
        cycle2 = set_up_probability_cube(
            data,
            thresholds,
            spatial_grid="equalarea",
            time=datetime(2017, 11, 10, 4, 0),
            frt=datetime(2017, 11, 10, 1, 0),
        )
        cycle3 = set_up_probability_cube(
            data,
            thresholds,
            spatial_grid="equalarea",
            time=datetime(2017, 11, 10, 4, 0),
            frt=datetime(2017, 11, 10, 2, 0),
        )
        self.spatial_weights_cube = CubeList([cycle1, cycle2,
                                              cycle3]).merge_cube()
        self.spatial_weights_cube = squeeze(self.spatial_weights_cube)
        self.spatial_weights_cube.rename("weights")
        # This input array has 3 forecast reference times and 2 thresholds.
        # The two thresholds have the same weights.
        self.spatial_weights_cube.data = np.array(
            [[[[1, 0, 1], [1, 0, 1]], [[1, 0, 1], [1, 0, 1]]],
             [[[0, 0, 1], [0, 0, 1]], [[0, 0, 1], [0, 0, 1]]],
             [[[1, 1, 1], [1, 1, 1]], [[1, 1, 1], [1, 1, 1]]]],
            dtype=np.float32)
        # Create a one_dimensional weights cube by slicing the
        # larger weights cube.
        # The resulting cube only has a forecast_reference_time coordinate.
        self.one_dimensional_weights_cube = (self.spatial_weights_cube[:, 0, 0,
                                                                       0])
        self.one_dimensional_weights_cube.remove_coord(
            "projection_x_coordinate")
        self.one_dimensional_weights_cube.remove_coord(
            "projection_y_coordinate")
        self.one_dimensional_weights_cube.remove_coord(
            find_threshold_coordinate(self.one_dimensional_weights_cube))
        self.one_dimensional_weights_cube.data = np.array([0.2, 0.5, 0.3],
                                                          dtype=np.float32)
        self.plugin = SpatiallyVaryingWeightsFromMask()

    def test_basic(self):
        """Test a basic cube multiplication with a 4D input cube"""
        expected_result = np.array(
            [[[[0.2, 0, 0.2], [0.2, 0, 0.2]], [[0.2, 0, 0.2], [0.2, 0, 0.2]]],
             [[[0, 0, 0.5], [0, 0, 0.5]], [[0, 0, 0.5], [0, 0, 0.5]]],
             [[[0.3, 0.3, 0.3], [0.3, 0.3, 0.3]],
              [[0.3, 0.3, 0.3], [0.3, 0.3, 0.3]]]],
            dtype=np.float32)
        result = self.plugin.multiply_weights(
            self.spatial_weights_cube, self.one_dimensional_weights_cube,
            "forecast_reference_time")
        self.assertArrayAlmostEqual(result.data, expected_result)
        self.assertEqual(result.metadata, self.spatial_weights_cube.metadata)

    def test_3D_cube(self):
        """Test when we only have a 3D cube as input."""
        expected_result = np.array(
            [[[0.2, 0, 0.2], [0.2, 0, 0.2]], [[0, 0, 0.5], [0, 0, 0.5]],
             [[0.3, 0.3, 0.3], [0.3, 0.3, 0.3]]],
            dtype=np.float32)
        result = self.plugin.multiply_weights(
            self.spatial_weights_cube[:, 0, :, :],
            self.one_dimensional_weights_cube, "forecast_reference_time")
        self.assertArrayAlmostEqual(result.data, expected_result)
        self.assertEqual(result.metadata,
                         self.spatial_weights_cube[:, 0, :, :].metadata)

    def test_2D_cube(self):
        """Test when we only have a 2D cube as input.
        This probably won't happen in reality, but check it does something
        sensible anyway."""
        expected_result = np.array([[0.2, 0, 0.2], [0.2, 0, 0.2]],
                                   dtype=np.float32)
        result = self.plugin.multiply_weights(
            self.spatial_weights_cube[0, 0, :, :],
            self.one_dimensional_weights_cube[0], "forecast_reference_time")
        self.assertArrayAlmostEqual(result.data, expected_result)
        self.assertEqual(result.metadata,
                         self.spatial_weights_cube[0, 0, :, :].metadata)

    def test_mismatching_cubes(self):
        """Test the input cubes don't match along the blend_coord dim"""
        message = ("The blend_coord forecast_reference_time does not "
                   "match on weights_from_mask and "
                   "one_dimensional_weights_cube")
        with self.assertRaisesRegex(ValueError, message):
            self.plugin.multiply_weights(self.spatial_weights_cube,
                                         self.one_dimensional_weights_cube[0],
                                         "forecast_reference_time")

    def test_wrong_coord(self):
        """Test when we try to multiply over a coordinate not in the weights
        cubes."""
        message = (
            "Expected to find exactly 1 model coordinate, but found none.")
        with self.assertRaisesRegex(CoordinateNotFoundError, message):
            self.plugin.multiply_weights(self.spatial_weights_cube,
                                         self.one_dimensional_weights_cube,
                                         "model")
class Test_normalised_masked_weights(IrisTest):
    """Test normalised_masked_weights method"""
    def setUp(self):
        """Set up a cube with 2 thresholds to test normalisation. We are
        testing normalising along the leading dimension in this cube."""
        thresholds = [10, 20]
        data = np.ones((2, 2, 3), dtype=np.float32)
        cycle1 = set_up_probability_cube(
            data,
            thresholds,
            spatial_grid="equalarea",
            time=datetime(2017, 11, 10, 4, 0),
            frt=datetime(2017, 11, 10, 0, 0),
        )
        cycle2 = set_up_probability_cube(
            data,
            thresholds,
            spatial_grid="equalarea",
            time=datetime(2017, 11, 10, 4, 0),
            frt=datetime(2017, 11, 10, 1, 0),
        )
        cycle3 = set_up_probability_cube(
            data,
            thresholds,
            spatial_grid="equalarea",
            time=datetime(2017, 11, 10, 4, 0),
            frt=datetime(2017, 11, 10, 2, 0),
        )
        self.spatial_weights_cube = CubeList([cycle1, cycle2,
                                              cycle3]).merge_cube()
        self.spatial_weights_cube = squeeze(self.spatial_weights_cube)
        self.spatial_weights_cube.rename("weights")
        # This input array has 3 forecast reference times and 2 thresholds.
        # The two thresholds have the same weights.
        self.spatial_weights_cube.data = np.array(
            [[[[0.2, 0, 0.2], [0.2, 0, 0.2]], [[0.2, 0, 0.2], [0.2, 0, 0.2]]],
             [[[0, 0, 0.5], [0, 0, 0.5]], [[0, 0, 0.5], [0, 0, 0.5]]],
             [[[0.3, 0.3, 0.3], [0.3, 0.3, 0.3]],
              [[0.3, 0.3, 0.3], [0.3, 0.3, 0.3]]]],
            dtype=np.float32)
        self.plugin = SpatiallyVaryingWeightsFromMask()

    @ManageWarnings(
        ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_basic(self):
        """Test a basic example normalising along forecast_reference_time"""
        expected_result = np.array(
            [[[[0.4, 0, 0.2], [0.4, 0, 0.2]], [[0.4, 0, 0.2], [0.4, 0, 0.2]]],
             [[[0, 0, 0.5], [0, 0, 0.5]], [[0, 0, 0.5], [0, 0, 0.5]]],
             [[[0.6, 1.0, 0.3], [0.6, 1.0, 0.3]],
              [[0.6, 1.0, 0.3], [0.6, 1.0, 0.3]]]],
            dtype=np.float32)
        result = self.plugin.normalised_masked_weights(
            self.spatial_weights_cube, "forecast_reference_time")
        self.assertArrayAlmostEqual(result.data, expected_result)
        self.assertEqual(result.metadata, self.spatial_weights_cube.metadata)

    @ManageWarnings(
        ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_less_input_dims(self):
        """Test a smaller input cube"""
        expected_result = np.array(
            [[[0.4, 0, 0.2], [0.4, 0, 0.2]], [[0, 0, 0.5], [0, 0, 0.5]],
             [[0.6, 1.0, 0.3], [0.6, 1.0, 0.3]]],
            dtype=np.float32)
        result = self.plugin.normalised_masked_weights(
            self.spatial_weights_cube[:, 0, :, :], "forecast_reference_time")
        self.assertArrayAlmostEqual(result.data, expected_result)
        self.assertEqual(result.metadata,
                         self.spatial_weights_cube[:, 0, :, :].metadata)

    @ManageWarnings(
        ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_transpose_cube(self):
        """Test the function still works when we transpose the input cube.
           Same as test_basic except for transpose."""
        expected_result = np.array(
            [[[[0.4, 0, 0.2], [0.4, 0, 0.2]], [[0.4, 0, 0.2], [0.4, 0, 0.2]]],
             [[[0, 0, 0.5], [0, 0, 0.5]], [[0, 0, 0.5], [0, 0, 0.5]]],
             [[[0.6, 1.0, 0.3], [0.6, 1.0, 0.3]],
              [[0.6, 1.0, 0.3], [0.6, 1.0, 0.3]]]],
            dtype=np.float32)
        # The function always puts the blend_coord as a leading dimension.
        # The process method will ensure the order of the output dimensions
        # matches those in the input.
        expected_result = np.transpose(expected_result, axes=[0, 3, 2, 1])
        self.spatial_weights_cube.transpose(new_order=[3, 2, 0, 1])
        result = self.plugin.normalised_masked_weights(
            self.spatial_weights_cube, "forecast_reference_time")
        self.assertArrayAlmostEqual(result.data, expected_result)
        self.assertEqual(result.metadata, self.spatial_weights_cube.metadata)

    @ManageWarnings(
        ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_already_normalised(self):
        """Test nothing happens if the input data is already normalised."""
        self.spatial_weights_cube.data = np.array(
            [[[[0.4, 0, 0.2], [0.4, 0, 0.2]], [[0.4, 0, 0.2], [0.4, 0, 0.2]]],
             [[[0, 0, 0.5], [0, 0, 0.5]], [[0, 0, 0.5], [0, 0, 0.5]]],
             [[[0.6, 1.0, 0.3], [0.6, 1.0, 0.3]],
              [[0.6, 1.0, 0.3], [0.6, 1.0, 0.3]]]],
            dtype=np.float32)
        result = self.plugin.normalised_masked_weights(
            self.spatial_weights_cube, "forecast_reference_time")
        self.assertArrayAlmostEqual(result.data,
                                    self.spatial_weights_cube.data)
        self.assertEqual(result.metadata, self.spatial_weights_cube.metadata)

    @ManageWarnings(
        ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_weights_sum_to_zero(self):
        """Test all x-y slices have zero weight in the same index.
           This case corresponds to the case when all the input fields are
           all masked in the same place."""
        self.spatial_weights_cube.data[:, :, :, 0] = 0
        expected_result = np.array(
            [[[[0, 0, 0.2], [0, 0, 0.2]], [[0, 0, 0.2], [0, 0, 0.2]]],
             [[[0, 0, 0.5], [0, 0, 0.5]], [[0, 0, 0.5], [0, 0, 0.5]]],
             [[[0, 1.0, 0.3], [0, 1.0, 0.3]], [[0, 1.0, 0.3], [0, 1.0, 0.3]]]],
            dtype=np.float32)
        result = self.plugin.normalised_masked_weights(
            self.spatial_weights_cube, "forecast_reference_time")
        self.assertArrayAlmostEqual(result.data, expected_result)
        self.assertEqual(result.metadata, self.spatial_weights_cube.metadata)
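

# A minimal NumPy sketch (illustrative assumption, not the plugin code) of the
# normalisation the tests above expect: weights are divided by their sum along
# the blend dimension, and points whose weights sum to zero (all input fields
# masked there) are left as zero rather than dividing by zero.
import numpy as np


def _normalise_along_leading_dim(weights):
    totals = weights.sum(axis=0)
    with np.errstate(divide="ignore", invalid="ignore"):
        normalised = np.where(totals > 0, weights / totals, 0.0)
    return normalised.astype(np.float32)

# A column of weights [0.2, 0.0, 0.3] becomes [0.4, 0.0, 0.6], matching the
# first column of expected_result in test_basic above.
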
Beispiel #23
CUBE_IN_SHORT = iris.cube.Cube(
    [3.14, 6.28, 9.42],
    dim_coords_and_dims=[(DIM_COORD_SHORT, 0)],
    aux_coords_and_dims=[(AUX_COORD, 0)],
)
CUBE_IN_LONG = iris.cube.Cube(
    [3.14, 6.28, 9.42],
    dim_coords_and_dims=[(DIM_COORD_LONG, 0)],
    aux_coords_and_dims=[(AUX_COORD, 0)],
)
CUBE_OUT_LONG = iris.cube.Cube(
    [3.14, 6.28, 9.42],
    dim_coords_and_dims=[(DIM_COORD_ROUNDED, 0)],
    aux_coords_and_dims=[(AUX_COORD, 0)],
)

CUBES_TO_FIX = [
    (CubeList([CUBE_IN_SHORT]), CubeList([CUBE_IN_SHORT])),
    (CubeList([CUBE_IN_LONG]), CubeList([CUBE_OUT_LONG])),
    (CubeList([CUBE_IN_LONG,
               CUBE_IN_SHORT]), CubeList([CUBE_OUT_LONG, CUBE_IN_SHORT])),
]


@pytest.mark.parametrize('cubes_in,cubes_out', CUBES_TO_FIX)
def test_tas(cubes_in, cubes_out):
    fix = tas()
    new_cubes = fix.fix_metadata(cubes_in)
    assert new_cubes is cubes_in
    assert new_cubes == cubes_out
Beispiel #24
 def setUp(self):
     """Set up the plugin and dataframe needed for this test"""
     self.cubes = CubeList([set_up_spot_cube(280)])
     self.data_directory = mkdtemp()
     self.plugin = SpotDatabase("csv", self.data_directory + "/test.csv",
                                "improver", "time", "index")
Beispiel #25
 def setUp(self):
     """Set up the plugin and dataframe needed for this test"""
     cubes = CubeList([set_up_spot_cube(280)])
     self.plugin = SpotDatabase("csv", "output", "improver", "time",
                                "index")
     self.dataframe = self.plugin.to_dataframe(cubes)
Beispiel #26
    def process(self, cubes: Union[CubeList, List[Cube]]) -> Cube:
        """
        Use the wet bulb temperature integral to find the altitude at which a
        phase change occurs (e.g. snow to sleet). This is achieved by finding
        the height above sea level at which the integral matches an empirical
        threshold that is expected to correspond with the phase change. This
        empirical threshold is the falling_level_threshold. Fill in missing
        data appropriately.

        Args:
            cubes containing:
                wet_bulb_temperature:
                    Cube of wet bulb temperatures on height levels.
                wet_bulb_integral:
                    Cube of wet bulb temperature integral (Kelvin-metres).
                orog:
                    Cube of orography (m).
                land_sea_mask:
                    Cube containing a binary land-sea mask, with land points
                    set to one and sea points set to zero.

        Returns:
            Cube of phase change level above sea level (asl).
        """

        names_to_extract = [
            "wet_bulb_temperature",
            "wet_bulb_temperature_integral",
            "surface_altitude",
            "land_binary_mask",
        ]
        if len(cubes) != len(names_to_extract):
            raise ValueError(
                f"Expected {len(names_to_extract)} cubes, found {len(cubes)}")

        wet_bulb_temperature, wet_bulb_integral, orog, land_sea_mask = tuple(
            CubeList(cubes).extract_strict(n) for n in names_to_extract)

        wet_bulb_temperature.convert_units("celsius")
        wet_bulb_integral.convert_units("K m")

        # Ensure the wet bulb integral cube's height coordinate is in
        # descending order
        wet_bulb_integral = sort_coord_in_cube(wet_bulb_integral,
                                               "height",
                                               descending=True)

        # Find highest height from height bounds.
        wbt_height_points = wet_bulb_temperature.coord("height").points
        if wet_bulb_integral.coord("height").bounds is None:
            highest_height = wbt_height_points[-1]
        else:
            highest_height = wet_bulb_integral.coord("height").bounds[0][-1]

        # Firstly we need to slice over height, x and y
        x_coord = wet_bulb_integral.coord(axis="x").name()
        y_coord = wet_bulb_integral.coord(axis="y").name()
        orography = next(orog.slices([y_coord, x_coord]))
        land_sea_data = next(land_sea_mask.slices([y_coord, x_coord])).data
        max_nbhood_orog = self.find_max_in_nbhood_orography(orography)

        phase_change = None
        slice_list = ["height", y_coord, x_coord]
        for wb_integral, wet_bulb_temp in zip(
                wet_bulb_integral.slices(slice_list),
                wet_bulb_temperature.slices(slice_list),
        ):
            phase_change_data = self._calculate_phase_change_level(
                wet_bulb_temp.data,
                wb_integral.data,
                orography.data,
                max_nbhood_orog.data,
                land_sea_data,
                wbt_height_points,
                wb_integral.coord("height").points,
                highest_height,
            )

            # preserve dimensionality of input cube (in case of scalar or
            # length 1 dimensions)
            if phase_change is None:
                phase_change = phase_change_data
            elif not isinstance(phase_change, list):
                phase_change = [phase_change]
                phase_change.append(phase_change_data)
            else:
                phase_change.append(phase_change_data)

        phase_change_level = self.create_phase_change_level_cube(
            wet_bulb_temperature,
            np.ma.masked_array(phase_change, dtype=np.float32))

        return phase_change_level
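
# Hedged usage sketch for the process method above (the variable and plugin
# names below are placeholders, not taken from this example): the plugin
# expects exactly the four cubes listed in names_to_extract, in any order,
# and returns a single cube of the phase change level above sea level.
#
#     result = plugin.process(CubeList([wet_bulb_temperature_cube,
#                                       wet_bulb_integral_cube,
#                                       orography_cube,
#                                       land_sea_mask_cube]))
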
class Test_process(IrisTest):
    """Test process method"""
    def setUp(self):
        """
        Set up a basic cube and linear weights cube for the process
        method. The input cube has 2 thresholds and 3
        forecast_reference_times.
        """
        thresholds = [10, 20]
        data = np.ones((2, 2, 3), dtype=np.float32)
        cycle1 = set_up_probability_cube(
            data,
            thresholds,
            spatial_grid="equalarea",
            time=datetime(2017, 11, 10, 4, 0),
            frt=datetime(2017, 11, 10, 0, 0),
        )
        cycle2 = set_up_probability_cube(
            data,
            thresholds,
            spatial_grid="equalarea",
            time=datetime(2017, 11, 10, 4, 0),
            frt=datetime(2017, 11, 10, 1, 0),
        )
        cycle3 = set_up_probability_cube(
            data,
            thresholds,
            spatial_grid="equalarea",
            time=datetime(2017, 11, 10, 4, 0),
            frt=datetime(2017, 11, 10, 2, 0),
        )
        self.cube_to_collapse = CubeList([cycle1, cycle2, cycle3]).merge_cube()
        self.cube_to_collapse = squeeze(self.cube_to_collapse)
        self.cube_to_collapse.rename("weights")
        # This input array has 3 forecast reference times and 2 thresholds.
        # The two thresholds have the same weights.
        self.cube_to_collapse.data = np.array(
            [[[[1, 0, 1], [1, 1, 1]], [[1, 0, 1], [1, 1, 1]]],
             [[[0, 0, 1], [0, 1, 1]], [[0, 0, 1], [0, 1, 1]]],
             [[[1, 1, 1], [1, 1, 1]], [[1, 1, 1], [1, 1, 1]]]],
            dtype=np.float32)
        self.cube_to_collapse.data = np.ma.masked_equal(
            self.cube_to_collapse.data, 0)
        # Create a one_dimensional weights cube by slicing the larger
        # weights cube.
        # The resulting cube only has a forecast_reference_time coordinate.
        self.one_dimensional_weights_cube = self.cube_to_collapse[:, 0, 0, 0]
        self.one_dimensional_weights_cube.remove_coord(
            "projection_x_coordinate")
        self.one_dimensional_weights_cube.remove_coord(
            "projection_y_coordinate")
        self.one_dimensional_weights_cube.remove_coord(
            find_threshold_coordinate(self.one_dimensional_weights_cube))
        self.one_dimensional_weights_cube.data = np.array([0.2, 0.5, 0.3],
                                                          dtype=np.float32)
        self.plugin = SpatiallyVaryingWeightsFromMask(fuzzy_length=4)

    @ManageWarnings(record=True)
    def test_none_masked(self, warning_list=None):
        """Test when we have no masked data in the input cube."""
        self.cube_to_collapse.data = np.ones(self.cube_to_collapse.data.shape)
        self.cube_to_collapse.data = np.ma.masked_equal(
            self.cube_to_collapse.data, 0)
        expected_data = np.array([[[0.2, 0.2, 0.2], [0.2, 0.2, 0.2]],
                                  [[0.5, 0.5, 0.5], [0.5, 0.5, 0.5]],
                                  [[0.3, 0.3, 0.3], [0.3, 0.3, 0.3]]],
                                 dtype=np.float32)
        message = ("Input cube to SpatiallyVaryingWeightsFromMask "
                   "must be masked")
        result = self.plugin.process(self.cube_to_collapse,
                                     self.one_dimensional_weights_cube,
                                     "forecast_reference_time")
        self.assertTrue(any(message in str(item) for item in warning_list))
        self.assertArrayEqual(result.data, expected_data)
        self.assertEqual(result.dtype, np.float32)

    @ManageWarnings(
        ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_all_masked(self):
        """Test when we have all masked data in the input cube."""
        self.cube_to_collapse.data = np.ones(self.cube_to_collapse.data.shape)
        self.cube_to_collapse.data = np.ma.masked_equal(
            self.cube_to_collapse.data, 1)
        result = self.plugin.process(self.cube_to_collapse,
                                     self.one_dimensional_weights_cube,
                                     "forecast_reference_time")
        expected_data = np.zeros((3, 2, 3))
        self.assertArrayAlmostEqual(expected_data, result.data)
        self.assertEqual(result.metadata, self.cube_to_collapse.metadata)

    @ManageWarnings(
        ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_no_fuzziness_no_one_dimensional_weights(self):
        """Test a simple case where we have no fuzziness in the spatial
        weights and no adjustment from the one_dimensional weights."""
        plugin = SpatiallyVaryingWeightsFromMask(fuzzy_length=1)
        self.one_dimensional_weights_cube.data = np.ones((3))
        expected_result = np.array(
            [[[0.5, 0., 0.33333333], [0.5, 0.33333333, 0.33333333]],
             [[0., 0., 0.33333333], [0., 0.33333333, 0.33333333]],
             [[0.5, 1., 0.33333333], [0.5, 0.33333333, 0.33333333]]],
            dtype=np.float32)
        result = plugin.process(self.cube_to_collapse,
                                self.one_dimensional_weights_cube,
                                "forecast_reference_time")
        self.assertArrayAlmostEqual(result.data, expected_result)
        self.assertEqual(result.metadata, self.cube_to_collapse.metadata)

    @ManageWarnings(
        ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_no_fuzziness_no_one_dimensional_weights_transpose(self):
        """Test a simple case where we have no fuzziness in the spatial
        weights and no adjustment from the one_dimensional weights and
        transpose the input cube."""
        plugin = SpatiallyVaryingWeightsFromMask(fuzzy_length=1)
        self.one_dimensional_weights_cube.data = np.ones((3))
        expected_result = np.array(
            [[[0.5, 0., 0.33333333], [0.5, 0.33333333, 0.33333333]],
             [[0., 0., 0.33333333], [0., 0.33333333, 0.33333333]],
             [[0.5, 1., 0.33333333], [0.5, 0.33333333, 0.33333333]]],
            dtype=np.float32)
        self.cube_to_collapse.transpose([2, 0, 1, 3])
        result = plugin.process(self.cube_to_collapse,
                                self.one_dimensional_weights_cube,
                                "forecast_reference_time")
        self.assertArrayAlmostEqual(result.data, expected_result)
        self.assertEqual(result.metadata, self.cube_to_collapse.metadata)

    @ManageWarnings(
        ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_no_fuzziness_with_one_dimensional_weights(self):
        """Test a simple case where we have no fuzziness in the spatial
        weights and an adjustment from the one_dimensional weights."""
        plugin = SpatiallyVaryingWeightsFromMask(fuzzy_length=1)
        expected_result = np.array([[[0.4, 0., 0.2], [0.4, 0.2, 0.2]],
                                    [[0., 0., 0.5], [0., 0.5, 0.5]],
                                    [[0.6, 1., 0.3], [0.6, 0.3, 0.3]]],
                                   dtype=np.float32)
        result = plugin.process(self.cube_to_collapse,
                                self.one_dimensional_weights_cube,
                                "forecast_reference_time")
        self.assertArrayAlmostEqual(result.data, expected_result)
        self.assertEqual(result.metadata, self.cube_to_collapse.metadata)

    @ManageWarnings(
        ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_fuzziness_no_one_dimensional_weights(self):
        """Test a simple case where we have some fuzziness in the spatial
        weights and no adjustment from the one_dimensional weights."""
        plugin = SpatiallyVaryingWeightsFromMask(fuzzy_length=2)
        self.one_dimensional_weights_cube.data = np.ones((3))
        expected_result = np.array(
            [[[0.33333334, 0., 0.25], [0.41421354, 0.25, 0.2928932]],
             [[0., 0., 0.25], [0., 0.25, 0.2928932]],
             [[0.6666667, 1., 0.5], [0.5857864, 0.5, 0.41421354]]],
            dtype=np.float32)
        result = plugin.process(self.cube_to_collapse,
                                self.one_dimensional_weights_cube,
                                "forecast_reference_time")
        self.assertArrayAlmostEqual(result.data, expected_result)
        self.assertEqual(result.metadata, self.cube_to_collapse.metadata)

    @ManageWarnings(
        ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_fuzziness_with_one_dimensional_weights(self):
        """Test a simple case where we have some fuzziness in the spatial
        sweights and with adjustment from the one_dimensional weights."""
        plugin = SpatiallyVaryingWeightsFromMask(fuzzy_length=2)
        expected_result = np.array(
            [[[0.25, 0., 0.15384616], [0.32037723, 0.15384616, 0.17789416]],
             [[0., 0., 0.3846154], [0., 0.3846154, 0.44473538]],
             [[0.75, 1., 0.4615385], [0.6796227, 0.4615385, 0.3773705]]],
            dtype=np.float32)
        result = plugin.process(self.cube_to_collapse,
                                self.one_dimensional_weights_cube,
                                "forecast_reference_time")
        self.assertArrayAlmostEqual(result.data, expected_result)
        self.assertEqual(result.metadata, self.cube_to_collapse.metadata)
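

# Rough sketch (an assumption for illustration, not the plugin's code) of how
# the fuzzy spatial weights in the tests above can arise: each unmasked point
# is weighted by its distance to the nearest masked point, scaled by
# fuzzy_length and capped at 1, while masked points get zero.  Multiplying by
# the one-dimensional weights and normalising along the blend coordinate then
# reproduces values such as 0.41421354 (sqrt(2)/2 divided by the column total)
# seen in test_fuzziness_no_one_dimensional_weights.
import numpy as np
from scipy.ndimage import distance_transform_edt


def _fuzzy_spatial_weights(mask, fuzzy_length):
    """mask: boolean array, True where the input data are masked."""
    distance_to_mask = distance_transform_edt(~mask)
    return np.clip(distance_to_mask / fuzzy_length, 0.0, 1.0)
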
Beispiel #28
class SetupNormalInputs(SetupInputs, SetupCubes):
    """Create a class for setting up cubes for testing."""
    @ManageWarnings(
        ignored_messages=["Collapsing a non-contiguous coordinate."],
        warning_types=[UserWarning],
    )
    def setUp(self):
        """Set up expected inputs."""
        super().setUp()
        # Set up cubes and associated data arrays for temperature.
        self.forecast_predictor_mean = CubeList([
            self.historic_temperature_forecast_cube.collapsed(
                "realization", iris.analysis.MEAN)
        ])
        self.forecast_predictor_realizations = CubeList([
            (self.historic_temperature_forecast_cube.copy())
        ])
        self.forecast_predictor_spot = CubeList([
            self.historic_forecast_spot_cube.collapsed("realization",
                                                       iris.analysis.MEAN)
        ])

        self.fp_additional_predictor_spot = CubeList(
            [self.forecast_predictor_spot[0].copy()])
        self.fp_additional_predictor_spot.extend([self.spot_altitude_cube])

        self.forecast_variance = self.historic_temperature_forecast_cube.collapsed(
            "realization", iris.analysis.VARIANCE)
        self.forecast_variance_spot = self.forecast_predictor_spot[0].copy()
        self.forecast_variance_spot.data = self.forecast_variance_spot.data / 10.0

        self.truth = self.historic_temperature_forecast_cube.collapsed(
            "realization", iris.analysis.MAX)
        self.forecast_predictor_data = (
            self.forecast_predictor_mean[0].data.flatten().astype(np.float64))
        self.forecast_predictor_data_realizations = convert_cube_data_to_2d(
            self.historic_temperature_forecast_cube.copy()).astype(np.float64)
        self.forecast_variance_data = self.forecast_variance.data.flatten(
        ).astype(np.float64)
        self.truth_data = self.truth.data.flatten().astype(np.float64)

        spatial_product = np.prod(self.truth.shape[-2:])
        self.initial_guess_spot_mean = np.broadcast_to(
            self.initial_guess_for_mean,
            (
                spatial_product,
                len(self.initial_guess_for_mean),
            ),
        )
        self.initial_guess_spot_realizations = np.broadcast_to(
            self.initial_guess_for_realization,
            (
                spatial_product,
                len(self.initial_guess_for_realization),
            ),
        )
        self.ig_spot_mean_additional_predictor = np.broadcast_to(
            self.initial_guess_mean_additional_predictor,
            (
                spatial_product,
                len(self.initial_guess_mean_additional_predictor),
            ),
        )
Beispiel #29
 def test_fail(self):
     units = Unit("days since 1970-01-02 00:00:00", calendar="gregorian")
     cube2 = Cube([1, 2, 3], "air_temperature", units="K")
     cube2.add_dim_coord(DimCoord([0, 1, 2], "time", units=units), 0)
     with self.assertRaises(iris.exceptions.ConcatenateError):
         CubeList([self.cube1, cube2]).concatenate_cube()
Beispiel #30
 def test_merge_cell_measure_aware(self):
     (cube1, ) = iris.load_raw(self.fname)
     (cube2, ) = iris.load_raw(self.fname)
     cube2._cell_measures_and_dims[0][0].var_name = "not_areat"
     cubes = CubeList([cube1, cube2]).merge()
     self.assertEqual(len(cubes), 2)
Beispiel #31
    def __init__(self, cartesian=True, **kw_vars):
        """
        Parameters
        ----------
        cartesian: bool (default True)
            Cartesian coord system flag
        **kw_vars: dict of iris cubes
            meteorological parameters

        Examples
        --------
        Initialise an `AtmosFlow` object with 3 wind components
        >>> AF = AtmosFlow(u=u_cart, v=v_cart, w=w_cart)
        and calculate relative vorticity:
        >>> rv = AF.rel_vort
        """
        self.__dict__.update(kw_vars)
        self.cartesian = cartesian

        self.cubes = CubeList(filter(iscube, self.__dict__.values()))
        self.main_cubes = CubeList(filter(iscube_and_not_scalar,
                                          self.__dict__.values()))
        self.wind_cmpnt = CubeList(filter(None,
                                          [getattr(self, 'u', None),
                                           getattr(self, 'v', None),
                                           getattr(self, 'w', None)]))
        thecube = self.main_cubes[0]

        check_coords(self.main_cubes)

        # Get the dim_coord, or None if none exist, for the xyz dimensions
        self.xcoord = thecube.coord(axis='X', dim_coords=True)
        self.ycoord = thecube.coord(axis='Y', dim_coords=True)
        self.zcoord = thecube.coord(axis='Z')
        if self.zcoord.units.is_convertible('Pa'):
            # Check if the vertical coordinate is pressure
            self.zmode = 'pcoord'
            for cube in self.main_cubes:
                if self.zcoord in cube.dim_coords:
                    clean_pressure_coord(cube)
            self.pres = coords.pres_coord_to_cube(thecube)
            self.cubes.append(self.pres)

        if not hasattr(self, 'lats'):
            try:
                _, lats = grid.unrotate_lonlat_grids(thecube)
            except (ValueError, AttributeError):
                lats = np.array([45.])
            self.lats = Cube(lats,
                             units='degrees',
                             standard_name='latitude')
        self.fcor = mcalc.coriolis_parameter(self.lats)
        self.fcor.convert_units('s-1')

        if self.cartesian:
            for ax, rot_name in zip(('x',  'y'),
                                    ('grid_longitude', 'grid_latitude')):
                for cube in self.cubes:
                    if rot_name in [i.name() for i in cube.coords(axis=ax)]:
                        cube.remove_coord(rot_name)

            try:
                _dx = thecube.attributes['um_res'].to_flt('m')
            except KeyError:
                _dx = 1.
            self.dx = Cube(_dx, units='m')
            self.dy = Cube(_dx, units='m')

        # Non-spherical coords?
        # self.horiz_cs = thecube.coord(axis='x', dim_coords=True).coord_system
        self.horiz_cs = thecube.coord_system()
        self._spherical_coords = isinstance(self.horiz_cs,
                                            (iris.coord_systems.GeogCS,
                                             iris.coord_systems.RotatedGeogCS))
        # todo: interface for spherical coordinates switch?
        # assert not self._spherical_coords,\
        #     'Only non-spherical coordinates are allowed ...'
        if self.cartesian and self._spherical_coords:
            warnings.warn('Cubes are in spherical coordinates!'
                          '\n Use `replace_lonlat_dimcoord_with_cart function`'
                          ' to change coordinates!')
Beispiel #32
def save_regridder(rg, filename):
    """
    Save a regridder scheme instance.

    Saves either a
    :class:`~esmf_regrid.experimental.unstructured_scheme.GridToMeshESMFRegridder`
    or a
    :class:`~esmf_regrid.experimental.unstructured_scheme.MeshToGridESMFRegridder`.

    Parameters
    ----------
    rg : :class:`~esmf_regrid.experimental.unstructured_scheme.GridToMeshESMFRegridder` or :class:`~esmf_regrid.experimental.unstructured_scheme.MeshToGridESMFRegridder`
        The regridder instance to save.
    filename : str
        The file name to save to.
    """
    regridder_type = rg.__class__.__name__

    def _standard_grid_cube(grid, name):
        if grid[0].ndim == 1:
            shape = [coord.points.size for coord in grid]
        else:
            shape = grid[0].shape
        data = np.zeros(shape)
        cube = Cube(data, var_name=name, long_name=name)
        if grid[0].ndim == 1:
            cube.add_dim_coord(grid[0], 0)
            cube.add_dim_coord(grid[1], 1)
        else:
            cube.add_aux_coord(grid[0], [0, 1])
            cube.add_aux_coord(grid[1], [0, 1])
        return cube

    if regridder_type == "GridToMeshESMFRegridder":
        src_grid = (rg.grid_y, rg.grid_x)
        src_cube = _standard_grid_cube(src_grid, SOURCE_NAME)

        tgt_mesh = rg.mesh
        tgt_location = rg.location
        tgt_mesh_coords = tgt_mesh.to_MeshCoords(tgt_location)
        tgt_data = np.zeros(tgt_mesh_coords[0].points.shape[0])
        tgt_cube = Cube(tgt_data, var_name=TARGET_NAME, long_name=TARGET_NAME)
        for coord in tgt_mesh_coords:
            tgt_cube.add_aux_coord(coord, 0)
    elif regridder_type == "MeshToGridESMFRegridder":
        src_mesh = rg.mesh
        src_location = rg.location
        src_mesh_coords = src_mesh.to_MeshCoords(src_location)
        src_data = np.zeros(src_mesh_coords[0].points.shape[0])
        src_cube = Cube(src_data, var_name=SOURCE_NAME, long_name=SOURCE_NAME)
        for coord in src_mesh_coords:
            src_cube.add_aux_coord(coord, 0)

        tgt_grid = (rg.grid_y, rg.grid_x)
        tgt_cube = _standard_grid_cube(tgt_grid, TARGET_NAME)
    else:
        msg = (f"Expected a regridder of type `GridToMeshESMFRegridder` or "
               f"`MeshToGridESMFRegridder`, got type {regridder_type}.")
        raise TypeError(msg)

    method = rg.method
    resolution = rg.resolution

    weight_matrix = rg.regridder.weight_matrix
    reformatted_weight_matrix = scipy.sparse.coo_matrix(weight_matrix)
    weight_data = reformatted_weight_matrix.data
    weight_rows = reformatted_weight_matrix.row
    weight_cols = reformatted_weight_matrix.col
    weight_shape = reformatted_weight_matrix.shape

    esmf_version = rg.regridder.esmf_version
    esmf_regrid_version = rg.regridder.esmf_regrid_version
    save_version = esmf_regrid.__version__

    # Currently, all schemes use the fracarea normalization.
    normalization = "fracarea"

    mdtol = rg.mdtol
    attributes = {
        "title": "iris-esmf-regrid regridding scheme",
        REGRIDDER_TYPE: regridder_type,
        VERSION_ESMF: esmf_version,
        VERSION_INITIAL: esmf_regrid_version,
        "esmf_regrid_version_on_save": save_version,
        "normalization": normalization,
        MDTOL: mdtol,
        METHOD: method,
    }
    if resolution is not None:
        attributes[RESOLUTION] = resolution

    weights_cube = Cube(weight_data,
                        var_name=WEIGHTS_NAME,
                        long_name=WEIGHTS_NAME)
    row_coord = AuxCoord(weight_rows,
                         var_name=WEIGHTS_ROW_NAME,
                         long_name=WEIGHTS_ROW_NAME)
    col_coord = AuxCoord(weight_cols,
                         var_name=WEIGHTS_COL_NAME,
                         long_name=WEIGHTS_COL_NAME)
    weights_cube.add_aux_coord(row_coord, 0)
    weights_cube.add_aux_coord(col_coord, 0)

    weight_shape_cube = Cube(
        weight_shape,
        var_name=WEIGHTS_SHAPE_NAME,
        long_name=WEIGHTS_SHAPE_NAME,
    )

    # Avoid saving bug by placing the mesh cube second.
    # TODO: simplify this when this bug is fixed in iris.
    if regridder_type == "GridToMeshESMFRegridder":
        cube_list = CubeList(
            [src_cube, tgt_cube, weights_cube, weight_shape_cube])
    elif regridder_type == "MeshToGridESMFRegridder":
        cube_list = CubeList(
            [tgt_cube, src_cube, weights_cube, weight_shape_cube])

    for cube in cube_list:
        cube.attributes = attributes

    iris.fileformats.netcdf.save(cube_list, filename)
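

# A minimal sketch (an assumption for illustration; the library provides its
# own loading routine) of how the sparse weight matrix saved above can be
# rebuilt when the file is read back: the row/column auxiliary coordinates
# plus the shape cube are enough to reconstruct the COO matrix.
def _rebuild_weight_matrix(weights_cube, weight_shape_cube):
    import scipy.sparse

    rows = weights_cube.coord(WEIGHTS_ROW_NAME).points
    cols = weights_cube.coord(WEIGHTS_COL_NAME).points
    shape = tuple(int(n) for n in weight_shape_cube.data)
    return scipy.sparse.coo_matrix(
        (weights_cube.data, (rows, cols)), shape=shape)
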
Beispiel #33
def process_diagnostic(diagnostics, neighbours, sites, ancillary_data,
                       diagnostic_name):
    """
    Extract data and write output for a given diagnostic.

    Args:
        diagnostics (dict):
            Dictionary containing information regarding how the diagnostics
            are to be processed.

            For example::

              {
                  "temperature": {
                      "diagnostic_name": "air_temperature",
                      "extrema": true,
                      "filepath": "temperature_at_screen_level",
                      "interpolation_method":
                          "model_level_temperature_lapse_rate",
                      "neighbour_finding": {
                          "land_constraint": false,
                          "method": "fast_nearest_neighbour",
                          "vertical_bias": null
                      }
                  }
              }

        neighbours (numpy.array):
            Array of neighbouring grid points that are associated with sites
            in the SortedDictionary of sites.

        sites (dict):
            A dictionary containing the properties of spotdata sites.

        ancillary_data (dict):
            A dictionary containing additional model data that is needed.
            e.g. {'orography': <cube of orography>}

        diagnostic_name (string):
            A string matching the keys in the diagnostics dictionary that
            will be used to access information regarding how the diagnostic
            is to be processed.

    Returns:
        (tuple): tuple containing:
            **resulting_cube** (iris.cube.Cube or None):
                Cube after extracting the diagnostic requested using the
                desired extraction method.
                None is returned if the "resulting_cubes" is an empty CubeList
                after processing.
            **extrema_cubes** (iris.cube.CubeList or None):
                CubeList containing extrema values, if the 'extrema' diagnostic
                is requested.
                None is returned if the value for diagnostic_dict["extrema"]
                is False, so that the extrema calculation is not required.

    """
    diagnostic_dict = diagnostics[diagnostic_name]

    # Grab the relevant set of grid point neighbours for the neighbour finding
    # method being used by this diagnostic.
    neighbour_hash = (construct_neighbour_hash(
        diagnostic_dict['neighbour_finding']))
    neighbour_list = neighbours[neighbour_hash]

    # Get optional kwargs that may be set to override defaults.
    optionals = [
        'upper_level', 'lower_level', 'no_neighbours', 'dz_tolerance',
        'dthetadz_threshold', 'dz_max_adjustment'
    ]
    kwargs = {}
    if ancillary_data.get('config_constants') is not None:
        for optional in optionals:
            constant = ancillary_data.get('config_constants').get(optional)
            if constant is not None:
                kwargs[optional] = constant

    # Create a list of datetimes to loop through.
    forecast_times = []
    for cube in diagnostic_dict["data"]:
        time = cube.coord("time")
        forecast_times.extend(time.units.num2date(time.points))

    # Create empty iris.cube.CubeList to hold extracted data cubes.
    resulting_cubes = CubeList()

    # Loop over forecast times.
    for a_time in forecast_times:
        # Extract Cube from CubeList at current time.
        time_extract = datetime_constraint(a_time)
        cube = extract_cube_at_time(diagnostic_dict["data"], a_time,
                                    time_extract)
        if cube is None:
            # If no cube is available at given time, try the next time.
            continue

        ad = {}
        if diagnostic_dict["additional_data"] is not None:
            # Extract additional diagnostics at current time.
            ad = extract_ad_at_time(diagnostic_dict["additional_data"], a_time,
                                    time_extract)

        args = (cube, sites, neighbour_list, ancillary_data, ad)

        # Extract diagnostic data using defined method.
        resulting_cubes.append(
            ExtractData(diagnostic_dict['interpolation_method']).process(
                *args, **kwargs))

    if resulting_cubes:
        # Concatenate CubeList into Cube for cubes with different
        # forecast times.
        resulting_cube = resulting_cubes.concatenate_cube()
    else:
        resulting_cube = None

    if diagnostic_dict['extrema']:
        extrema_cubes = (ExtractExtrema(24, start_hour=9).process(
            resulting_cube.copy()))
        extrema_cubes = extrema_cubes.merge()
    else:
        extrema_cubes = None

    return resulting_cube, extrema_cubes
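
# Illustration only (the keys come from the 'optionals' list above; the values
# are made up): a 'config_constants' entry in ancillary_data overrides the
# extraction defaults that are passed through as kwargs.
#
#     ancillary_data = {
#         'orography': orography_cube,
#         'config_constants': {'no_neighbours': 9, 'dz_tolerance': 2.0},
#     }
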
Beispiel #34
O_CUBE_1 = iris.cube.Cube(
    [1.0, 2.0, -1.0, 2.0],
    standard_name=O_NAME,
)
O_CUBE_2 = iris.cube.Cube(
    [1.0, -1.0, 3.0],
    standard_name=O_NAME,
)
L_CUBE = iris.cube.Cube(
    [10.0, 20.0, 0.0],
    standard_name=L_NAME,
)
FRAC_O = np.array([0.0, 1.0, 0.5, 0.3])
FRAC_L = np.array([0.1, 0.0, 1.0])

GET_LAND_FRACTION = [
    (CubeList([L_CUBE]), L_NAME, False, None),
    (CubeList([L_CUBE]), L_NAME, True, None),
    (CubeList([SFTLF_CUBE, L_CUBE]), L_NAME, False, FRAC_L),
    (CubeList([SFTLF_CUBE, O_CUBE_1]), O_NAME, False, None),
    (CubeList([SFTLF_CUBE, O_CUBE_1]), O_NAME, True, None),
    (CubeList([SFTLF_CUBE, O_CUBE_2]), O_NAME, False, FRAC_L),
    (CubeList([SFTLF_CUBE, O_CUBE_2]), O_NAME, True, FRAC_L),
    (CubeList([SFTOF_CUBE, L_CUBE]), L_NAME, False, None),
    (CubeList([SFTOF_CUBE, L_CUBE]), L_NAME, True, None),
    (CubeList([SFTOF_CUBE, O_CUBE_1]), O_NAME, False, None),
    (CubeList([SFTOF_CUBE, O_CUBE_1]), O_NAME, True, FRAC_O),
    (CubeList([SFTOF_CUBE, O_CUBE_2]), O_NAME, True, None),
    (CubeList([SFTOF_CUBE, SFTLF_CUBE, O_CUBE_2]), O_NAME, True, FRAC_L),
    (CubeList([SFTOF_CUBE, SFTLF_CUBE, O_CUBE_2]), O_NAME, False, FRAC_L),
]
Beispiel #35
def process(*cubes: cli.inputcube,
            coordinate,
            central_point: float,
            units=None,
            width: float = None,
            calendar='gregorian',
            blend_time_using_forecast_period=False):
    """Runs weighted blending across adjacent points.

    Uses the TriangularWeightedBlendAcrossAdjacentPoints to blend across
    a particular coordinate. It does not collapse the coordinate, but
    instead blends across adjacent points and puts the blended values back
    in the original coordinate, with adjusted bounds.

    Args:
        cubes (list of iris.cube.Cube):
            A list of cubes including and surrounding the central point.
        coordinate (str):
            The coordinate over which the blending will be applied.
        central_point (float):
            Central point at which the output from the triangular weighted
            blending will be calculated. This should be in the units of the
            units argument that is passed in. This value should be a point
            on the coordinate for blending over.
        units (str):
            Units of the central_point and width
        width (float):
            Width of the triangular weighting function used in the blending,
            in the units of the units argument.
        calendar (str):
            Calendar for parameter_unit if required.
        blend_time_using_forecast_period (bool):
            If True, we are blending over time but using the forecast
            period coordinate as a proxy. Note, this should only be used when
            time and forecast_period share a dimension: i.e when all cubes
            provided are from the same forecast cycle.

    Returns:
        iris.cube.Cube:
            A processed Cube

    Raises:
        ValueError:
            If coordinate has "time" in it.
        ValueError:
            If blend_time_using_forecast_period is not used with the
            forecast_period coordinate.

    """
    from cf_units import Unit
    from iris.cube import CubeList

    from improver.blending.blend_across_adjacent_points import \
        TriangularWeightedBlendAcrossAdjacentPoints
    from improver.utilities.cube_manipulation import MergeCubes

    # TriangularWeightedBlendAcrossAdjacentPoints can't currently handle
    # blending over times where iris reads the coordinate points as datetime
    # objects. Fail here to avoid unhelpful errors downstream.
    if "time" in coordinate:
        msg = ("Cannot blend over {} coordinate (points encoded as datetime "
               "objects)".format(coordinate))
        raise ValueError(msg)

    # This is left as a placeholder for when we have this capability
    if coordinate == 'time':
        units = Unit(units, calendar)

    cubes = CubeList(cubes)

    if blend_time_using_forecast_period and coordinate == 'forecast_period':
        cube = MergeCubes()(cubes, check_time_bounds_ranges=True)
    elif blend_time_using_forecast_period:
        msg = ('"--blend-time-using-forecast-period" can only be used with '
               '"forecast_period" coordinate')
        raise ValueError(msg)
    else:
        cube = MergeCubes()(cubes)

    blending_plugin = TriangularWeightedBlendAcrossAdjacentPoints(
        coordinate, central_point, units, width)
    result = blending_plugin(cube)
    return result
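

# A minimal sketch (an assumption, not the improver implementation) of the
# triangular weighting idea described in the docstring: points of the blend
# coordinate within `width` of `central_point` get a weight that falls off
# linearly to zero, and the weights are then normalised (this assumes at
# least one point lies inside the triangle).
import numpy as np


def _triangular_weights(points, central_point, width):
    weights = np.maximum(0.0, 1.0 - np.abs(points - central_point) / width)
    return weights / weights.sum()
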
Beispiel #36
    def setUp(self):
        """
        Create a cube containing a regular lat-lon grid.

        Data is striped horizontally,
        e.g.
              1 1 1 1 1 1
              1 1 1 1 1 1
              2 2 2 2 2 2
              2 2 2 2 2 2
              3 3 3 3 3 3
              3 3 3 3 3 3
        """
        data = np.ones((12, 12))
        data[0:4, :] = 1
        data[4:8, :] = 2
        data[8:, :] = 3

        latitudes = np.linspace(-90, 90, 12)
        longitudes = np.linspace(-180, 180, 12)
        latitude = DimCoord(
            latitudes,
            standard_name="latitude",
            units="degrees",
            coord_system=GeogCS(6371229.0),
        )
        longitude = DimCoord(
            longitudes,
            standard_name="longitude",
            units="degrees",
            coord_system=GeogCS(6371229.0),
            circular=True,
        )

        # Use time of 2017-02-17 06:00:00
        time = DimCoord(
            [1487311200],
            standard_name="time",
            units=cf_units.Unit("seconds since 1970-01-01 00:00:00",
                                calendar="gregorian"),
        )
        long_time_coord = DimCoord(
            list(range(1487311200, 1487397600, 3600)),
            standard_name="time",
            units=cf_units.Unit("seconds since 1970-01-01 00:00:00",
                                calendar="gregorian"),
        )

        time_dt = dt(2017, 2, 17, 6, 0)
        time_extract = Constraint(
            time=lambda cell: cell.point == PartialDateTime(
                time_dt.year, time_dt.month, time_dt.day, time_dt.hour))

        cube = Cube(
            data.reshape((1, 12, 12)),
            long_name="air_temperature",
            dim_coords_and_dims=[(time, 0), (latitude, 1), (longitude, 2)],
            units="K",
        )

        long_cube = Cube(
            np.arange(3456).reshape(24, 12, 12),
            long_name="air_temperature",
            dim_coords_and_dims=[(long_time_coord, 0), (latitude, 1),
                                 (longitude, 2)],
            units="K",
        )

        orography = Cube(
            np.ones((12, 12)),
            long_name="surface_altitude",
            dim_coords_and_dims=[(latitude, 0), (longitude, 1)],
            units="m",
        )

        # Western half of grid at altitude 0, eastern half at 10.
        # Note that the pressure_on_height_levels data is left unchanged,
        # so it is as if there is a sharp front running up the grid with
        # differing pressures on either side at equivalent heights above
        # the surface (e.g. east 1000hPa at 0m AMSL, west 1000hPa at 10m AMSL).
        # So there is higher pressure in the west.
        orography.data[0:10] = 0
        orography.data[10:] = 10
        ancillary_data = {}
        ancillary_data["orography"] = orography

        additional_data = {}
        adlist = CubeList()
        adlist.append(cube)
        additional_data["air_temperature"] = adlist

        data_indices = [list(data.nonzero()[0]), list(data.nonzero()[1])]

        self.cube = cube
        self.long_cube = long_cube
        self.data = data
        self.time_dt = time_dt
        self.time_extract = time_extract
        self.data_indices = data_indices
        self.ancillary_data = ancillary_data
        self.additional_data = additional_data
Beispiel #37
 def setUp(self):
     self.cubes = CubeList([iris.tests.stock.lat_lon_cube()])
Beispiel #38
    def create_data_object(self, filenames, variable, index_offset=1):
        from cis.data_io.hdf_vd import get_data
        from cis.data_io.hdf_vd import VDS
        from pyhdf.error import HDF4Error
        from cis.data_io import hdf_sd
        from iris.coords import DimCoord, AuxCoord
        from iris.cube import Cube, CubeList
        from cis.data_io.gridded_data import GriddedData
        from cis.time_util import cis_standard_time_unit
        from datetime import datetime
        from iris.util import new_axis
        import numpy as np

        logging.debug("Creating data object for variable " + variable)

        variables = ["Pressure_Mean"]
        logging.info("Listing coordinates: " + str(variables))

        variables.append(variable)

        # reading data from files
        sdata = {}
        for filename in filenames:
            try:
                sds_dict = hdf_sd.read(filename, variables)
            except HDF4Error as e:
                raise IOError(str(e))

            for var in list(sds_dict.keys()):
                utils.add_element_to_list_in_dict(sdata, var, sds_dict[var])

        # work out size of data arrays
        # the coordinate variables will be reshaped to match that.
        # NOTE: This assumes that all Caliop_L1 files have the same altitudes.
        #       If this is not the case, then the following line will need to be changed
        #       to concatenate the data from all the files and not just arbitrarily pick
        #       the altitudes from the first file.
        alt_data = self._get_calipso_data(hdf_sd.HDF_SDS(filenames[0], 'Altitude_Midpoint'))[0, :]
        alt_coord = DimCoord(alt_data, standard_name='altitude', units='km')
        alt_coord.convert_units('m')

        lat_data = self._get_calipso_data(hdf_sd.HDF_SDS(filenames[0], 'Latitude_Midpoint'))[0, :]
        lat_coord = DimCoord(lat_data, standard_name='latitude', units='degrees_north')

        lon_data = self._get_calipso_data(hdf_sd.HDF_SDS(filenames[0], 'Longitude_Midpoint'))[0, :]
        lon_coord = DimCoord(lon_data, standard_name='longitude', units='degrees_east')

        cubes = CubeList()
        for f in filenames:
            t = get_data(VDS(f, "Nominal_Year_Month"), True)[0]
            time_data = cis_standard_time_unit.date2num(datetime(int(t[0:4]), int(t[4:6]), 15))
            time_coord = AuxCoord(time_data, long_name='Profile_Time', standard_name='time',
                                  units=cis_standard_time_unit)

            # retrieve data + its metadata
            var = sdata[variable]
            metadata = hdf.read_metadata(var, "SD")

            data = self._get_calipso_data(hdf_sd.HDF_SDS(f, variable))

            pres_data = self._get_calipso_data(hdf_sd.HDF_SDS(f, 'Pressure_Mean'))
            pres_coord = AuxCoord(pres_data, standard_name='air_pressure', units='hPa')

            if data.ndim == 2:
                # pres_coord = new_axis()
                cube = Cube(data, long_name=metadata.long_name or variable, units=self.clean_units(metadata.units),
                            dim_coords_and_dims=[(lat_coord, 0), (lon_coord, 1)],
                            aux_coords_and_dims=[(time_coord, ())])
                # Promote the time scalar coord to a length one dimension
                new_cube = new_axis(cube, 'time')
                cubes.append(new_cube)
            elif data.ndim == 3:
                # pres_coord = new_axis()
                cube = Cube(data, long_name=metadata.long_name or variable, units=self.clean_units(metadata.units),
                            dim_coords_and_dims=[(lat_coord, 0), (lon_coord, 1), (alt_coord, 2)],
                            aux_coords_and_dims=[(time_coord, ())])
                # Promote the time scalar coord to a length one dimension
                new_cube = new_axis(cube, 'time')
                # Then add the (extended) pressure coord so that it is explicitly a function of time
                new_cube.add_aux_coord(pres_coord[np.newaxis, ...], (0, 1, 2, 3))
                cubes.append(new_cube)
            else:
                raise ValueError("Unexpected number of dimensions for CALIOP data: {}".format(data.ndim))


        # Concatenate the cubes from each file into a single GriddedData object
        gd = GriddedData.make_from_cube(cubes.concatenate_cube())
        return gd
Beispiel #39
def segmentation(features,
                 field,
                 dxy,
                 threshold=3e-3,
                 target='maximum',
                 level=None,
                 method='watershed',
                 max_distance=None,
                 vertical_coord='auto'):
    """
    Function using watershedding or random walker to determine cloud volumes associated with tracked updrafts
    
    Parameters:
    features:      pandas.DataFrame
                   output from trackpy/maketrack
    field:         iris.cube.Cube
                   containing the field to perform the watershedding on
    threshold:     float
                   threshold for the watershedding field to be used for the mask
    target:        string
                   switch to determine whether the algorithm seeds from maxima or
                   minima in the input field (maximum: starting from maxima
                   (default), minimum: starting from minima)
    level:         slice
                   levels at which to seed the cells for the watershedding algorithm
    method:        str
                   flag determining the algorithm to use (currently only
                   watershedding is implemented)
    max_distance:  float
                   maximum distance from a marker allowed to be classified as
                   belonging to that cell

    Output:
    segmentation_out: iris.cube.Cube
                   Cloud mask, 0 outside and integer numbers according to track
                   inside the cloud
    features_out:  pandas.DataFrame
                   feature DataFrame with added information about the segmentation
    """
    import pandas as pd
    from iris.cube import CubeList

    logging.info('Start watershedding 3D')

    # check input for right dimensions:
    if not (field.ndim == 3 or field.ndim == 4):
        raise ValueError(
            'input to segmentation step must be 3D or 4D including a time dimension'
        )
    if 'time' not in [coord.name() for coord in field.coords()]:
        raise ValueError(
            "input to segmentation step must include a dimension named 'time'")

    # CubeList and list to store individual segmentation masks and feature DataFrames with information about segmentation
    segmentation_out_list = CubeList()
    features_out_list = []

    #loop over individual input timesteps for segmentation:
    field_time = field.slices_over('time')
    for i, field_i in enumerate(field_time):
        time_i = field_i.coord('time').units.num2date(
            field_i.coord('time').points[0])
        features_i = features.loc[features['time'] == time_i]
        segmentation_out_i, features_out_i = segmentation_timestep(
            field_i,
            features_i,
            dxy,
            threshold=threshold,
            target=target,
            level=level,
            method=method,
            max_distance=max_distance,
            vertical_coord=vertical_coord)
        segmentation_out_list.append(segmentation_out_i)
        features_out_list.append(features_out_i)
        logging.debug('Finished segmentation for ' +
                      time_i.strftime('%Y-%m-%d_%H:%M:%S'))

    #Merge output from individual timesteps:
    segmentation_out = segmentation_out_list.merge_cube()
    features_out = pd.concat(features_out_list)

    logging.debug('Finished segmentation')
    return segmentation_out, features_out
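
# Hedged usage sketch (variable names are placeholders): given the feature
# DataFrame from the tracking step and a (time, y, x) field cube, the
# per-timestep masks come back merged into a single cube along time, together
# with the updated feature DataFrame.
#
#     mask_cube, features_out = segmentation(features_df, field_cube, dxy,
#                                            threshold=3e-3, target='maximum')
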
Beispiel #40
    def create_data_object(self, filenames, variable):
        from netCDF4 import Dataset
        from biggus import OrthoArrayAdapter
        from iris.cube import Cube, CubeList
        from iris.coords import DimCoord
        from iris.fileformats.netcdf import NetCDFDataProxy
        from datetime import datetime
        from os.path import basename
        from cis.time_util import cis_standard_time_unit
        from cis.data_io.gridded_data import make_from_cube
        import numpy as np

        cubes = CubeList()

        for f in filenames:
            # Open the file
            ds = Dataset(f)
            # E.g. 'NO2.COLUMN.VERTICAL.TROPOSPHERIC.CS30_BACKSCATTER.SOLAR'
            v = ds.variables[variable]
            # Get the coords
            lat = ds.variables['LATITUDE']
            lon = ds.variables['LONGITUDE']

            # Work out the data's dtype (only needed by the lazy biggus/NetCDFDataProxy approach, commented out below)
            scale_factor = getattr(v, 'scale_factor', None)
            add_offset = getattr(v, 'add_offset', None)
            if scale_factor is None and add_offset is None:
                v_dtype = v.datatype
            elif scale_factor is not None:
                v_dtype = scale_factor.dtype
            else:
                v_dtype = add_offset.dtype
            # proxy = NetCDFDataProxy(v.shape, v_dtype, f, variable, float(v.VAR_FILL_VALUE))
            # a = OrthoArrayAdapter(proxy)
            # Mask out all invalid values (NaN, Inf, etc)
            a = np.ma.masked_invalid(v[:])
            # Mask out all negative values as well
            a = np.ma.masked_less(a, 0.0)

            # Just read the lat and lon in directly
            lat_coord = DimCoord(lat[:], standard_name='latitude', units='degrees', long_name=lat.VAR_DESCRIPTION)
            lon_coord = DimCoord(lon[:], standard_name='longitude', units='degrees', long_name=lon.VAR_DESCRIPTION)

            # Pull the date out of the filename
            fname = basename(f)
            dt = datetime.strptime(fname[:10], "%Y_%m_%d")
            t_coord = DimCoord(cis_standard_time_unit.date2num(dt), standard_name='time', units=cis_standard_time_unit)

            c = Cube(a, long_name=getattr(v, "VAR_DESCRIPTION", None), units=getattr(v, "VAR_UNITS", None),
                     dim_coords_and_dims=[(lat_coord, 0), (lon_coord, 1)])

            c.add_aux_coord(t_coord)

            # Close the file
            ds.close()

            cubes.append(c)

        # We have a scalar time coord and no conflicting metadata so this should just create one cube...
        merged = cubes.merge_cube()

        # Return as a CIS GriddedData object
        return make_from_cube(merged)
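The scalar-time-coordinate comment above relies on a core iris behaviour: cubes that are identical apart from the value of a scalar coordinate merge into a single cube with that coordinate promoted to a new dimension. A self-contained sketch of that behaviour (names and values are illustrative):

import numpy as np
from iris.cube import Cube, CubeList
from iris.coords import AuxCoord

cubes = CubeList()
for day in range(3):
    cube = Cube(np.zeros((2, 2)), long_name='example_field')
    # Each cube differs only in the value of its scalar time coordinate.
    cube.add_aux_coord(AuxCoord(float(day), standard_name='time',
                                units='days since 2000-01-01'))
    cubes.append(cube)

merged = cubes.merge_cube()
print(merged.shape)   # (3, 2, 2) -- 'time' has become a new leading dimension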
Beispiel #41
0
 def setUp(self):
     data = np.zeros((2, 2), dtype=np.float32)
     self.wind_speed_cube = set_up_variable_cube(data, name="wind_speed")
     self.wind_dir_cube = set_up_variable_cube(data,
                                               name="wind_from_direction")
     self.wind_cubes = CubeList([self.wind_speed_cube, self.wind_dir_cube])
Beispiel #42
0
    def create_data_object(self, filenames, variable, index_offset=1):
        from cis.data_io.hdf_vd import get_data
        from cis.data_io.hdf_vd import VDS
        from pyhdf.error import HDF4Error
        from cis.data_io import hdf_sd
        from iris.coords import DimCoord, AuxCoord
        from iris.cube import Cube, CubeList
        from cis.data_io.gridded_data import GriddedData
        from cis.time_util import cis_standard_time_unit
        from datetime import datetime
        from iris.util import new_axis
        import numpy as np

        logging.debug("Creating data object for variable " + variable)

        variables = ["Pressure_Mean"]
        logging.info("Listing coordinates: " + str(variables))

        variables.append(variable)

        # reading data from files
        sdata = {}
        for filename in filenames:
            try:
                sds_dict = hdf_sd.read(filename, variables)
            except HDF4Error as e:
                raise IOError(str(e))

            for var in list(sds_dict.keys()):
                utils.add_element_to_list_in_dict(sdata, var, sds_dict[var])

        # work out size of data arrays
        # the coordinate variables will be reshaped to match that.
        # NOTE: This assumes that all Caliop_L1 files have the same altitudes.
        #       If this is not the case, then the following line will need to be changed
        #       to concatenate the data from all the files and not just arbitrarily pick
        #       the altitudes from the first file.
        alt_data = self._get_calipso_data(
            hdf_sd.HDF_SDS(filenames[0], 'Altitude_Midpoint'))[0, :]
        alt_coord = DimCoord(alt_data, standard_name='altitude', units='km')
        alt_coord.convert_units('m')

        lat_data = self._get_calipso_data(
            hdf_sd.HDF_SDS(filenames[0], 'Latitude_Midpoint'))[0, :]
        lat_coord = DimCoord(lat_data,
                             standard_name='latitude',
                             units='degrees_north')

        lon_data = self._get_calipso_data(
            hdf_sd.HDF_SDS(filenames[0], 'Longitude_Midpoint'))[0, :]
        lon_coord = DimCoord(lon_data,
                             standard_name='longitude',
                             units='degrees_east')

        cubes = CubeList()
        for f in filenames:
            t = get_data(VDS(f, "Nominal_Year_Month"), True)[0]
            time_data = cis_standard_time_unit.date2num(
                datetime(int(t[0:4]), int(t[4:6]), 15))
            time_coord = AuxCoord(time_data,
                                  long_name='Profile_Time',
                                  standard_name='time',
                                  units=cis_standard_time_unit)

            # retrieve data + its metadata
            var = sdata[variable]
            metadata = hdf.read_metadata(var, "SD")

            data = self._get_calipso_data(hdf_sd.HDF_SDS(f, variable))

            pres_data = self._get_calipso_data(
                hdf_sd.HDF_SDS(f, 'Pressure_Mean'))
            pres_coord = AuxCoord(pres_data,
                                  standard_name='air_pressure',
                                  units='hPa')

            if data.ndim == 2:
                # pres_coord = new_axis()
                cube = Cube(data,
                            long_name=metadata.long_name or variable,
                            units=self.clean_units(metadata.units),
                            dim_coords_and_dims=[(lat_coord, 0),
                                                 (lon_coord, 1)],
                            aux_coords_and_dims=[(time_coord, ())])
                # Promote the time scalar coord to a length one dimension
                new_cube = new_axis(cube, 'time')
                cubes.append(new_cube)
            elif data.ndim == 3:
                # pres_coord = new_axis()
                cube = Cube(data,
                            long_name=metadata.long_name or variable,
                            units=self.clean_units(metadata.units),
                            dim_coords_and_dims=[(lat_coord, 0),
                                                 (lon_coord, 1),
                                                 (alt_coord, 2)],
                            aux_coords_and_dims=[(time_coord, ())])
                # Promote the time scalar coord to a length one dimension
                new_cube = new_axis(cube, 'time')
                # Then add the (extended) pressure coord so that it is explicitly a function of time
                new_cube.add_aux_coord(pres_coord[np.newaxis, ...],
                                       (0, 1, 2, 3))
                cubes.append(new_cube)
            else:
                raise ValueError(
                    "Unexpected number of dimensions for CALIOP data: {}".
                    format(data.ndim))

        # Concatenate the cubes from each file into a single GriddedData object
        gd = GriddedData.make_from_cube(cubes.concatenate_cube())
        return gd
Beispiel #43
0
 def test_empty(self):
     exc_regexp = "can't concatenate an empty CubeList"
     with self.assertRaisesRegex(ValueError, exc_regexp):
         CubeList([]).concatenate_cube()
Beispiel #44
0
 def setUp(self):
     self.cubes = CubeList([Cube(np.arange(3)),
                            Cube(np.arange(3))])
Beispiel #45
0
class Test_apply_ice(IrisTest):
    """Test the apply_ice method."""
    def setUp(self):
        """Create cubes with a single zero prob(precip) point.
        The cubes look like this:
        precipitation_amount / (kg m^-2)
        Dimension coordinates:
            time: 1;
            projection_y_coordinate: 3;
            projection_x_coordinate: 3;
        Auxiliary coordinates:
            forecast_period (on time coord): 0.0 hours (simulates nowcast data)
        Scalar coordinates:
            forecast_reference_time: 2015-11-23 03:00:00
        Data:
        self.fg_cube:
            All points contain float(1.)
            Cube name is "probability_of_lightning".
        self.ice_cube:
            With extra coordinate of length(3) "threshold" containing
            points [0.5, 1., 2.] kg m^-2.
            Time and forecast_period dimensions "squeezed" to be Scalar coords.
            All points contain float(0.)
            Cube name is "probability_of_vertical_integral_of_ice".
        """
        self.fg_cube = add_forecast_reference_time_and_forecast_period(
            set_up_cube_with_no_realizations(zero_point_indices=[],
                                             num_grid_points=3),
            fp_point=0.0)
        self.fg_cube.rename("probability_of_lightning")
        self.ice_cube = squeeze(
            add_forecast_reference_time_and_forecast_period(set_up_cube(
                num_realization_points=3,
                zero_point_indices=[],
                num_grid_points=3),
                                                            fp_point=0.0))
        threshold_coord = self.ice_cube.coord('realization')
        threshold_coord.points = [0.5, 1.0, 2.0]
        threshold_coord.rename('threshold')
        threshold_coord.units = cf_units.Unit('kg m^-2')
        self.ice_cube.data = np.zeros_like(self.ice_cube.data)
        self.ice_cube.rename("probability_of_vertical_integral_of_ice")
        self.plugin = Plugin()

    def test_basic(self):
        """Test that the method returns the expected cube type"""
        result = self.plugin.apply_ice(self.fg_cube, self.ice_cube)
        self.assertIsInstance(result, Cube)

    def test_input(self):
        """Test that the method does not modify the input cubes."""
        cube_a = self.fg_cube.copy()
        cube_b = self.ice_cube.copy()
        self.plugin.apply_ice(cube_a, cube_b)
        self.assertArrayAlmostEqual(cube_a.data, self.fg_cube.data)
        self.assertArrayAlmostEqual(cube_b.data, self.ice_cube.data)

    def test_missing_threshold_low(self):
        """Test that the method raises an error if the ice_cube doesn't
        have a threshold coordinate for 0.5."""
        self.ice_cube.coord('threshold').points = [0.4, 1., 2.]
        msg = (r"No matching prob\(Ice\) cube for threshold 0.5")
        with self.assertRaisesRegex(ConstraintMismatchError, msg):
            self.plugin.apply_ice(self.fg_cube, self.ice_cube)

    def test_missing_threshold_mid(self):
        """Test that the method raises an error if the ice_cube doesn't
        have a threshold coordinate for 1.0."""
        self.ice_cube.coord('threshold').points = [0.5, 0.9, 2.]
        msg = (r"No matching prob\(Ice\) cube for threshold 1.")
        with self.assertRaisesRegex(ConstraintMismatchError, msg):
            self.plugin.apply_ice(self.fg_cube, self.ice_cube)

    def test_missing_threshold_high(self):
        """Test that the method raises an error if the ice_cube doesn't
        have a threshold coordinate for 2.0."""
        self.ice_cube.coord('threshold').points = [0.5, 1., 4.]
        msg = (r"No matching prob\(Ice\) cube for threshold 2.")
        with self.assertRaisesRegex(ConstraintMismatchError, msg):
            self.plugin.apply_ice(self.fg_cube, self.ice_cube)

    def test_ice_null(self):
        """Test that small VII probs do not increase moderate lightning risk"""
        self.ice_cube.data[:, 1, 1] = 0.
        self.ice_cube.data[0, 1, 1] = 0.5
        self.fg_cube.data[0, 1, 1] = 0.25
        expected = self.fg_cube.copy()
        # expected.data contains all ones except:
        expected.data[0, 1, 1] = 0.25
        result = self.plugin.apply_ice(self.fg_cube, self.ice_cube)
        self.assertArrayAlmostEqual(result.data, expected.data)

    def test_ice_zero(self):
        """Test that zero VII probs do not increase zero lightning risk"""
        self.ice_cube.data[:, 1, 1] = 0.
        self.fg_cube.data[0, 1, 1] = 0.
        expected = self.fg_cube.copy()
        # expected.data contains all ones except:
        expected.data[0, 1, 1] = 0.
        result = self.plugin.apply_ice(self.fg_cube, self.ice_cube)
        self.assertArrayAlmostEqual(result.data, expected.data)

    def test_ice_small(self):
        """Test that small VII probs do increase zero lightning risk"""
        self.ice_cube.data[:, 1, 1] = 0.
        self.ice_cube.data[0, 1, 1] = 0.5
        self.fg_cube.data[0, 1, 1] = 0.
        expected = self.fg_cube.copy()
        # expected.data contains all ones except:
        expected.data[0, 1, 1] = 0.05
        result = self.plugin.apply_ice(self.fg_cube, self.ice_cube)
        self.assertArrayAlmostEqual(result.data, expected.data)

    def test_ice_large(self):
        """Test that large VII probs do increase zero lightning risk"""
        self.ice_cube.data[:, 1, 1] = 1.
        self.fg_cube.data[0, 1, 1] = 0.
        expected = self.fg_cube.copy()
        # expected.data contains all ones except:
        expected.data[0, 1, 1] = 0.9
        result = self.plugin.apply_ice(self.fg_cube, self.ice_cube)
        self.assertArrayAlmostEqual(result.data, expected.data)

    def test_ice_large_with_fc(self):
        """Test that large VII probs do increase zero lightning risk when
        forecast lead time is non-zero (two forecast_period points)"""
        self.ice_cube.data[:, 1, 1] = 1.
        self.fg_cube.data[0, 1, 1] = 0.
        self.fg_cube.coord('forecast_period').points = [1.]  # hours
        fg_cube_next = self.fg_cube.copy()
        time_pt, = self.fg_cube.coord('time').points
        fg_cube_next.coord('time').points = [time_pt + 2.]  # hours
        fg_cube_next.coord('forecast_period').points = [3.]  # hours
        self.fg_cube = CubeList([squeeze(self.fg_cube),
                                 squeeze(fg_cube_next)]).merge_cube()
        expected = self.fg_cube.copy()
        # expected.data contains all ones except:
        expected.data[0, 1, 1] = 0.54
        expected.data[1, 1, 1] = 0.0
        result = self.plugin.apply_ice(self.fg_cube, self.ice_cube)
        self.assertArrayAlmostEqual(result.data, expected.data)
Beispiel #46
0
 def setUp(self):
     self.scalar_cubes = CubeList()
     for i in range(5):
         for letter in 'abcd':
             self.scalar_cubes.append(Cube(i, long_name=letter))
Beispiel #47
0
 def test_valid_time(self):
     """Case for a time that is available within the diagnostic cube."""
     plugin = extract_cube_at_time
     cubes = CubeList([self.cube])
     result = plugin(cubes, self.time_dt, self.time_constraint)
     self.assertIsInstance(result, Cube)
Beispiel #48
0
def fix_metadata(cubes,
                 short_name,
                 project,
                 dataset,
                 cmor_table=None,
                 mip=None,
                 frequency=None):
    """
    Fix cube metadata if fixes are required and check it anyway.

    This method collects all the relevant fixes for a given variable, applies
    them and checks the resulting cube (or the original if no fixes were
    needed) metadata to ensure that it complies with the standards of its
    project CMOR tables.

    Parameters
    ----------
    cubes: iris.cube.CubeList
        Cubes to fix
    short_name: str
        Variable's short name
    project: str
        Name of the project the dataset belongs to

    dataset: str
        Name of the dataset

    cmor_table: str, optional
        CMOR tables to use for the check, if available

    mip: str, optional
        Variable's MIP, if available

    frequency: str, optional
        Variable's data frequency, if available

    Returns
    -------
    list of iris.cube.Cube:
        Fixed and checked cubes

    Raises
    ------
    CMORCheckError
        If the checker detects errors in the metadata that it can not fix.

    """
    fixes = Fix.get_fixes(project=project,
                          dataset=dataset,
                          variable=short_name)
    fixed_cubes = []
    by_file = defaultdict(list)
    for cube in cubes:
        by_file[cube.attributes.get('source_file', '')].append(cube)

    for cube_list in by_file.values():
        cube_list = CubeList(cube_list)
        for fix in fixes:
            cube_list = fix.fix_metadata(cube_list)

        if len(cube_list) != 1:
            cube = None
            for raw_cube in cube_list:
                if raw_cube.var_name == short_name:
                    cube = raw_cube
                    break
            if not cube:
                raise ValueError(
                    'More than one cube found for variable %s in %s:%s but '
                    'none of their var_names matches the expected one.\n'
                    'Full list of cubes encountered: %s' %
                    (short_name, project, dataset, cube_list))
            logger.warning(
                'Found variable %s in %s:%s, but there were others present in '
                'the file. Those extra variables are usually metadata '
                '(cell area, latitude descriptions) that were not saved '
                'properly. It is possible that errors will appear later '
                'because of this.\nFull list of cubes encountered: %s',
                short_name, project, dataset, cube_list)
        else:
            cube = cube_list[0]

        if cmor_table and mip:
            checker = _get_cmor_checker(frequency=frequency,
                                        table=cmor_table,
                                        mip=mip,
                                        short_name=short_name,
                                        fail_on_error=False,
                                        automatic_fixes=True)
            cube = checker(cube).check_metadata()
        cube.attributes.pop('source_file', None)
        fixed_cubes.append(cube)
    return fixed_cubes
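A hedged usage sketch of fix_metadata. The file name, project and dataset strings are illustrative assumptions; in practice the cubes would come from the preprocessor's load step with the 'source_file' attribute already set.

# Hypothetical usage; file/variable names are illustrative assumptions.
import iris

cubes = iris.load_raw('tas_Amon_EXAMPLE_historical_r1i1p1_185001-200512.nc')
for cube in cubes:
    # fix_metadata groups cubes by this attribute, so set it explicitly here.
    cube.attributes['source_file'] = 'tas_Amon_EXAMPLE_historical_r1i1p1_185001-200512.nc'

fixed_cubes = fix_metadata(cubes, short_name='tas', project='CMIP5',
                           dataset='EXAMPLE', cmor_table='CMIP5', mip='Amon')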
Beispiel #49
0
bbox = [-87.40, 24.25, -74.70, 36.70]

units = iris.unit.Unit('celsius')

name_list = ['sea_water_temperature',
             'sea_surface_temperature',
             'sea_water_potential_temperature',
             'equivalent_potential_temperature',
             'sea_water_conservative_temperature',
             'pseudo_equivalent_potential_temperature']

url = "http://crow.marine.usf.edu:8080/thredds/dodsC/FVCOM-Nowcast-Agg.nc"
cubes = iris.load_raw(url)

in_list = lambda cube: cube.standard_name in name_list
cubes = CubeList([cube for cube in cubes if in_list(cube)])
cube = cubes.merge_cube()

lat = iris.Constraint(latitude=lambda cell: bbox[1] <= cell < bbox[3])
lon = iris.Constraint(longitude=lambda cell: bbox[0] <= cell <= bbox[2])
cube = cube.extract(lon & lat)

istart = time_near(cube, start)
istop = time_near(cube, stop)
cube = cube[istart:istop, ...]

# <codecell>

print(cube)

# <codecell>
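The names time_near, start and stop are not defined in this snippet. A plausible sketch of such a nearest-time-index helper, assuming it returns the index of the cube's time point closest to a given datetime (the original helper may differ):

import numpy as np

def time_near(cube, dt):
    """Return the index of the cube's time point nearest to the datetime `dt`.
    Sketch only; the original helper is not shown in this snippet."""
    time = cube.coord('time')
    target = time.units.date2num(dt)
    return int(np.abs(time.points - target).argmin())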