Пример #1
0
 def test_coord_with_string_points(self):
     # String-valued points can never be regular; the TypeError raised
     # internally must be captured and reported as "not regular".
     coord = AuxCoord(["a", "b", "c"])
     self.assertFalse(is_regular(coord))
Пример #2
0
def build_spotdata_cube(
    data,
    name,
    units,
    altitude,
    latitude,
    longitude,
    wmo_id,
    scalar_coords=None,
    neighbour_methods=None,
    grid_attributes=None,
    additional_dims=None,
):
    """
    Function to build a spotdata cube with expected dimension and auxiliary
    coordinate structure.

    It can be used to create spot data cubes. In this case the data is the
    spot data values at each site, and the coordinates that describe each site.

    It can also be used to create cubes which describe the grid points that are
    used to extract each site from a gridded field, for different selection
    method. The selection methods are specified by the neighbour_methods
    coordinate. The grid_attribute coordinate encapsulates information required
    to extract data, for example the x/y indices that identify the grid point
    neighbour.

    .. See the documentation for examples of these cubes.
    .. include:: extended_documentation/spotdata/build_spotdata_cube/
       build_spotdata_cube_examples.rst

    Args:
        data (float or numpy.ndarray):
            Float spot data or array of data points from several sites.
            The spot index should be the last dimension if the array is
            multi-dimensional (see optional additional dimensions below).
        name (str):
            Cube name (eg 'air_temperature')
        units (str):
            Cube units (eg 'K')
        altitude (float or numpy.ndarray):
            Float or 1d array of site altitudes in metres
        latitude (float or numpy.ndarray):
            Float or 1d array of site latitudes in degrees
        longitude (float or numpy.ndarray):
            Float or 1d array of site longitudes in degrees
        wmo_id (str or list of str):
            String or list of site 5-digit WMO identifiers
        scalar_coords (list of iris.coords.AuxCoord):
            Optional list of iris.coords.AuxCoord instances
        neighbour_methods (list of str):
            Optional list of neighbour method names, e.g. 'nearest'
        grid_attributes (list of str):
            Optional list of grid attribute names, e.g. x-index, y-index
        additional_dims (list of iris.coords.DimCoord):
            Optional list of additional dimensions to precede the spot data
            dimension.

    Returns:
        iris.cube.Cube:
            A cube containing the extracted spot data with spot data being the
            final dimension.
    """

    # construct auxiliary coordinates describing each site
    alt_coord = AuxCoord(altitude, "altitude", units="m")
    lat_coord = AuxCoord(latitude, "latitude", units="degrees")
    lon_coord = AuxCoord(longitude, "longitude", units="degrees")
    id_coord = AuxCoord(wmo_id, long_name="wmo_id", units="no_unit")

    aux_coords_and_dims = []

    # append scalar coordinates
    if scalar_coords is not None:
        for coord in scalar_coords:
            aux_coords_and_dims.append((coord, None))

    # construct dimension coordinates; promote scalar (including 0-d
    # numpy array) data to 1-d so a single site is handled uniformly.
    # Note: np.isscalar(np.array(1.0)) is False, so the previous scalar
    # check missed 0-d arrays and data.shape[-1] raised IndexError.
    data = np.atleast_1d(data)
    spot_index = DimCoord(
        np.arange(data.shape[-1], dtype=np.int32),
        long_name="spot_index",
        units="1",
    )

    dim_coords_and_dims = []
    current_dim = 0

    if neighbour_methods is not None:
        # Integer dimension coordinate indexing the selection methods,
        # with the method names attached as an auxiliary coordinate.
        neighbour_methods_coord = DimCoord(
            np.arange(len(neighbour_methods), dtype=np.int32),
            long_name="neighbour_selection_method",
            units="1",
        )
        neighbour_methods_key = AuxCoord(
            neighbour_methods,
            long_name="neighbour_selection_method_name",
            units="no_unit",
        )

        dim_coords_and_dims.append((neighbour_methods_coord, current_dim))
        aux_coords_and_dims.append((neighbour_methods_key, current_dim))
        current_dim += 1

    if grid_attributes is not None:
        # Integer dimension coordinate indexing the grid attributes,
        # with the attribute names attached as an auxiliary coordinate.
        grid_attributes_coord = DimCoord(
            np.arange(len(grid_attributes), dtype=np.int32),
            long_name="grid_attributes",
            units="1",
        )
        grid_attributes_key = AuxCoord(
            grid_attributes,
            long_name="grid_attributes_key",
            units="no_unit",
        )

        dim_coords_and_dims.append((grid_attributes_coord, current_dim))
        aux_coords_and_dims.append((grid_attributes_key, current_dim))
        current_dim += 1

    if additional_dims is not None:
        for coord in additional_dims:
            dim_coords_and_dims.append((coord, current_dim))
            current_dim += 1

    # the spot index is always the final dimension; the site-describing
    # auxiliary coordinates are all associated with it
    dim_coords_and_dims.append((spot_index, current_dim))
    for coord in [alt_coord, lat_coord, lon_coord, id_coord]:
        aux_coords_and_dims.append((coord, current_dim))

    # create output cube
    spot_cube = iris.cube.Cube(
        data,
        long_name=name,
        units=units,
        dim_coords_and_dims=dim_coords_and_dims,
        aux_coords_and_dims=aux_coords_and_dims,
    )
    # rename to force a standard name to be set if name is valid
    spot_cube.rename(name)

    return spot_cube
Пример #3
0
 def test_numpy_scalar_coord__zero_ndim(self):
     # A 0-dimensional numpy scalar should be promoted to a
     # one-element array of points.
     coord = AuxCoord(np.array(1))
     nd_points = AuxCoordFactory._nd_points(coord, (), 0)
     self.assertArrayEqual(nd_points, np.array([1]))
Пример #4
0
def _generate_cubes(header, column_headings, coords, data_arrays,
                    cell_methods=None):
    """
    Yield :class:`iris.cube.Cube` instances given
    the headers, column headings, coords and data_arrays extracted
    from a NAME file.

    Args:

    * header (dict):
        File-level header entries; values not encoded elsewhere become
        cube attributes.

    * column_headings (dict):
        Mapping of heading name to a per-field list of values.

    * coords (iterable):
        Coordinate descriptors providing ``name``, ``values`` and
        ``dimension`` attributes.

    * data_arrays (iterable):
        One data payload per generated cube.

    Kwargs:

    * cell_methods (list):
        Optional list with one cell method per field.

    """
    for i, data_array in enumerate(data_arrays):
        # Turn the dictionary of column headings with a list of header
        # information for each field into a dictionary of headings for
        # just this field.
        field_headings = {k: v[i] for k, v in six.iteritems(column_headings)}

        # Make a cube.
        cube = iris.cube.Cube(data_array)

        # Determine the name and units.
        name = '{} {}'.format(field_headings['Species'],
                              field_headings['Quantity'])
        name = name.upper().replace(' ', '_')
        cube.rename(name)

        # Some units are not in SI units, are missing spaces or typed
        # in the wrong case. _parse_units returns units that are
        # recognised by Iris.
        cube.units = _parse_units(field_headings['Units'])

        # Define and add the singular coordinates of the field (flight
        # level, time etc.)
        if 'Z' in field_headings:
            # Single-element unpack selects the bound value, or None
            # when the heading is absent.
            upper_bound, = [field_headings['... to [Z]']
                            if '... to [Z]' in field_headings else None]
            lower_bound, = [field_headings['... from [Z]']
                            if '... from [Z]' in field_headings else None]
            z_coord = _cf_height_from_name(field_headings['Z'],
                                           upper_bound=upper_bound,
                                           lower_bound=lower_bound)
            cube.add_aux_coord(z_coord)

        # Define the time unit and use it to serialise the datetime for
        # the time coordinate.
        time_unit = cf_units.Unit(
            'hours since epoch', calendar=cf_units.CALENDAR_GREGORIAN)

        # Build time, height, latitude and longitude coordinates.
        # NOTE(review): coord_units (and long_name in some branches) is
        # only bound when coord.name matches one of the cases below; an
        # unrecognised name would raise UnboundLocalError — confirm the
        # caller only supplies the known coordinate names.
        for coord in coords:
            pts = coord.values
            coord_sys = None
            if coord.name == 'latitude' or coord.name == 'longitude':
                coord_units = 'degrees'
                coord_sys = iris.coord_systems.GeogCS(EARTH_RADIUS)
            if coord.name == 'height':
                coord_units = 'm'
                long_name = 'height above ground level'
                pts = coord.values
            if coord.name == 'altitude':
                coord_units = 'm'
                long_name = 'altitude above sea level'
                pts = coord.values
            if coord.name == 'air_pressure':
                coord_units = 'Pa'
                pts = coord.values
            if coord.name == 'flight_level':
                pts = coord.values
                long_name = 'flight_level'
                coord_units = _parse_units('FL')
            if coord.name == 'time':
                coord_units = time_unit
                pts = time_unit.date2num(coord.values)

            if coord.dimension is not None:
                # Dimensional coordinate: longitude may wrap around the
                # globe, so check for circularity.
                if coord.name == 'longitude':
                    circular = iris.util._is_circular(pts, 360.0)
                else:
                    circular = False
                if coord.name == 'flight_level':
                    # Flight level has no CF standard name.
                    icoord = DimCoord(points=pts,
                                      units=coord_units,
                                      long_name=long_name,)
                else:
                    icoord = DimCoord(points=pts,
                                      standard_name=coord.name,
                                      units=coord_units,
                                      coord_system=coord_sys,
                                      circular=circular)
                if coord.name == 'height' or coord.name == 'altitude':
                    icoord.long_name = long_name
                # Time bounds span the averaging/integrating period when
                # one is supplied; otherwise bounds are guessed.
                if coord.name == 'time' and 'Av or Int period' in \
                        field_headings:
                    dt = coord.values - \
                        field_headings['Av or Int period']
                    bnds = time_unit.date2num(
                        np.vstack((dt, coord.values)).T)
                    icoord.bounds = bnds
                else:
                    icoord.guess_bounds()
                cube.add_dim_coord(icoord, coord.dimension)
            else:
                # Scalar (auxiliary) coordinate: take the value for this
                # field only.
                icoord = AuxCoord(points=pts[i],
                                  standard_name=coord.name,
                                  coord_system=coord_sys,
                                  units=coord_units)
                if coord.name == 'time' and 'Av or Int period' in \
                        field_headings:
                    dt = coord.values - \
                        field_headings['Av or Int period']
                    bnds = time_unit.date2num(
                        np.vstack((dt, coord.values)).T)
                    icoord.bounds = bnds[i, :]
                cube.add_aux_coord(icoord)

        # Headings/column headings which are encoded elsewhere.
        headings = ['X', 'Y', 'Z', 'Time', 'T', 'Units',
                    'Av or Int period',
                    '... from [Z]', '... to [Z]',
                    'X grid origin', 'Y grid origin',
                    'X grid size', 'Y grid size',
                    'X grid resolution', 'Y grid resolution',
                    'Number of field cols', 'Number of preliminary cols',
                    'Number of fields', 'Number of series',
                    'Output format', ]

        # Add the Main Headings as attributes.
        for key, value in six.iteritems(header):
            if value is not None and value != '' and \
                    key not in headings:
                cube.attributes[key] = value

        # Add the Column Headings as attributes
        for key, value in six.iteritems(field_headings):
            if value is not None and value != '' and \
                    key not in headings:
                cube.attributes[key] = value

        if cell_methods is not None:
            cube.add_cell_method(cell_methods[i])

        yield cube
Пример #5
0
 def setUp(self):
     # Build a simple bounded 2D cube and attach a string-valued
     # auxiliary coordinate along its second dimension.
     super(TestGraphicStringCoord, self).setUp()
     str_coord = AuxCoord(list('abcd'), long_name='str_coord')
     self.cube = simple_2d(with_bounds=True)
     self.cube.add_aux_coord(str_coord, 1)
Пример #6
0
def _convert_vertical_coords(lbcode,
                             lbvc,
                             blev,
                             lblev,
                             stash,
                             bhlev,
                             bhrlev,
                             brsvd1,
                             brsvd2,
                             brlev,
                             dim=None):
    """
    Encode scalar or vector vertical level values from PP headers as CM data
    components.

    Args:

    * lbcode:
        Scalar field :class:`iris.fileformats.pp.SplittableInt` value.

    * lbvc:
        Scalar field value.

    * blev:
        Scalar field value or :class:`numpy.ndarray` vector of field values.

    * lblev:
        Scalar field value or :class:`numpy.ndarray` vector of field values.

    * stash:
        Scalar field :class:`iris.fileformats.pp.STASH` value.

    * bhlev:
        Scalar field value or :class:`numpy.ndarray` vector of field values.

    * bhrlev:
        Scalar field value or :class:`numpy.ndarray` vector of field values.

    * brsvd1:
        Scalar field value or :class:`numpy.ndarray` vector of field values.

    * brsvd2:
        Scalar field value or :class:`numpy.ndarray` vector of field values.

    * brlev:
        Scalar field value or :class:`numpy.ndarray` vector of field values.

    Kwargs:

    * dim:
        Associated dimension of the vertical coordinate. Defaults to None.

    Returns:
        A tuple containing a list of coords_and_dims, and a list of factories.

    """
    factories = []
    coords_and_dims = []

    # See Word no. 33 (LBLEV) in section 4 of UM Model Docs (F3).
    BASE_RHO_LEVEL_LBLEV = 9999
    # NOTE(review): np.atleast_1d may return a view of the caller's
    # lblev array, so the in-place sentinel replacement below can mutate
    # the caller's data — confirm this is intended.
    model_level_number = np.atleast_1d(lblev)
    model_level_number[model_level_number == BASE_RHO_LEVEL_LBLEV] = 0

    # Ensure to vectorise these arguments as arrays, as they participate
    # in the conditions of convert rules.
    blev = np.atleast_1d(blev)
    brsvd1 = np.atleast_1d(brsvd1)
    brlev = np.atleast_1d(brlev)

    # Height.
    # Skipped for STASH codes whose height is implied (handled below),
    # and for the -1 missing-value sentinel.
    if (lbvc == 1) and \
            str(stash) not in STASHCODE_IMPLIED_HEIGHTS and \
            np.all(blev != -1):
        coord = _dim_or_aux(blev,
                            standard_name='height',
                            units='m',
                            attributes={'positive': 'up'})
        coords_and_dims.append((coord, dim))

    # Height implied by the STASH code (always a scalar coordinate).
    if str(stash) in STASHCODE_IMPLIED_HEIGHTS:
        height = STASHCODE_IMPLIED_HEIGHTS[str(stash)]
        coord = DimCoord(height,
                         standard_name='height',
                         units='m',
                         attributes={'positive': 'up'})
        coords_and_dims.append((coord, None))

    # Model level number.
    if (len(lbcode) != 5) and \
            (lbvc == 2):
        coord = _dim_or_aux(model_level_number,
                            standard_name='model_level_number',
                            attributes={'positive': 'down'})
        coords_and_dims.append((coord, dim))

    # Depth - unbound.
    # Equal upper/lower bound headers mean no usable bounds.
    if (len(lbcode) != 5) and \
            (lbvc == 2) and \
            np.all(brsvd1 == brlev):
        coord = _dim_or_aux(blev,
                            standard_name='depth',
                            units='m',
                            attributes={'positive': 'down'})
        coords_and_dims.append((coord, dim))

    # Depth - bound.
    if (len(lbcode) != 5) and \
            (lbvc == 2) and \
            np.all(brsvd1 != brlev):
        coord = _dim_or_aux(blev,
                            standard_name='depth',
                            units='m',
                            bounds=np.vstack((brsvd1, brlev)).T,
                            attributes={'positive': 'down'})
        coords_and_dims.append((coord, dim))

    # Depth - unbound and bound (mixed).
    # Where bound headers coincide, fall back to the point value so
    # every level still gets a (possibly degenerate) bounds pair.
    if (len(lbcode) != 5) and \
            (lbvc == 2) and \
            (np.any(brsvd1 == brlev) and np.any(brsvd1 != brlev)):
        lower = np.where(brsvd1 == brlev, blev, brsvd1)
        upper = np.where(brsvd1 == brlev, blev, brlev)
        coord = _dim_or_aux(blev,
                            standard_name='depth',
                            units='m',
                            bounds=np.vstack((lower, upper)).T,
                            attributes={'positive': 'down'})
        coords_and_dims.append((coord, dim))

    # Soil level/depth.
    if len(lbcode) != 5 and lbvc == 6:
        if np.all(brsvd1 == 0) and np.all(brlev == 0):
            # UM populates lblev, brsvd1 and brlev metadata INCORRECTLY,
            # so continue to treat as a soil level.
            coord = _dim_or_aux(model_level_number,
                                long_name='soil_model_level_number',
                                attributes={'positive': 'down'})
            coords_and_dims.append((coord, dim))
        elif np.any(brsvd1 != brlev):
            # UM populates metadata CORRECTLY,
            # so treat it as the expected (bounded) soil depth.
            coord = _dim_or_aux(blev,
                                standard_name='depth',
                                units='m',
                                bounds=np.vstack((brsvd1, brlev)).T,
                                attributes={'positive': 'down'})
            coords_and_dims.append((coord, dim))

    # Pressure.
    # NOTE(review): the lbcode.ix/iy test appears to exclude fields
    # where pressure is already one of the cross-section axes — confirm.
    if (lbvc == 8) and \
            (len(lbcode) != 5 or (len(lbcode) == 5 and
                                  1 not in [lbcode.ix, lbcode.iy])):
        coord = _dim_or_aux(blev, long_name='pressure', units='hPa')
        coords_and_dims.append((coord, dim))

    # Air potential temperature.
    if (len(lbcode) != 5) and \
            (lbvc == 19):
        coord = _dim_or_aux(blev,
                            standard_name='air_potential_temperature',
                            units='K',
                            attributes={'positive': 'up'})
        coords_and_dims.append((coord, dim))

    # Hybrid pressure levels.
    # The factory later combines 'level_pressure' and 'sigma' with the
    # referenced surface_air_pressure field to derive air pressure.
    if lbvc == 9:
        model_level_number = _dim_or_aux(model_level_number,
                                         standard_name='model_level_number',
                                         attributes={'positive': 'up'})
        level_pressure = _dim_or_aux(bhlev,
                                     long_name='level_pressure',
                                     units='Pa',
                                     bounds=np.vstack((bhrlev, brsvd2)).T)
        sigma = AuxCoord(blev,
                         long_name='sigma',
                         bounds=np.vstack((brlev, brsvd1)).T)
        coords_and_dims.extend([(model_level_number, dim),
                                (level_pressure, dim), (sigma, dim)])
        factories.append(
            Factory(HybridPressureFactory,
                    [{
                        'long_name': 'level_pressure'
                    }, {
                        'long_name': 'sigma'
                    },
                     Reference('surface_air_pressure')]))

    # Hybrid height levels.
    # The factory later combines 'level_height' and 'sigma' with the
    # referenced orography field to derive altitude.
    if lbvc == 65:
        model_level_number = _dim_or_aux(model_level_number,
                                         standard_name='model_level_number',
                                         attributes={'positive': 'up'})
        level_height = _dim_or_aux(blev,
                                   long_name='level_height',
                                   units='m',
                                   bounds=np.vstack((brlev, brsvd1)).T,
                                   attributes={'positive': 'up'})
        sigma = AuxCoord(bhlev,
                         long_name='sigma',
                         bounds=np.vstack((bhrlev, brsvd2)).T)
        coords_and_dims.extend([(model_level_number, dim), (level_height, dim),
                                (sigma, dim)])
        factories.append(
            Factory(HybridHeightFactory, [{
                'long_name': 'level_height'
            }, {
                'long_name': 'sigma'
            },
                                          Reference('orography')]))

    return coords_and_dims, factories
Пример #7
0
def load_NAMEIII_trajectory(filename):
    """
    Load a NAME III trajectory file returning a
    generator of :class:`iris.cube.Cube` instances.

    Args:

    * filename (string):
        Name of file to load.

    Returns:
        A generator :class:`iris.cube.Cube` instances.

    """
    time_unit = cf_units.Unit('hours since epoch',
                              calendar=cf_units.CALENDAR_GREGORIAN)

    with open(filename, 'r') as infile:
        header = read_header(infile)

        # read the column headings
        # The headings line is the first line indented by four spaces.
        for line in infile:
            if line.startswith("    "):
                break
        headings = [heading.strip() for heading in line.split(",")]

        # read the columns
        # Values are parsed as datetimes (when tagged "UTC"), floats
        # where possible, and otherwise kept as strings.
        columns = [[] for i in range(len(headings))]
        for line in infile:
            values = [v.strip() for v in line.split(",")]
            for c, v in enumerate(values):
                if "UTC" in v:
                    v = v.replace(":00 ", " ")  # Strip out milliseconds.
                    v = datetime.datetime.strptime(v, NAMEIII_DATETIME_FORMAT)
                else:
                    try:
                        v = float(v)
                    except ValueError:
                        pass
                columns[c].append(v)

    # Where's the Z column?
    z_column = None
    for i, heading in enumerate(headings):
        if heading.startswith("Z "):
            z_column = i
            break
    if z_column is None:
        raise TranslationError("Expected a Z column")

    # Every column up to Z becomes a coordinate.
    coords = []
    for name, values in zip(headings[:z_column+1], columns[:z_column+1]):
        values = np.array(values)
        # Collapse a column that holds one repeated value to a scalar.
        if np.all(np.array(values) == values[0]):
            values = [values[0]]

        # Map the NAME column heading onto CF-style coordinate metadata.
        standard_name = long_name = units = None
        if isinstance(values[0], datetime.datetime):
            values = time_unit.date2num(values)
            units = time_unit
            if name == "Time":
                name = "time"
        elif " (Lat-Long)" in name:
            if name.startswith("X"):
                name = "longitude"
            elif name.startswith("Y"):
                name = "latitude"
            units = "degrees"
        elif name == "Z (m asl)":
            name = "altitude"
            units = "m"
            long_name = "altitude above sea level"
        elif name == "Z (m agl)":
            name = 'height'
            units = "m"
            long_name = "height above ground level"
        elif name == "Z (FL)":
            name = "flight_level"
            long_name = name

        # Prefer a DimCoord; fall back to AuxCoord for values that are
        # not monotonic (or otherwise invalid for a DimCoord).
        try:
            coord = DimCoord(values, units=units)
        except ValueError:
            coord = AuxCoord(values, units=units)
        coord.rename(name)
        if coord.long_name is None and long_name is not None:
            coord.long_name = long_name
        coords.append(coord)

    # Every numerical column after the Z becomes a cube.
    for name, values in zip(headings[z_column+1:], columns[z_column+1:]):
        # Skip non-numeric columns.
        try:
            float(values[0])
        except ValueError:
            continue
        # units embedded in column heading?
        name, units = _split_name_and_units(name)
        cube = iris.cube.Cube(values, units=units)
        cube.rename(name)
        # Scalar (collapsed) coordinates get no dimension mapping; only
        # a monotonic time coordinate is attached as the dim coord.
        for coord in coords:
            dim = 0 if len(coord.points) > 1 else None
            if isinstance(coord, DimCoord) and coord.name() == "time":
                cube.add_dim_coord(coord.copy(), dim)
            else:
                cube.add_aux_coord(coord.copy(), dim)
        yield cube
Пример #8
0
 def test_coord_long_name(self):
     # A long_name with no spaces is a usable token.
     name = "long_name"
     self._check(name, AuxCoord(1, long_name=name))
Пример #9
0
 def test_coord_long_name_default(self):
     # A long_name containing a space is not a usable token, so the
     # default name is expected instead.
     name = "long name"
     self._check(name, AuxCoord(1, long_name=name), default=True)
Пример #10
0
def get_cube(name, lat=((0, 1), (2, 3)), lon=((0, 1), (2, 3))):
    """Return a 2x2 cube of ones named *name* with 2D lat/lon aux coords."""
    result = Cube(np.ones((2, 2)), name)
    for values, coord_name in ((lat, 'latitude'), (lon, 'longitude')):
        result.add_aux_coord(AuxCoord(values, coord_name), (0, 1))
    return result
Пример #11
0
 def test_coord_standard_name(self):
     # A valid standard name should be used as the token directly.
     name = "air_temperature"
     self._check(name, AuxCoord(1, standard_name=name))
Пример #12
0
def test_get_allvars_fix():
    """Test getting of fix."""
    fixes = Fix.get_fixes('CMIP5', 'FGOALS-s2', 'Amon', 'tas')
    assert fixes == [AllVars(None)]


# 1D latitude with contiguous bounds, usable as a dimension coordinate.
LAT_COORD = DimCoord(
    [-20.0, 0.0, 10.0],
    bounds=[[-30.0, -10.0], [-10.0, 5.0], [5.0, 20.0]],
    var_name='lat',
    standard_name='latitude',
)
# The same latitude values expressed as a (3, 1) multidimensional
# auxiliary coordinate.
LAT_COORD_MULT = AuxCoord(
    [[-20.0], [0.0], [10.0]],
    bounds=[[[-30.0, -10.0]], [[-10.0, 5.0]], [[5.0, 20.0]]],
    var_name='lat',
    standard_name='latitude',
)
# Single-point latitude with one wide bounds cell.
LAT_COORD_SMALL = DimCoord([0.0],
                           bounds=[-45.0, 45.0],
                           var_name='lat',
                           standard_name='latitude')


def test_allvars_fix_metadata():
    """Test ``fix_metadata`` for all variables."""
    cubes = CubeList([
        Cube([1, 2, 3], dim_coords_and_dims=[(LAT_COORD.copy(), 0)]),
        Cube([[1], [2], [3]],
             aux_coords_and_dims=[(LAT_COORD_MULT.copy(), (0, 1))]),
        Cube([1], dim_coords_and_dims=[(LAT_COORD_SMALL.copy(), 0)]),
Пример #13
0
 def test_coord_with_irregular_step(self):
     # Irregularly spaced points must raise, with the coordinate name
     # included in the error message.
     name = "latitude"
     coord = AuxCoord(np.array([2, 5, 1, 4]), standard_name=name)
     with self.assertRaisesRegex(CoordinateNotRegularError,
                                 "{} is not regular".format(name)):
         regular_step(coord)
Пример #14
0
 def test_2d_coord(self):
     # Multidimensional coordinates are rejected outright.
     coord = AuxCoord(np.arange(8).reshape(2, 4))
     with self.assertRaisesRegex(CoordinateMultiDimError,
                                 "Expected 1D coord"):
         regular_step(coord)
Пример #15
0
def _make_cube_3d(x, y, z, data, aux=None, offset=0):
    """
    A convenience test function that creates a custom 3D cube.

    Args:

    * x:
        A (start, stop, step) tuple for specifying the
        x-axis dimensional coordinate points. Bounds are
        automatically guessed.

    * y:
        A (start, stop, step) tuple for specifying the
        y-axis dimensional coordinate points. Bounds are
        automatically guessed.

    * z:
        A (start, stop, step) tuple for specifying the
        z-axis dimensional coordinate points. Bounds are
        automatically guessed.

    * data:
        The data payload for the cube.

    Kwargs:

    * aux:
        A CSV string specifying which points only auxiliary
        coordinates to create. Accepts either of 'x', 'y', 'z',
        'xy', 'xz', 'yz', 'xyz'.

    * offset:
        Offset value to be added to non-1D auxiliary coordinate
        points.

    Returns:
        The newly created 3D :class:`iris.cube.Cube`.

    """
    x_range = np.arange(*x, dtype=np.float32)
    y_range = np.arange(*y, dtype=np.float32)
    z_range = np.arange(*z, dtype=np.float32)
    x_size, y_size, z_size = len(x_range), len(y_range), len(z_range)

    # The dimension order is (z, y, x) — see add_dim_coord below — so
    # the data buffer must be shaped accordingly. The previous shape
    # (x_size, y_size, z_size) only worked when all sizes were equal.
    cube_data = np.empty((z_size, y_size, x_size), dtype=np.float32)
    cube_data[:] = data
    cube = iris.cube.Cube(cube_data)
    coord = DimCoord(z_range, long_name='z')
    coord.guess_bounds()
    cube.add_dim_coord(coord, 0)
    coord = DimCoord(y_range, long_name='y')
    coord.guess_bounds()
    cube.add_dim_coord(coord, 1)
    coord = DimCoord(x_range, long_name='x')
    coord.guess_bounds()
    cube.add_dim_coord(coord, 2)

    if aux is not None:
        aux = aux.split(',')
        # 1D auxiliary coordinates are scaled points; multidimensional
        # ones are sequential payloads (scaled per kind) plus offset.
        if 'z' in aux:
            coord = AuxCoord(z_range * 10, long_name='z-aux')
            cube.add_aux_coord(coord, (0, ))
        if 'y' in aux:
            coord = AuxCoord(y_range * 10, long_name='y-aux')
            cube.add_aux_coord(coord, (1, ))
        if 'x' in aux:
            coord = AuxCoord(x_range * 10, long_name='x-aux')
            cube.add_aux_coord(coord, (2, ))
        if 'xy' in aux:
            payload = np.arange(x_size * y_size,
                                dtype=np.float32).reshape(y_size, x_size)
            coord = AuxCoord(payload + offset, long_name='xy-aux')
            cube.add_aux_coord(coord, (1, 2))
        if 'xz' in aux:
            payload = np.arange(x_size * z_size,
                                dtype=np.float32).reshape(z_size, x_size)
            coord = AuxCoord(payload * 10 + offset, long_name='xz-aux')
            cube.add_aux_coord(coord, (0, 2))
        if 'yz' in aux:
            payload = np.arange(y_size * z_size,
                                dtype=np.float32).reshape(z_size, y_size)
            coord = AuxCoord(payload * 100 + offset, long_name='yz-aux')
            cube.add_aux_coord(coord, (0, 1))
        if 'xyz' in aux:
            payload = np.arange(x_size * y_size * z_size,
                                dtype=np.float32).reshape(
                                    z_size, y_size, x_size)
            coord = AuxCoord(payload * 1000 + offset, long_name='xyz-aux')
            cube.add_aux_coord(coord, (0, 1, 2))

    return cube
Пример #16
0
 def test_coord_var_name(self):
     # A valid NetCDF variable name is accepted and used as the token.
     name = "var_name"
     self._check(name, AuxCoord(1, var_name=name))
Пример #17
0
def _make_cube(x, y, data, aux=None, offset=0, scalar=None):
    """
    A convenience test function that creates a custom 2D cube.

    Args:

    * x:
        A (start, stop, step) tuple for specifying the
        x-axis dimensional coordinate points. Bounds are
        automatically guessed.

    * y:
        A (start, stop, step) tuple for specifying the
        y-axis dimensional coordinate points. Bounds are
        automatically guessed.

    * data:
        The data payload for the cube.

    Kwargs:

    * aux:
        A CSV string specifying which points only auxiliary
        coordinates to create. Accepts either of 'x', 'y', 'xy'.

    * offset:
        Offset value to be added to the 'xy' auxiliary coordinate
        points.

    * scalar:
        Create a 'height' scalar coordinate with the given value.

    Returns:
        The newly created 2D :class:`iris.cube.Cube`.

    """
    x_points = np.arange(*x, dtype=np.float32)
    y_points = np.arange(*y, dtype=np.float32)

    # Broadcast the payload across a (y, x) shaped buffer.
    payload = np.empty((y_points.size, x_points.size), dtype=np.float32)
    payload[:] = data
    cube = iris.cube.Cube(payload)

    # Attach bounded dimension coordinates: y on dim 0, x on dim 1.
    for index, (points, coord_name) in enumerate([(y_points, 'y'),
                                                  (x_points, 'x')]):
        dim_coord = DimCoord(points, long_name=coord_name)
        dim_coord.guess_bounds()
        cube.add_dim_coord(dim_coord, index)

    # Optionally attach points-only auxiliary coordinates.
    requested = [] if aux is None else aux.split(',')
    if 'y' in requested:
        cube.add_aux_coord(AuxCoord(y_points * 10, long_name='y-aux'), (0, ))
    if 'x' in requested:
        cube.add_aux_coord(AuxCoord(x_points * 10, long_name='x-aux'), (1, ))
    if 'xy' in requested:
        values = np.arange(y_points.size * x_points.size,
                           dtype=np.float32).reshape(y_points.size,
                                                     x_points.size)
        cube.add_aux_coord(AuxCoord(values * 100 + offset,
                                    long_name='xy-aux'), (0, 1))

    # Optionally attach a scalar height coordinate.
    if scalar is not None:
        height = AuxCoord(np.array([scalar], dtype=np.float32),
                          long_name='height', units='m')
        cube.add_aux_coord(height, ())

    return cube
Пример #18
0
 def test_coord_var_name_fail(self):
     # var_name must be a valid NetCDF variable name, so a value
     # containing a space is rejected at construction time.
     with self.assertRaisesRegex(ValueError,
                                 "is not a valid NetCDF variable name"):
         AuxCoord(1, var_name="var name")
Пример #19
0
def _all_other_rules(f):
    """
    This deals with all the other rules that have not been factored into any of
    the other convert_scalar_coordinate functions above.

    Decodes the remaining PP field header elements of *f* (assumed to be a
    PP field object exposing lbtim, lbcode, lbproc, etc. -- TODO confirm
    exact type against caller) into the pieces of Iris cube metadata:
    scalar season/month coordinates, horizontal dimension coordinates,
    cross-section coordinates, LBPROC-derived cell methods, attributes and
    reference targets.

    Returns a tuple of::

        (references, standard_name, long_name, units, attributes,
         cell_methods, dim_coords_and_dims, aux_coords_and_dims)

    """
    references = []
    standard_name = None
    long_name = None
    units = None
    attributes = {}
    cell_methods = []
    dim_coords_and_dims = []
    aux_coords_and_dims = []

    # Season coordinates (--> scalar coordinates)
    # Each of the four branches below fires for a climatological mean
    # period (lbtim.ib == 3) whose start date (lbmon/lbdat/lbhr/lbmin) and
    # end date (lbmond/lbdatd/lbhrd/lbmind) delimit exactly one
    # meteorological season, provided the field is not a time
    # cross-section (lbcode ix/iy values 20-23 indicate time axes).
    # December 1st -> March 1st: winter (DJF).
    if (f.lbtim.ib == 3 and f.lbtim.ic in [1, 2, 4]
            and (len(f.lbcode) != 5 or
                 (len(f.lbcode) == 5 and
                  (f.lbcode.ix not in [20, 21, 22, 23]
                   and f.lbcode.iy not in [20, 21, 22, 23]))) and f.lbmon == 12
            and f.lbdat == 1 and f.lbhr == 0 and f.lbmin == 0 and f.lbmond == 3
            and f.lbdatd == 1 and f.lbhrd == 0 and f.lbmind == 0):
        aux_coords_and_dims.append((AuxCoord('djf',
                                             long_name='season',
                                             units='no_unit'), None))

    # March 1st -> June 1st: spring (MAM).
    if (f.lbtim.ib == 3 and f.lbtim.ic in [1, 2, 4]
            and ((len(f.lbcode) != 5) or
                 (len(f.lbcode) == 5 and f.lbcode.ix not in [20, 21, 22, 23]
                  and f.lbcode.iy not in [20, 21, 22, 23])) and f.lbmon == 3
            and f.lbdat == 1 and f.lbhr == 0 and f.lbmin == 0 and f.lbmond == 6
            and f.lbdatd == 1 and f.lbhrd == 0 and f.lbmind == 0):
        aux_coords_and_dims.append((AuxCoord('mam',
                                             long_name='season',
                                             units='no_unit'), None))

    # June 1st -> September 1st: summer (JJA).
    if (f.lbtim.ib == 3 and f.lbtim.ic in [1, 2, 4]
            and ((len(f.lbcode) != 5) or
                 (len(f.lbcode) == 5 and f.lbcode.ix not in [20, 21, 22, 23]
                  and f.lbcode.iy not in [20, 21, 22, 23])) and f.lbmon == 6
            and f.lbdat == 1 and f.lbhr == 0 and f.lbmin == 0 and f.lbmond == 9
            and f.lbdatd == 1 and f.lbhrd == 0 and f.lbmind == 0):
        aux_coords_and_dims.append((AuxCoord('jja',
                                             long_name='season',
                                             units='no_unit'), None))

    # September 1st -> December 1st: autumn (SON).
    if (f.lbtim.ib == 3 and f.lbtim.ic in [1, 2, 4]
            and ((len(f.lbcode) != 5) or
                 (len(f.lbcode) == 5 and f.lbcode.ix not in [20, 21, 22, 23]
                  and f.lbcode.iy not in [20, 21, 22, 23])) and f.lbmon == 9
            and f.lbdat == 1 and f.lbhr == 0 and f.lbmin == 0
            and f.lbmond == 12 and f.lbdatd == 1 and f.lbhrd == 0
            and f.lbmind == 0):
        aux_coords_and_dims.append((AuxCoord('son',
                                             long_name='season',
                                             units='no_unit'), None))

    # Special case where year is zero and months match.
    # Month coordinates (--> scalar coordinates)
    # Adds 'month_number', abbreviated 'month' name, and a forecast_period
    # scalar coordinate taken from the forecast time (lbft).
    if (f.lbtim.ib == 2 and f.lbtim.ic in [1, 2, 4]
            and ((len(f.lbcode) != 5) or
                 (len(f.lbcode) == 5 and f.lbcode.ix not in [20, 21, 22, 23]
                  and f.lbcode.iy not in [20, 21, 22, 23])) and f.lbyr == 0
            and f.lbyrd == 0 and f.lbmon == f.lbmond):
        aux_coords_and_dims.append((AuxCoord(f.lbmon,
                                             long_name='month_number'), None))
        aux_coords_and_dims.append((AuxCoord(calendar.month_abbr[f.lbmon],
                                             long_name='month',
                                             units='no_unit'), None))
        aux_coords_and_dims.append((DimCoord(points=f.lbft,
                                             standard_name='forecast_period',
                                             units='hours'), None))

    # "Normal" (i.e. not cross-sectional) lats+lons (--> vector coordinates)
    # lbcode[0] == 1: regular lat/lon grid without bounds; the x coordinate
    # is regularly spaced from origin bzx with step bdx over lbnpt points.
    # A global or hemisphere field (lbhem 0 or 4) wraps, so mark circular.
    if (f.bdx != 0.0 and f.bdx != f.bmdi and len(f.lbcode) != 5
            and f.lbcode[0] == 1):
        dim_coords_and_dims.append(
            (DimCoord.from_regular(f.bzx,
                                   f.bdx,
                                   f.lbnpt,
                                   standard_name=f._x_coord_name(),
                                   units='degrees',
                                   circular=(f.lbhem in [0, 4]),
                                   coord_system=f.coord_system()), 1))

    # lbcode[0] == 2: same regular x grid, but with contiguous bounds.
    if (f.bdx != 0.0 and f.bdx != f.bmdi and len(f.lbcode) != 5
            and f.lbcode[0] == 2):
        dim_coords_and_dims.append(
            (DimCoord.from_regular(f.bzx,
                                   f.bdx,
                                   f.lbnpt,
                                   standard_name=f._x_coord_name(),
                                   units='degrees',
                                   circular=(f.lbhem in [0, 4]),
                                   coord_system=f.coord_system(),
                                   with_bounds=True), 1))

    # Regular y grid without bounds (origin bzy, step bdy, lbrow points).
    if (f.bdy != 0.0 and f.bdy != f.bmdi and len(f.lbcode) != 5
            and f.lbcode[0] == 1):
        dim_coords_and_dims.append(
            (DimCoord.from_regular(f.bzy,
                                   f.bdy,
                                   f.lbrow,
                                   standard_name=f._y_coord_name(),
                                   units='degrees',
                                   coord_system=f.coord_system()), 0))

    # Regular y grid with bounds.
    if (f.bdy != 0.0 and f.bdy != f.bmdi and len(f.lbcode) != 5
            and f.lbcode[0] == 2):
        dim_coords_and_dims.append(
            (DimCoord.from_regular(f.bzy,
                                   f.bdy,
                                   f.lbrow,
                                   standard_name=f._y_coord_name(),
                                   units='degrees',
                                   coord_system=f.coord_system(),
                                   with_bounds=True), 0))

    # Irregular y points (bdy zero or missing): take the explicit vector of
    # latitudes (f.y) and any explicit bounds.  lbcode.iy == 10 denotes a
    # latitude axis on a cross-section field.
    if ((f.bdy == 0.0 or f.bdy == f.bmdi) and
        (len(f.lbcode) != 5 or (len(f.lbcode) == 5 and f.lbcode.iy == 10))):
        dim_coords_and_dims.append(
            (DimCoord(f.y,
                      standard_name=f._y_coord_name(),
                      units='degrees',
                      bounds=f.y_bounds,
                      coord_system=f.coord_system()), 0))

    # Irregular x points; lbcode.ix == 11 denotes a longitude axis on a
    # cross-section field.
    if ((f.bdx == 0.0 or f.bdx == f.bmdi) and
        (len(f.lbcode) != 5 or (len(f.lbcode) == 5 and f.lbcode.ix == 11))):
        dim_coords_and_dims.append(
            (DimCoord(f.x,
                      standard_name=f._x_coord_name(),
                      units='degrees',
                      bounds=f.x_bounds,
                      circular=(f.lbhem in [0, 4]),
                      coord_system=f.coord_system()), 1))

    # Cross-sectional vertical level types (--> vector coordinates)
    # lbcode.iy == 2: y axis is height (km, positive up).
    if (len(f.lbcode) == 5 and f.lbcode.iy == 2
            and (f.bdy == 0 or f.bdy == f.bmdi)):
        dim_coords_and_dims.append((DimCoord(f.y,
                                             standard_name='height',
                                             units='km',
                                             bounds=f.y_bounds,
                                             attributes={'positive':
                                                         'up'}), 0))

    # lbcode.iy == 4: y axis is depth (m, positive down).
    if (len(f.lbcode) == 5 and f.lbcode[-1] == 1 and f.lbcode.iy == 4):
        dim_coords_and_dims.append((DimCoord(f.y,
                                             standard_name='depth',
                                             units='m',
                                             bounds=f.y_bounds,
                                             attributes={'positive':
                                                         'down'}), 0))

    # lbcode.ix == 10: a regularly spaced latitude axis laid along the x
    # (second) data dimension of a cross-section.
    if (len(f.lbcode) == 5 and f.lbcode.ix == 10 and f.bdx != 0
            and f.bdx != f.bmdi):
        dim_coords_and_dims.append(
            (DimCoord.from_regular(f.bzx,
                                   f.bdx,
                                   f.lbnpt,
                                   standard_name=f._y_coord_name(),
                                   units='degrees',
                                   coord_system=f.coord_system()), 1))

    # Pressure axes (hPa) on either dimension of a cross-section.
    if (len(f.lbcode) == 5 and f.lbcode.iy == 1
            and (f.bdy == 0 or f.bdy == f.bmdi)):
        dim_coords_and_dims.append((DimCoord(f.y,
                                             long_name='pressure',
                                             units='hPa',
                                             bounds=f.y_bounds), 0))

    if (len(f.lbcode) == 5 and f.lbcode.ix == 1
            and (f.bdx == 0 or f.bdx == f.bmdi)):
        dim_coords_and_dims.append((DimCoord(f.x,
                                             long_name='pressure',
                                             units='hPa',
                                             bounds=f.x_bounds), 1))

    # Cross-sectional time values (--> vector coordinates)
    # Time axes (iy/ix == 23) are given in days since year 0 on the
    # 360-day calendar, as is conventional for UM climate output.
    if (len(f.lbcode) == 5 and f.lbcode[-1] == 1 and f.lbcode.iy == 23):
        dim_coords_and_dims.append(
            (DimCoord(f.y,
                      standard_name='time',
                      units=cf_units.Unit('days since 0000-01-01 00:00:00',
                                          calendar=cf_units.CALENDAR_360_DAY),
                      bounds=f.y_bounds), 0))

    if (len(f.lbcode) == 5 and f.lbcode[-1] == 1 and f.lbcode.ix == 23):
        dim_coords_and_dims.append(
            (DimCoord(f.x,
                      standard_name='time',
                      units=cf_units.Unit('days since 0000-01-01 00:00:00',
                                          calendar=cf_units.CALENDAR_360_DAY),
                      bounds=f.x_bounds), 1))

    # Time-mean cross-section: reconstruct the regularly spaced time points
    # from the field's start (t1) and end (t2) datetimes.
    if (len(f.lbcode) == 5 and f.lbcode[-1] == 3 and f.lbcode.iy == 23
            and f.lbtim.ib == 2 and f.lbtim.ic == 2):
        epoch_days_unit = cf_units.Unit('days since 0000-01-01 00:00:00',
                                        calendar=cf_units.CALENDAR_360_DAY)
        t1_epoch_days = epoch_days_unit.date2num(f.t1)
        t2_epoch_days = epoch_days_unit.date2num(f.t2)
        # The end time is exclusive, not inclusive.
        dim_coords_and_dims.append((DimCoord(np.linspace(t1_epoch_days,
                                                         t2_epoch_days,
                                                         f.lbrow,
                                                         endpoint=False),
                                             standard_name='time',
                                             units=epoch_days_unit,
                                             bounds=f.y_bounds), 0))

    # Site number (--> scalar coordinate)
    # lbcode.ix == 13: x axis indexes observation/station sites.
    if (len(f.lbcode) == 5 and f.lbcode[-1] == 1 and f.lbcode.ix == 13
            and f.bdx != 0):
        dim_coords_and_dims.append(
            (DimCoord.from_regular(f.bzx,
                                   f.bdx,
                                   f.lbnpt,
                                   long_name='site_number',
                                   units='1'), 1))

    # Site number cross-sections (???)
    # Each site carries a lat/lon domain; the point is the domain midpoint
    # and the bounds are the lower/upper domain edges.  -1.e+30 marks
    # missing domain values, so skip if any are missing.
    if (len(f.lbcode) == 5 and 13 in [f.lbcode.ix, f.lbcode.iy]
            and 11 not in [f.lbcode.ix, f.lbcode.iy]
            and hasattr(f, 'lower_x_domain') and hasattr(f, 'upper_x_domain')
            and all(f.lower_x_domain != -1.e+30)
            and all(f.upper_x_domain != -1.e+30)):
        aux_coords_and_dims.append((AuxCoord(
            (f.lower_x_domain + f.upper_x_domain) / 2.0,
            standard_name=f._x_coord_name(),
            units='degrees',
            bounds=np.array([f.lower_x_domain, f.upper_x_domain]).T,
            coord_system=f.coord_system()), 1 if f.lbcode.ix == 13 else 0))

    if (len(f.lbcode) == 5 and 13 in [f.lbcode.ix, f.lbcode.iy]
            and 10 not in [f.lbcode.ix, f.lbcode.iy]
            and hasattr(f, 'lower_y_domain') and hasattr(f, 'upper_y_domain')
            and all(f.lower_y_domain != -1.e+30)
            and all(f.upper_y_domain != -1.e+30)):
        aux_coords_and_dims.append((AuxCoord(
            (f.lower_y_domain + f.upper_y_domain) / 2.0,
            standard_name=f._y_coord_name(),
            units='degrees',
            bounds=np.array([f.lower_y_domain, f.upper_y_domain]).T,
            coord_system=f.coord_system()), 1 if f.lbcode.ix == 13 else 0))

    # LBPROC codings (--> cell method + attributes)
    # Recognised processing codes: 64 = zonal mean, 128 = time mean,
    # 4096 = time minimum, 8192 = time maximum, 192 = 64 + 128 (both).
    # Anything else (or a recognised code we fail to express as a cell
    # method below) is recorded verbatim in 'ukmo__process_flags'.
    unhandled_lbproc = True
    zone_method = None
    time_method = None
    if f.lbproc == 0:
        unhandled_lbproc = False
    elif f.lbproc == 64:
        zone_method = 'mean'
    elif f.lbproc == 128:
        time_method = 'mean'
    elif f.lbproc == 4096:
        time_method = 'minimum'
    elif f.lbproc == 8192:
        time_method = 'maximum'
    elif f.lbproc == 192:
        time_method = 'mean'
        zone_method = 'mean'

    if time_method is not None:
        # lbtim.ia is the sampling interval in hours (0 = unspecified).
        if f.lbtim.ia != 0:
            intervals = '{} hour'.format(f.lbtim.ia)
        else:
            intervals = None

        if f.lbtim.ib == 2:
            # Aggregation over a period of time.
            cell_methods.append(
                CellMethod(time_method, coords='time', intervals=intervals))
            unhandled_lbproc = False
        elif f.lbtim.ib == 3 and f.lbproc == 128:
            # Aggregation over a period of time within a year, over a number
            # of years.
            # Only mean (lbproc of 128) is handled as the min/max
            # interpretation is ambiguous e.g. decadal mean of daily max,
            # decadal max of daily mean, decadal mean of max daily mean etc.
            cell_methods.append(
                CellMethod('{} within years'.format(time_method),
                           coords='time',
                           intervals=intervals))
            cell_methods.append(
                CellMethod('{} over years'.format(time_method), coords='time'))
            unhandled_lbproc = False
        else:
            # Generic cell method to indicate a time aggregation.
            cell_methods.append(CellMethod(time_method, coords='time'))
            unhandled_lbproc = False

    if zone_method is not None:
        # Zonal means collapse the longitude axis; give the (possibly
        # single-point) longitude coordinate sensible bounds.  lbcode 101
        # indicates a rotated grid.
        if f.lbcode == 1:
            cell_methods.append(CellMethod(zone_method, coords='longitude'))
            for coord, _dim in dim_coords_and_dims:
                if coord.standard_name == 'longitude':
                    if len(coord.points) == 1:
                        coord.bounds = np.array([0., 360.], dtype=np.float32)
                    else:
                        coord.guess_bounds()
            unhandled_lbproc = False
        elif f.lbcode == 101:
            cell_methods.append(
                CellMethod(zone_method, coords='grid_longitude'))
            for coord, _dim in dim_coords_and_dims:
                if coord.standard_name == 'grid_longitude':
                    if len(coord.points) == 1:
                        coord.bounds = np.array([0., 360.], dtype=np.float32)
                    else:
                        coord.guess_bounds()
            unhandled_lbproc = False
        else:
            # A zonal method on an unrecognised grid type cannot be
            # expressed; fall back to recording the raw process flags.
            unhandled_lbproc = True

    if unhandled_lbproc:
        # Record the names of all the single-bit LBPROC flags set in the
        # header, sorted for a stable attribute value.
        attributes["ukmo__process_flags"] = tuple(
            sorted([
                name for value, name in six.iteritems(LBPROC_MAP)
                if isinstance(value, int) and f.lbproc & value
            ]))

    # lbsrce encodes the model id in the low four digits (1111 = UM) and
    # the UM version as vvvv in the high digits (vvvv * 10000 + 1111).
    if (f.lbsrce % 10000) == 1111:
        attributes['source'] = 'Data from Met Office Unified Model'
        # Also define MO-netCDF compliant UM version.
        um_major = (f.lbsrce // 10000) // 100
        if um_major != 0:
            um_minor = (f.lbsrce // 10000) % 100
            attributes['um_version'] = '{:d}.{:d}'.format(um_major, um_minor)

    # Record the STASH code unless every component is zero.
    if (f.lbuser[6] != 0 or (f.lbuser[3] // 1000) != 0
            or (f.lbuser[3] % 1000) != 0):
        attributes['STASH'] = f.stash

    # Phenomenon identification: prefer the STASH -> CF mapping; fall back
    # to the field code (lbfc) mapping when the STASH code is invalid.
    if str(f.stash) in STASH_TO_CF:
        standard_name = STASH_TO_CF[str(f.stash)].standard_name
        units = STASH_TO_CF[str(f.stash)].units
        long_name = STASH_TO_CF[str(f.stash)].long_name

    if (not f.stash.is_valid and f.lbfc in LBFC_TO_CF):
        standard_name = LBFC_TO_CF[f.lbfc].standard_name
        units = LBFC_TO_CF[f.lbfc].units
        long_name = LBFC_TO_CF[f.lbfc].long_name

    # Orography reference field (--> reference target)
    if f.lbuser[3] == 33:
        references.append(ReferenceTarget('orography', None))

    # Surface pressure reference field (--> reference target)
    if f.lbuser[3] == 409 or f.lbuser[3] == 1:
        references.append(ReferenceTarget('surface_air_pressure', None))

    return (references, standard_name, long_name, units, attributes,
            cell_methods, dim_coords_and_dims, aux_coords_and_dims)
Пример #20
0
 def test_coord_stash(self):
     """A STASH attribute token is accepted alongside the default name."""
     stash_token = "stash"
     coord = AuxCoord(1, attributes={"STASH": stash_token})
     self._check(stash_token, coord, default=True)
Пример #21
0
def _cf_height_from_name(z_coord, lower_bound=None, upper_bound=None):
    """
    Parser for the z component of field headings.

    This parse is specifically for handling the z component of NAME field
    headings, which include height above ground level, height above sea level
    and flight level etc.  This function returns an iris coordinate
    representing this field heading.

    Args:

    * z_coord (str):
        A field heading, specifically the z component.

    Kwargs:

    * lower_bound (str):
        An optional NAMEIII-style scalar heading (e.g. 'Z = 50.0 m agl')
        giving the lower bound of the coordinate.
    * upper_bound (str):
        An optional NAMEIII-style scalar heading giving the upper bound.

    Returns:
        An instance of :class:`iris.coords.AuxCoord` representing the
        interpretation of the supplied field heading.

    """

    # NAMEII - integer/float support.
    # Match against height agl, asl and Pa.
    # NOTE: every fragment is a raw string -- sequences such as '\s' and
    # '\.' in plain literals are invalid escape sequences (a
    # DeprecationWarning since Python 3.6, SyntaxWarning in 3.12).
    pattern = re.compile(r'^From\s*'
                         r'(?P<lower_bound>[0-9]+(\.[0-9]+)?)'
                         r'\s*-\s*'
                         r'(?P<upper_bound>[0-9]+(\.[0-9]+)?)'
                         r'\s*(?P<type>m\s*asl|m\s*agl|Pa)'
                         r'(?P<extra>.*)')

    # Match against flight level.
    pattern_fl = re.compile(r'^From\s*'
                            r'(?P<type>FL)'
                            r'(?P<lower_bound>[0-9]+(\.[0-9]+)?)'
                            r'\s*-\s*FL'
                            r'(?P<upper_bound>[0-9]+(\.[0-9]+)?)'
                            r'(?P<extra>.*)')

    # NAMEIII - integer/float support.
    # Match scalar against height agl, asl, Pa, FL
    pattern_scalar = re.compile(r'Z\s*=\s*'
                                r'(?P<point>[0-9]+(\.[0-9]+)?([eE][+-]?\d+)?)'
                                r'\s*(?P<type>m\s*agl|m\s*asl|FL|Pa)'
                                r'(?P<extra>.*)')

    # Normalised unit token -> coordinate name.
    type_name = {'magl': 'height', 'masl': 'altitude', 'FL': 'flight_level',
                 'Pa': 'air_pressure'}
    patterns = [pattern, pattern_fl, pattern_scalar]

    # Defaults used when no pattern matches: an unidentified 'z' coordinate.
    units = 'no-unit'
    points = z_coord
    bounds = None
    standard_name = None
    long_name = 'z'

    # Pre-parse any supplied bound headings; a failed parse leaves the
    # match object as None and the coordinate is simply left unbounded.
    match_ub = match_lb = None
    if upper_bound is not None and lower_bound is not None:
        match_ub = pattern_scalar.match(upper_bound)
        match_lb = pattern_scalar.match(lower_bound)

    for pattern in patterns:
        match = pattern.match(z_coord)
        if match:
            match = match.groupdict()
            # Do not interpret if there is additional information to the match
            if match['extra']:
                break
            units = match['type'].replace(' ', '')
            name = type_name[units]

            # Interpret points if present.
            if 'point' in match:
                points = float(match['point'])
                # Only build bounds when both bound headings parsed;
                # previously a non-matching bound heading raised
                # AttributeError on the None match object.
                if match_ub is not None and match_lb is not None:
                    bounds = np.array([float(match_lb.groupdict()['point']),
                                       float(match_ub.groupdict()['point'])])
            # Interpret points from bounds.
            else:
                bounds = np.array([float(match['lower_bound']),
                                   float(match['upper_bound'])])
                points = bounds.sum() / 2.

            long_name = None
            if name == 'altitude':
                units = units[0]  # 'masl' -> 'm'
                standard_name = name
                long_name = 'altitude above sea level'
            elif name == 'height':
                units = units[0]  # 'magl' -> 'm'
                standard_name = name
                long_name = 'height above ground level'
            elif name == 'air_pressure':
                standard_name = name
            elif name == 'flight_level':
                long_name = name
            units = _parse_units(units)

            break

    coord = AuxCoord(points, units=units, standard_name=standard_name,
                     long_name=long_name, bounds=bounds)

    return coord
Пример #22
0
 def test_coord_stash_default(self):
     """A leading-underscore STASH token falls back to the default name."""
     stash_token = "_stash"  # includes leading underscore
     coord = AuxCoord(1, attributes={"STASH": stash_token})
     self._check(stash_token, coord, default=True)
Пример #23
0
def convert(grib):
    """
    Converts a GRIB message into the corresponding items of Cube metadata.

    Args:

    * grib:
        A :class:`~iris.fileformats.grib.GribWrapper` object.

    Returns:
        A :class:`iris.fileformats.rules.ConversionMetadata` object.

    """
    factories = []
    references = []
    standard_name = None
    long_name = None
    units = None
    attributes = {}
    cell_methods = []
    dim_coords_and_dims = []
    aux_coords_and_dims = []

    # deprecation warning for this code path for edition 2 messages
    if grib.edition == 2:
        msg = ('This GRIB loader is deprecated and will be removed in '
               'a future release.  Please consider using the new '
               'GRIB loader by setting the :class:`iris.Future` '
               'option `strict_grib_load` to True; e.g.:\n'
               'iris.FUTURE.strict_grib_load = True\n'
               'Please report issues you experience to:\n'
               'https://groups.google.com/forum/#!topic/scitools-iris-dev/'
               'lMsOusKNfaU')
        warn_deprecated(msg)

    if \
            (grib.gridType=="reduced_gg"):
        aux_coords_and_dims.append(
            (AuxCoord(grib._y_points,
                      grib._y_coord_name,
                      units='degrees',
                      coord_system=grib._coord_system), 0))
        aux_coords_and_dims.append(
            (AuxCoord(grib._x_points,
                      grib._x_coord_name,
                      units='degrees',
                      coord_system=grib._coord_system), 0))

    if \
            (grib.gridType=="regular_ll") and \
            (grib.jPointsAreConsecutive == 0):
        dim_coords_and_dims.append(
            (DimCoord(grib._y_points,
                      grib._y_coord_name,
                      units='degrees',
                      coord_system=grib._coord_system), 0))
        dim_coords_and_dims.append((DimCoord(grib._x_points,
                                             grib._x_coord_name,
                                             units='degrees',
                                             coord_system=grib._coord_system,
                                             circular=grib._x_circular), 1))

    if \
            (grib.gridType=="regular_ll") and \
            (grib.jPointsAreConsecutive == 1):
        dim_coords_and_dims.append(
            (DimCoord(grib._y_points,
                      grib._y_coord_name,
                      units='degrees',
                      coord_system=grib._coord_system), 1))
        dim_coords_and_dims.append((DimCoord(grib._x_points,
                                             grib._x_coord_name,
                                             units='degrees',
                                             coord_system=grib._coord_system,
                                             circular=grib._x_circular), 0))

    if \
            (grib.gridType=="regular_gg") and \
            (grib.jPointsAreConsecutive == 0):
        dim_coords_and_dims.append(
            (DimCoord(grib._y_points,
                      grib._y_coord_name,
                      units='degrees',
                      coord_system=grib._coord_system), 0))
        dim_coords_and_dims.append((DimCoord(grib._x_points,
                                             grib._x_coord_name,
                                             units='degrees',
                                             coord_system=grib._coord_system,
                                             circular=grib._x_circular), 1))

    if \
            (grib.gridType=="regular_gg") and \
            (grib.jPointsAreConsecutive == 1):
        dim_coords_and_dims.append(
            (DimCoord(grib._y_points,
                      grib._y_coord_name,
                      units='degrees',
                      coord_system=grib._coord_system), 1))
        dim_coords_and_dims.append((DimCoord(grib._x_points,
                                             grib._x_coord_name,
                                             units='degrees',
                                             coord_system=grib._coord_system,
                                             circular=grib._x_circular), 0))

    if \
            (grib.gridType=="rotated_ll") and \
            (grib.jPointsAreConsecutive == 0):
        dim_coords_and_dims.append(
            (DimCoord(grib._y_points,
                      grib._y_coord_name,
                      units='degrees',
                      coord_system=grib._coord_system), 0))
        dim_coords_and_dims.append((DimCoord(grib._x_points,
                                             grib._x_coord_name,
                                             units='degrees',
                                             coord_system=grib._coord_system,
                                             circular=grib._x_circular), 1))

    if \
            (grib.gridType=="rotated_ll") and \
            (grib.jPointsAreConsecutive == 1):
        dim_coords_and_dims.append(
            (DimCoord(grib._y_points,
                      grib._y_coord_name,
                      units='degrees',
                      coord_system=grib._coord_system), 1))
        dim_coords_and_dims.append((DimCoord(grib._x_points,
                                             grib._x_coord_name,
                                             units='degrees',
                                             coord_system=grib._coord_system,
                                             circular=grib._x_circular), 0))

    if grib.gridType in ["polar_stereographic", "lambert"]:
        dim_coords_and_dims.append(
            (DimCoord(grib._y_points,
                      grib._y_coord_name,
                      units="m",
                      coord_system=grib._coord_system), 0))
        dim_coords_and_dims.append(
            (DimCoord(grib._x_points,
                      grib._x_coord_name,
                      units="m",
                      coord_system=grib._coord_system), 1))

    if \
            (grib.edition == 1) and \
            (grib.table2Version < 128) and \
            (grib.indicatorOfParameter == 11) and \
            (grib._cf_data is None):
        standard_name = "air_temperature"
        units = "kelvin"

    if \
            (grib.edition == 1) and \
            (grib.table2Version < 128) and \
            (grib.indicatorOfParameter == 33) and \
            (grib._cf_data is None):
        standard_name = "x_wind"
        units = "m s-1"

    if \
            (grib.edition == 1) and \
            (grib.table2Version < 128) and \
            (grib.indicatorOfParameter == 34) and \
            (grib._cf_data is None):
        standard_name = "y_wind"
        units = "m s-1"

    if \
            (grib.edition == 1) and \
            (grib._cf_data is not None):
        standard_name = grib._cf_data.standard_name
        long_name = grib._cf_data.standard_name or grib._cf_data.long_name
        units = grib._cf_data.units

    if \
            (grib.edition == 1) and \
            (grib.table2Version >= 128) and \
            (grib._cf_data is None):
        long_name = "UNKNOWN LOCAL PARAM " + str(
            grib.indicatorOfParameter) + "." + str(grib.table2Version)
        units = "???"

    if \
            (grib.edition == 1) and \
            (grib.table2Version == 1) and \
            (grib.indicatorOfParameter >= 128):
        long_name = "UNKNOWN LOCAL PARAM " + str(
            grib.indicatorOfParameter) + "." + str(grib.table2Version)
        units = "???"

    if \
            (grib.edition == 2) and \
            (grib._cf_data is not None):
        standard_name = grib._cf_data.standard_name
        long_name = grib._cf_data.long_name
        units = grib._cf_data.units

    if \
            (grib.edition == 1) and \
            (grib._phenomenonDateTime != -1.0):
        aux_coords_and_dims.append(
            (DimCoord(points=grib.startStep,
                      standard_name='forecast_period',
                      units=grib._forecastTimeUnit), None))
        aux_coords_and_dims.append(
            (DimCoord(points=grib.phenomenon_points('hours'),
                      standard_name='time',
                      units=Unit('hours since epoch',
                                 CALENDAR_GREGORIAN)), None))

    def add_bounded_time_coords(aux_coords_and_dims, grib):
        t_bounds = grib.phenomenon_bounds('hours')
        period = Unit('hours').convert(t_bounds[1] - t_bounds[0],
                                       grib._forecastTimeUnit)
        aux_coords_and_dims.append(
            (DimCoord(standard_name='forecast_period',
                      units=grib._forecastTimeUnit,
                      points=grib._forecastTime + 0.5 * period,
                      bounds=[grib._forecastTime,
                              grib._forecastTime + period]), None))
        aux_coords_and_dims.append(
            (DimCoord(standard_name='time',
                      units=Unit('hours since epoch', CALENDAR_GREGORIAN),
                      points=0.5 * (t_bounds[0] + t_bounds[1]),
                      bounds=t_bounds), None))

    if \
            (grib.edition == 1) and \
            (grib.timeRangeIndicator == 2):
        add_bounded_time_coords(aux_coords_and_dims, grib)

    # GRIB1 time range indicators (code table 5) that describe a statistic
    # computed over a bounded time period.  Each one gets bounded time
    # coordinates plus a matching cell method on the 'time' coordinate.
    # Names beginning with '_' have no standard CF cell-method equivalent.
    # This table replaces thirteen near-identical `if` blocks that differed
    # only in the indicator value and the cell-method name.
    _grib1_tri_to_cell_method = {
        3: "mean",
        4: "sum",
        5: "_difference",
        51: "mean",
        113: "mean",
        114: "sum",
        115: "mean",
        116: "sum",
        117: "mean",
        118: "_covariance",
        123: "mean",
        124: "sum",
        125: "standard_deviation",
    }
    if \
            (grib.edition == 1) and \
            (grib.timeRangeIndicator in _grib1_tri_to_cell_method):
        add_bounded_time_coords(aux_coords_and_dims, grib)
        cell_methods.append(
            CellMethod(_grib1_tri_to_cell_method[grib.timeRangeIndicator],
                       coords="time"))

    # GRIB2 instantaneous products (product definition templates 4.0, 4.1):
    # add a scalar forecast_period and the validity time.
    if \
            (grib.edition == 2) and \
            (grib.productDefinitionTemplateNumber in (0, 1)):
        aux_coords_and_dims.append(
            (DimCoord(points=Unit(grib._forecastTimeUnit).convert(
                np.int32(grib._forecastTime), "hours"),
                      standard_name='forecast_period',
                      units="hours"), None))
        aux_coords_and_dims.append(
            (DimCoord(points=grib.phenomenon_points('hours'),
                      standard_name='time',
                      units=Unit('hours since epoch',
                                 CALENDAR_GREGORIAN)), None))

    # GRIB2 statistically-processed products (templates 4.8, 4.9, 4.11):
    # the time coordinates carry the bounds of the processing period.
    if \
            (grib.edition == 2) and \
            (grib.productDefinitionTemplateNumber in (8, 9, 11)):
        add_bounded_time_coords(aux_coords_and_dims, grib)

    # GRIB2 ensemble products (templates 4.1, 4.11) with a perturbation
    # number: record the ensemble membership as a 'realization' cell method.
    if \
            (grib.edition == 2) and \
            (grib.productDefinitionTemplateNumber in (1, 11)) and \
            (grib.perturbationNumber is not None):
        cell_methods.append(
            CellMethod('realization',
                       coords=('realization', ),
                       intervals=('1', ),
                       comments=(' ENS', )))

    # GRIB2 code table 4.10 statistic -> cell-method name.  Names beginning
    # with '_' have no standard CF cell-method equivalent.
    # NOTE: value 7 previously read '_convariance' -- fixed to '_covariance'
    # for consistency with the GRIB1 timeRangeIndicator 118 handling.
    _grib2_statistic_to_cell_method = {
        0: "mean",
        1: "sum",
        2: "maximum",
        3: "minimum",
        4: "_difference",
        5: "_root_mean_square",
        6: "standard_deviation",
        7: "_covariance",
        8: "_difference",
        9: "_ratio",
    }
    if \
            (grib.edition == 2) and \
            (grib.productDefinitionTemplateNumber in (8, 11)) and \
            (grib.typeOfStatisticalProcessing in
             _grib2_statistic_to_cell_method):
        cell_methods.append(
            CellMethod(
                _grib2_statistic_to_cell_method[
                    grib.typeOfStatisticalProcessing],
                coords="time"))

    # GRIB1 pressure-level data ('pl'): the level value is the pressure in
    # hectopascals.
    if \
            (grib.edition == 1) and \
            (grib.levelType == 'pl'):
        aux_coords_and_dims.append((DimCoord(points=grib.level,
                                             long_name="pressure",
                                             units="hPa"), None))

    # GRIB1 surface data ('sfc'): a height coordinate is added only when the
    # phenomenon's CF translation prescribes a fixed height, or when the
    # level type is explicitly 'heightAboveGround' (seen in NCAR data).
    if \
            (grib.edition == 1) and \
            (grib.levelType == 'sfc'):

        if (grib._cf_data is not None) and \
        (grib._cf_data.set_height is not None):
            # Height prescribed by the CF phenomenon translation.
            aux_coords_and_dims.append(
                (DimCoord(points=grib._cf_data.set_height,
                          long_name="height",
                          units="m",
                          attributes={'positive': 'up'}), None))
        elif grib.typeOfLevel == 'heightAboveGround':  # required for NCAR
            aux_coords_and_dims.append((DimCoord(points=grib.level,
                                                 long_name="height",
                                                 units="m",
                                                 attributes={'positive':
                                                             'up'}), None))

    # GRIB1 hybrid model levels ('ml') with vertical coordinate parameters:
    # the 'pv' array is indexed for level_pressure in its first half and for
    # sigma in its second half; a HybridPressureFactory combines them with a
    # 'surface_pressure' reference to derive the pressure coordinate.
    if \
            (grib.edition == 1) and \
            (grib.levelType == 'ml') and \
            (hasattr(grib, 'pv')):
        aux_coords_and_dims.append(
            (AuxCoord(grib.level,
                      standard_name='model_level_number',
                      attributes={'positive': 'up'}), None))
        aux_coords_and_dims.append((DimCoord(grib.pv[grib.level],
                                             long_name='level_pressure',
                                             units='Pa'), None))
        aux_coords_and_dims.append((AuxCoord(
            grib.pv[grib.numberOfCoordinatesValues // 2 + grib.level],
            long_name='sigma'), None))
        factories.append(
            Factory(HybridPressureFactory, [{
                'long_name': 'level_pressure'
            }, {
                'long_name': 'sigma'
            },
                                            Reference('surface_pressure')]))

    if \
            (grib.edition == 2) and \
            (grib.typeOfFirstFixedSurface != grib.typeOfSecondFixedSurface):
        warnings.warn("Different vertical bound types not yet handled.")

    # GRIB2 height (type 103) and pressure (type 100) fixed surfaces.  The
    # true surface value is scaledValue / 10**scaleFactor; a second surface
    # of 255 ('missing') means a single level, otherwise the coordinate is
    # the midpoint of the two surfaces with bounds at each surface.
    if \
            (grib.edition == 2) and \
            (grib.typeOfFirstFixedSurface in (100, 103)):
        first_value = (grib.scaledValueOfFirstFixedSurface /
                       (10.0 ** grib.scaleFactorOfFirstFixedSurface))
        if grib.typeOfFirstFixedSurface == 103:
            name_kwargs = {'standard_name': "height"}
            coord_units = "m"
        else:
            name_kwargs = {'long_name': "pressure"}
            coord_units = "Pa"
        if grib.typeOfSecondFixedSurface == 255:
            # Single surface: scalar level coordinate.
            aux_coords_and_dims.append(
                (DimCoord(points=first_value,
                          units=coord_units,
                          **name_kwargs), None))
        else:
            # Layer between two surfaces: midpoint with bounds.
            second_value = (grib.scaledValueOfSecondFixedSurface /
                            (10.0 ** grib.scaleFactorOfSecondFixedSurface))
            aux_coords_and_dims.append(
                (DimCoord(points=0.5 * (first_value + second_value),
                          units=coord_units,
                          bounds=[first_value, second_value],
                          **name_kwargs), None))

    # GRIB2 depth-below-land-surface layers (type 106); required for NCMRWF.
    if \
            (grib.edition == 2) and \
            (grib.typeOfFirstFixedSurface == 106) and \
            (grib.typeOfSecondFixedSurface != 255):
        first_depth = (grib.scaledValueOfFirstFixedSurface /
                       (10.0 ** grib.scaleFactorOfFirstFixedSurface))
        second_depth = (grib.scaledValueOfSecondFixedSurface /
                        (10.0 ** grib.scaleFactorOfSecondFixedSurface))
        aux_coords_and_dims.append(
            (DimCoord(points=0.5 * (first_depth + second_depth),
                      standard_name="depth",
                      long_name="depth_below_land_surface",
                      units="m",
                      bounds=[first_depth, second_depth]), None))

    # GRIB2 hybrid levels (first fixed surface 105 or 119 -- presumably
    # hybrid level / hybrid pressure level per WMO code table 4.5; confirm)
    # with vertical coordinate values present.  As for GRIB1 'ml' data, the
    # 'pv' array is indexed for level_pressure in its first half and sigma
    # in its second half; a HybridPressureFactory combines them with a
    # 'surface_air_pressure' reference.
    if \
            (grib.edition == 2) and \
            (grib.typeOfFirstFixedSurface in [105, 119]) and \
            (grib.numberOfCoordinatesValues > 0):
        aux_coords_and_dims.append(
            (AuxCoord(grib.scaledValueOfFirstFixedSurface,
                      standard_name='model_level_number',
                      attributes={'positive': 'up'}), None))
        aux_coords_and_dims.append(
            (DimCoord(grib.pv[grib.scaledValueOfFirstFixedSurface],
                      long_name='level_pressure',
                      units='Pa'), None))
        aux_coords_and_dims.append(
            (AuxCoord(grib.pv[grib.numberOfCoordinatesValues // 2 +
                              grib.scaledValueOfFirstFixedSurface],
                      long_name='sigma'), None))
        factories.append(
            Factory(HybridPressureFactory,
                    [{
                        'long_name': 'level_pressure'
                    }, {
                        'long_name': 'sigma'
                    },
                     Reference('surface_air_pressure')]))

    # Record the originating centre as a scalar coordinate when known.
    if grib._originatingCentre != 'unknown':
        aux_coords_and_dims.append((AuxCoord(points=grib._originatingCentre,
                                             long_name='originating_centre',
                                             units='no_unit'), None))

    # GRIB2 ensemble products (templates 4.1, 4.11): record the ensemble
    # member number as a scalar 'realization' coordinate.
    if \
            (grib.edition == 2) and \
            (grib.productDefinitionTemplateNumber in [1, 11]):
        aux_coords_and_dims.append((DimCoord(points=grib.perturbationNumber,
                                             standard_name='realization',
                                             long_name='ensemble_member',
                                             units='no_unit'), None))

    # Any other GRIB2 product definition template is unsupported: flag it in
    # the cube attributes rather than failing the load.
    if \
            (grib.edition == 2) and \
            grib.productDefinitionTemplateNumber not in (0, 1, 8, 11):
        attributes["GRIB_LOAD_WARNING"] = (
            "unsupported GRIB%d ProductDefinitionTemplate: #4.%d" %
            (grib.edition, grib.productDefinitionTemplateNumber))

    # Special case: ECMWF GRIB2 logarithm-of-surface-pressure fields
    # (discipline 0, category 3, parameter 25 on hybrid levels) provide the
    # 'surface_air_pressure' reference needed by the hybrid-pressure
    # factories above, derived by exponentiating the field data.
    if \
            (grib.edition == 2) and \
            (grib.centre == 'ecmf') and \
            (grib.discipline == 0) and \
            (grib.parameterCategory == 3) and \
            (grib.parameterNumber == 25) and \
            (grib.typeOfFirstFixedSurface == 105):
        references.append(
            ReferenceTarget(
                'surface_air_pressure', lambda cube: {
                    'standard_name': 'surface_air_pressure',
                    'units': 'Pa',
                    'data': np.exp(cube.data)
                }))

    # Bundle everything gathered above into the conversion result.
    return ConversionMetadata(factories, references, standard_name, long_name,
                              units, attributes, cell_methods,
                              dim_coords_and_dims, aux_coords_and_dims)
Пример #24
0
 def test_mixture(self):
     """Coords supplied as a mix of coordinate objects and name strings
     should all be rendered by name in the string form."""
     name = "air_temperature"
     cell_method = CellMethod(self.method,
                              coords=[AuxCoord(1, standard_name=name), name])
     self.assertEqual(str(cell_method),
                      "{}: {}, {}".format(self.method, name, name))
Пример #25
0
 def setUp(self):
     # A simple 1-d temperature cube with a scalar height coordinate,
     # shared as the starting fixture for the tests in this class.
     self.cube1 = Cube([1, 2, 3], "air_temperature", units="K")
     self.cube1.add_aux_coord(AuxCoord([0], "height", units="m"))
Пример #26
0
 def test_mixture_default(self):
     """A long_name containing a space is not a valid token, so both the
     coord and the plain string render as 'unknown'."""
     name = "air temperature"  # the space makes this an invalid token
     cell_method = CellMethod(self.method,
                              coords=[AuxCoord(1, long_name=name), name])
     self.assertEqual(str(cell_method),
                      "{}: unknown, unknown".format(self.method))
Пример #27
0
    def test_multidim(self):
        """Regrid >2D masked data and check values land in the right cells,
        with out-of-grid points masked in the result."""
        # Testing with >2D data to demonstrate correct operation over
        # additional non-XY dimensions (including data masking), which is
        # handled by the PointInCell wrapper class.

        # Define a simple target grid first, in plain latlon coordinates.
        plain_latlon_cs = GeogCS(EARTH_RADIUS)
        grid_x_coord = DimCoord(points=[15.0, 25.0, 35.0],
                                bounds=[[10.0, 20.0],
                                        [20.0, 30.0],
                                        [30.0, 40.0]],
                                standard_name='longitude',
                                units='degrees',
                                coord_system=plain_latlon_cs)
        grid_y_coord = DimCoord(points=[-30.0, -50.0],
                                bounds=[[-20.0, -40.0], [-40.0, -60.0]],
                                standard_name='latitude',
                                units='degrees',
                                coord_system=plain_latlon_cs)
        grid_cube = Cube(np.zeros((2, 3)))
        grid_cube.add_dim_coord(grid_y_coord, 0)
        grid_cube.add_dim_coord(grid_x_coord, 1)

        # Define some key points in true-lat/lon that have known positions
        # First 3x2 points in the centre of each output cell.
        x_centres, y_centres = np.meshgrid(grid_x_coord.points,
                                           grid_y_coord.points)
        # An extra point also falling in cell 1, 1
        x_in11, y_in11 = 26.3, -48.2
        # An extra point completely outside the target grid
        x_out, y_out = 70.0, -40.0

        # Define a rotated coord system for the source data
        pole_lon, pole_lat = -125.3, 53.4
        src_cs = RotatedGeogCS(grid_north_pole_latitude=pole_lat,
                               grid_north_pole_longitude=pole_lon,
                               ellipsoid=plain_latlon_cs)

        # Concatenate all the testpoints in a flat array, and find the rotated
        # equivalents.
        xx = list(x_centres.flat[:]) + [x_in11, x_out]
        yy = list(y_centres.flat[:]) + [y_in11, y_out]
        xx, yy = rotate_pole(lons=np.array(xx),
                             lats=np.array(yy),
                             pole_lon=pole_lon,
                             pole_lat=pole_lat)
        # Define handy index numbers for all these.
        i00, i01, i02, i10, i11, i12, i_in, i_out = range(8)

        # Build test data in the shape Z,YX = (3, 8)
        data = [[1, 2, 3, 11, 12, 13, 7, 99],
                [1, 2, 3, 11, 12, 13, 7, 99],
                [7, 6, 5, 51, 52, 53, 12, 1]]
        mask = [[0, 0, 0, 0, 0, 0, 0, 0],
                [0, 1, 0, 0, 0, 0, 1, 0],
                [0, 0, 0, 0, 0, 0, 0, 0]]
        src_data = np.ma.array(data, mask=mask, dtype=float)

        # Make the source cube.
        src_cube = Cube(src_data)
        src_x = AuxCoord(xx,
                         standard_name='grid_longitude',
                         units='degrees',
                         coord_system=src_cs)
        src_y = AuxCoord(yy,
                         standard_name='grid_latitude',
                         units='degrees',
                         coord_system=src_cs)
        src_z = DimCoord(np.arange(3), long_name='z')
        src_cube.add_dim_coord(src_z, 0)
        src_cube.add_aux_coord(src_x, 1)
        src_cube.add_aux_coord(src_y, 1)
        # Add in some extra metadata, to ensure it gets copied over.
        src_cube.add_aux_coord(DimCoord([0], long_name='extra_scalar_coord'))
        src_cube.attributes['extra_attr'] = 12.3

        # Define what the expected answers should be, shaped (3, 2, 3).
        # Cell (1, 1) averages its two contributing points; -999 marks a
        # cell whose only contributor was masked out.
        expected_result = [
            [[1.0, 2.0, 3.0],
             [11.0, 0.5 * (12 + 7), 13.0]],
            [[1.0, -999, 3.0],
             [11.0, 12.0, 13.0]],
            [[7.0, 6.0, 5.0],
             [51.0, 0.5 * (52 + 12), 53.0]],
            ]
        expected_result = np.ma.masked_less(expected_result, 0)

        # Perform the calculation with the regridder.
        regridder = Regridder(src_cube, grid_cube)

        # Check all is as expected.
        result = regridder(src_cube)
        self.assertEqual(result.coord('z'), src_cube.coord('z'))
        self.assertEqual(result.coord('extra_scalar_coord'),
                         src_cube.coord('extra_scalar_coord'))
        self.assertEqual(result.coord('longitude'),
                         grid_cube.coord('longitude'))
        self.assertEqual(result.coord('latitude'),
                         grid_cube.coord('latitude'))
        self.assertMaskedArrayAlmostEqual(result.data, expected_result)
Пример #28
0
def set_up_spot_cube(point_data,
                     validity_time=1487311200,
                     forecast_period=0,
                     number_of_sites=3):
    """Build an example spot-data cube at a given validity time and
    forecast period for a given number of sites.

    The cube has dimension coordinates of time, percentile and index,
    with a single point in the time and percentile coordinates and one
    index point per site.  Auxiliary coordinates describe each site
    (latitude, longitude, altitude, wmo_site) along with the
    forecast_period, and a scalar forecast_reference_time is attached.

    Args:
        point_data (float):
            The value for the data in the cube, which will be used for
            every site.
    Keyword Args:
        validity_time (float):
            The value for the validity time for your data, defaults to
            1487311200 i.e. 2017-02-17 06:00:00
        forecast_period (float):
            The forecast period for your cube in hours.
        number_of_sites (int):
            The number of sites you want in your output cube.
    Returns:
        cube (iris.cube.Cube):
            Example spot data cube.
    """
    epoch_unit = cf_units.Unit('seconds since 1970-01-01 00:00:00',
                               calendar='gregorian')
    site_numbers = np.arange(number_of_sites)

    # Dimension coordinates: a single time and percentile, one index
    # value per site.
    time_coord = DimCoord(np.array([validity_time]), standard_name='time',
                          units=epoch_unit)
    percentile_coord = DimCoord(np.array([50.]), long_name="percentile",
                                units='%')
    index_coord = DimCoord(site_numbers, units=cf_units.Unit('1'),
                           long_name='index')

    # Auxiliary coordinates describing each site, mapped to the index
    # dimension.
    geog_cs = GeogCS(6371229.0)
    latitude_coord = AuxCoord(np.ones(number_of_sites) * 54,
                              standard_name='latitude',
                              units='degrees',
                              coord_system=geog_cs)
    longitude_coord = AuxCoord(site_numbers,
                               standard_name='longitude',
                               units='degrees',
                               coord_system=geog_cs)
    altitude_coord = DimCoord(site_numbers + 100,
                              standard_name='altitude', units='m')
    wmo_site_coord = AuxCoord(site_numbers + 1000,
                              units=cf_units.Unit('1'),
                              long_name='wmo_site')
    fp_coord = AuxCoord(np.array(forecast_period * 3600),
                        standard_name='forecast_period',
                        units='seconds')

    # Every site carries the same data value.
    values = np.ones((1, 1, number_of_sites)) * point_data
    cube = Cube(values,
                standard_name="air_temperature",
                dim_coords_and_dims=[(time_coord, 0),
                                     (percentile_coord, 1),
                                     (index_coord, 2)],
                aux_coords_and_dims=[(latitude_coord, 2),
                                     (longitude_coord, 2),
                                     (altitude_coord, 2),
                                     (wmo_site_coord, 2),
                                     (fp_coord, 0)],
                units="K")

    # Scalar forecast_reference_time: validity time minus the forecast
    # period (converted to seconds).
    cube.add_aux_coord(
        AuxCoord(np.array([validity_time - forecast_period * 3600]),
                 standard_name='forecast_reference_time',
                 units=epoch_unit))
    return cube
Пример #29
0
 def test_numpy_simple(self):
     """_nd_points should hand back the points unchanged when the coord
     already spans the requested dimensions."""
     data = np.arange(12).reshape(4, 3)
     result = AuxCoordFactory._nd_points(AuxCoord(data), (0, 1), 2)
     self.assertArrayEqual(result, data)
Пример #30
0
 def test_coord_with_irregular_step(self):
     """is_regular should capture the `CoordinateNotRegularError` raised
     for unevenly spaced points and report False."""
     irregular_coord = AuxCoord(np.array([2, 5, 1, 4]))
     self.assertFalse(is_regular(irregular_coord))