Example #1
 def test_set_lazy_bounds(self):
     # Setting new lazy bounds.
     coord = AuxCoord(self.pts_real, bounds=self.bds_real)
     new_bounds = self.bds_lazy + 102.3
     coord.bounds = new_bounds
     result = coord.core_bounds()
     self.assertEqualLazyArraysAndDtypes(result, new_bounds)
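Note: the points/bounds tests above and below rely on fixtures such as self.pts_real, self.bds_real, self.pts_lazy and self.bds_lazy. A minimal sketch of how such arrays might be built (the names and values here are assumptions, not the original setUp code):

import dask.array as da
import numpy as np

# Assumed fixture data: real points/bounds plus lazy (dask) equivalents.
pts_real = np.arange(6, dtype=np.float64)
bds_real = np.stack([pts_real - 0.5, pts_real + 0.5], axis=-1)  # shape (6, 2)
pts_lazy = da.from_array(pts_real, chunks=pts_real.shape)
bds_lazy = da.from_array(bds_real, chunks=bds_real.shape)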
Example #2
    def _add_available_aux_coords(self, cube, filenames):
        from iris.aux_factory import HybridPressureFactory
        from iris.coords import AuxCoord
        from iris.exceptions import CoordinateNotFoundError
        import iris

        if cube.coords('hybrid A coefficient at layer midpoints'):

            # First convert the hybrid coefficients to hPa, so that air pressure will be in hPa
            cube.coord('hybrid A coefficient at layer midpoints').convert_units('hPa')

            try:
                surface_pressure = cube.coord('surface pressure')
            except CoordinateNotFoundError:
                # If there isn't a surface pressure coordinate, try to pull
                # out the lowest pressure level instead.
                with demote_warnings():
                    surface_pressure_cubes = iris.load(filenames, 'atmospheric pressure at interfaces',
                                                       callback=self.load_multiple_files_callback)
                surface_pressure_cube = surface_pressure_cubes.concatenate_cube()[:, -1, :, :]
                surface_pressure = AuxCoord(points=surface_pressure_cube.data, long_name='surface pressure', units='Pa')
                cube.add_aux_coord(surface_pressure, (0, 2, 3))
 
            surface_pressure.convert_units('hPa')
 
            if len(cube.coords(long_name='hybrid level at layer midpoints')) > 0:
                cube.add_aux_factory(HybridPressureFactory(
                    delta=cube.coord('hybrid A coefficient at layer midpoints'),
                    sigma=cube.coord('hybrid B coefficient at layer midpoints'),
                    surface_air_pressure=surface_pressure))
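For reference, the HybridPressureFactory used above derives an air pressure coordinate from the hybrid coefficients as delta + sigma * surface_air_pressure; a tiny standalone illustration with invented values:

import numpy as np

delta = np.array([0.0, 20.0, 50.0])    # 'hybrid A' coefficient, here in hPa
sigma = np.array([0.99, 0.95, 0.85])   # 'hybrid B' coefficient (dimensionless)
surface_air_pressure = 1013.0          # hPa
air_pressure = delta + sigma * surface_air_pressure
# One air-pressure value per model level, in hPa (hence the hPa conversions
# in the example above).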
Example #3
 def test_2d_contiguous_both_dirs(self):
     coord = AuxCoord(self.points_3by3, bounds=self.lon_bounds_3by3)
     contiguous, diffs = coord._discontiguity_in_bounds()
     diffs_along_x, diffs_along_y = diffs
     self.assertTrue(contiguous)
     self.assertTrue(not diffs_along_x.any())
     self.assertTrue(not diffs_along_y.any())
Example #4
 def test_lazy_bounds(self):
     # Getting lazy bounds realises them.
     coord = AuxCoord(self.pts_real, bounds=self.bds_lazy)
     self.assertTrue(coord.has_lazy_bounds())
     result = coord.bounds
     self.assertFalse(coord.has_lazy_bounds())
     self.assertEqualRealArraysAndDtypes(result, self.bds_real)
Example #5
 def test_2d_lat_bounds(self):
     coord = AuxCoord(np.array([[1, 1], [3, 3]]),
                      bounds=np.array([[[0, 0, 2, 2], [0, 0, 2, 2]],
                                       [[2, 2, 4, 4], [2, 2, 4, 4]]]))
     expected = np.array([[0, 0, 0], [2, 2, 2], [4, 4, 4]])
     result = coord.contiguous_bounds()
     self.assertArrayEqual(result, expected)
Example #6
 def test_real_set_lazy(self):
     # Setting new lazy points does not make a copy.
     coord = AuxCoord(self.pts_real)
     new_pts = self.pts_lazy + 102.3
     coord.points = new_pts
     result = coord.core_points()
     self.assertEqualLazyArraysAndDtypes(result, new_pts)
Example #7
 def test_lazy_points(self):
     # Getting lazy points realises them.
     coord = AuxCoord(self.pts_lazy)
     self.assertTrue(coord.has_lazy_points())
     result = coord.points
     self.assertFalse(coord.has_lazy_points())
     self.assertEqualRealArraysAndDtypes(result, self.pts_real)
Example #8
 def test_set_points_with_lazy_bounds(self):
     # Setting points does not touch lazy bounds.
     coord = AuxCoord(self.pts_real, bounds=self.bds_lazy)
     new_pts = self.pts_real + 102.3
     coord.points = new_pts
     result = coord.core_bounds()
     self.assertEqualLazyArraysAndDtypes(result, self.bds_lazy)
Example #9
    def test_serialize(self):
        # Collapse a string AuxCoord, causing it to be serialised.
        string = Pair(np.array(['two', 'four', 'six', 'eight']),
                      np.array([['one', 'three'],
                                ['three', 'five'],
                                ['five', 'seven'],
                                ['seven', 'nine']]))
        string_nobounds = Pair(np.array(['ecks', 'why', 'zed']),
                               None)
        string_multi = Pair(np.array(['three', 'six', 'nine']),
                            np.array([['one', 'two', 'four', 'five'],
                                      ['four', 'five', 'seven', 'eight'],
                                      ['seven', 'eight', 'ten', 'eleven']]))

        def _serialize(data):
            return '|'.join(str(item) for item in data.flatten())

        for units in ['unknown', 'no_unit']:
            for points, bounds in [string, string_nobounds, string_multi]:
                coord = AuxCoord(points=points, bounds=bounds, units=units)
                collapsed_coord = coord.collapsed()
                self.assertArrayEqual(collapsed_coord.points,
                                      _serialize(points))
                if bounds is not None:
                    for index in np.ndindex(bounds.shape[1:]):
                        index_slice = (slice(None),) + tuple(index)
                        self.assertArrayEqual(
                            collapsed_coord.bounds[index_slice],
                            _serialize(bounds[index_slice]))
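For reference, the _serialize helper above simply joins the flattened string values with '|'; e.g. (invented array):

import numpy as np

data = np.array(['two', 'four', 'six'])
print('|'.join(str(item) for item in data.flatten()))  # -> two|four|six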
Example #10
 def test_2d_discontiguous_along_y(self):
     coord = AuxCoord(self.points_3by3[::2, :],
                      bounds=self.lat_bounds_3by3[::2, :, :])
     contiguous, diffs = coord._discontiguity_in_bounds()
     diffs_along_x, diffs_along_y = diffs
     self.assertFalse(contiguous)
     self.assertTrue(not diffs_along_x.any())
     self.assertArrayEqual(diffs_along_y, np.array([[True, True, True]]))
Example #11
 def test_numeric_nd(self):
     # Contiguous only defined for 2d bounds.
     coord = AuxCoord(points=np.array([3, 6, 9]),
                      bounds=np.array([[1, 2, 4, 5],
                                       [4, 5, 7, 8],
                                       [7, 8, 10, 11]]))
     with self.assertRaises(ValueError):
         coord.collapsed()
Example #12
 def test_real_points_with_real_bounds(self):
     # Getting real points does not change real bounds.
     coord = AuxCoord(self.pts_real, bounds=self.bds_real)
     coord.points
     result = coord.core_bounds()
     self.assertArraysShareData(
         result, self.bds_real,
         'Bounds do not share data with the provided array.')
Example #13
 def test_real_set_real(self):
     # Setting new real points does not make a copy.
     coord = AuxCoord(self.pts_real)
     new_pts = self.pts_real + 102.3
     coord.points = new_pts
     result = coord.core_points()
     self.assertArraysShareData(
         result, new_pts,
         'Points do not share data with the assigned array.')
Example #14
 def test_2d_discontiguous_along_x(self):
     coord = AuxCoord(self.points_3by3[:, ::2],
                      bounds=self.lon_bounds_3by3[:, ::2, :])
     contiguous, diffs = coord._discontiguity_in_bounds()
     diffs_along_x, diffs_along_y = diffs
     self.assertFalse(contiguous)
     self.assertArrayEqual(diffs_along_x,
                           np.array([True, True, True]).reshape(3, 1))
     self.assertTrue(not diffs_along_y.any())
Example #15
 def test_set_real_bounds(self):
     # Setting new real bounds does not make a copy.
     coord = AuxCoord(self.pts_real, bounds=self.bds_real)
     new_bounds = self.bds_real + 102.3
     coord.bounds = new_bounds
     result = coord.core_bounds()
     self.assertArraysShareData(
         result, new_bounds,
         'Bounds do not share data with the assigned array.')
Example #16
 def test_2d_one_cell_along_y(self):
     # Test a 2D coord with a single cell along the y axis, where the coord
     # has shape (1, 2).
     coord = AuxCoord(self.points_3by3[:1, :],
                      bounds=self.lon_bounds_3by3[:1, :, :])
     contiguous, diffs = coord._discontiguity_in_bounds()
     diffs_along_x, diffs_along_y = diffs
     self.assertTrue(contiguous)
     self.assertTrue(not diffs_along_x.any())
     self.assertTrue(not diffs_along_y.any())
Example #17
 def test_2d_one_cell(self):
     # Test a 2D coord with a single cell, where the coord has shape (1, 1).
     coord = AuxCoord(self.points_3by3[:1, :1],
                      bounds=self.lon_bounds_3by3[:1, :1, :])
     contiguous, diffs = coord._discontiguity_in_bounds()
     diffs_along_x, diffs_along_y = diffs
     expected_diffs = np.array([], dtype=np.int64)
     self.assertTrue(contiguous)
     self.assertArrayEqual(diffs_along_x, expected_diffs.reshape(1, 0))
     self.assertArrayEqual(diffs_along_y, expected_diffs.reshape(0, 1))
Example #18
 def test_2d_one_cell_along_x(self):
     # Test a 2D coord with a single cell along the x axis, where the coord
     # has shape (2, 1).
     coord = AuxCoord(self.points_3by3[:, :1],
                      bounds=self.lat_bounds_3by3[:, :1, :])
     contiguous, diffs = coord._discontiguity_in_bounds()
     diffs_along_x, diffs_along_y = diffs
     self.assertTrue(contiguous)
     self.assertTrue(not diffs_along_x.any())
     self.assertArrayEqual(diffs_along_y, np.array([0, 0]).reshape(2, 1))
Example #19
 def test_2d_contiguous_along_x_atol(self):
     coord = AuxCoord(self.points_3by3[:, ::2],
                      bounds=self.lon_bounds_3by3[:, ::2, :])
     # Set a high atol that allows small discontiguities.
     contiguous, diffs = coord._discontiguity_in_bounds(atol=5)
     diffs_along_x, diffs_along_y = diffs
     self.assertTrue(contiguous)
     self.assertArrayEqual(diffs_along_x,
                           np.array([False, False, False]).reshape(3, 1))
     self.assertTrue(not diffs_along_y.any())
Example #20
    def test_coord_input(self):
        source = AuxCoord(self.src_levels.data)
        source.metadata = self.src_levels.metadata

        for axis in self.axes:
            result = relevel(self.cube,
                             source,
                             [0, 12, 13],
                             axis=axis)
            self.assertEqual(result.shape, (3, 1, 1))
            assert_array_equal(result.data.flatten(), [0, 120, np.nan])
Example #21
 def test_lazy_complex(self):
     raw_points = np.arange(12).reshape(4, 3)
     points = as_lazy_data(raw_points, raw_points.shape)
     coord = AuxCoord(points)
     self.assertTrue(is_lazy_data(coord.core_points()))
     result = AuxCoordFactory._nd_points(coord, (3, 2), 5)
     # Check we haven't triggered the loading of the coordinate values.
     self.assertTrue(is_lazy_data(coord.core_points()))
     self.assertTrue(is_lazy_data(result))
     expected = raw_points.T[np.newaxis, np.newaxis, ..., np.newaxis]
     self.assertArrayEqual(result, expected)
Example #22
 def test_2d_discontiguous_along_x_and_y(self):
     coord = AuxCoord(np.array([[1, 5], [3, 5]]),
                      bounds=np.array([[[0, 2, 2, 0], [4, 6, 6, 4]],
                                       [[2, 4, 4, 2], [4, 6, 6, 4]]]))
     contiguous, diffs = coord._discontiguity_in_bounds()
     diffs_along_x, diffs_along_y = diffs
     exp_x_diffs = np.array([True, False]).reshape(2, 1)
     exp_y_diffs = np.array([True, False]).reshape(1, 2)
     self.assertFalse(contiguous)
     self.assertArrayEqual(diffs_along_x, exp_x_diffs)
     self.assertArrayEqual(diffs_along_y, exp_y_diffs)
Example #23
 def test_2d_discontiguous_mod_360(self):
     # Test that longitude coordinates are adjusted by the 360 modulus when
     # calculating the discontiguities in contiguous bounds.
     coord = AuxCoord(
         [[175, -175], [175, -175]], standard_name='longitude',
         bounds=np.array([[[170, 180, 180, 170], [10, 20, 20, 10]],
                          [[170, 180, 180, 170], [10, 20, 20, 10]]]))
     contiguous, diffs = coord._discontiguity_in_bounds()
     diffs_along_x, diffs_along_y = diffs
     self.assertFalse(contiguous)
     self.assertArrayEqual(diffs_along_x, np.array([[True], [True]]))
     self.assertTrue(not diffs_along_y.any())
Example #24
    def test_lazy_nd_points_and_bounds(self):
        import dask.array as da

        self.setupTestArrays((3, 4))
        coord = AuxCoord(self.pts_lazy, bounds=self.bds_lazy)

        collapsed_coord = coord.collapsed()

        self.assertTrue(collapsed_coord.has_lazy_points())
        self.assertTrue(collapsed_coord.has_lazy_bounds())

        self.assertArrayEqual(collapsed_coord.points, da.array([55]))
        self.assertArrayEqual(collapsed_coord.bounds, da.array([[-2, 112]]))
Example #25
 def test_lazy_complex(self):
     raw_points = np.arange(12).reshape(4, 3)
     points = as_lazy_data(raw_points, raw_points.shape)
     raw_bounds = np.arange(24).reshape(4, 3, 2)
     bounds = as_lazy_data(raw_bounds, raw_bounds.shape)
     coord = AuxCoord(points, bounds=bounds)
     self.assertTrue(is_lazy_data(coord.core_bounds()))
     result = AuxCoordFactory._nd_bounds(coord, (3, 2), 5)
     # Check we haven't triggered the loading of the coordinate values.
     self.assertTrue(is_lazy_data(coord.core_bounds()))
     self.assertTrue(is_lazy_data(result))
     expected = raw_bounds.transpose((1, 0, 2)).reshape(1, 1, 3, 4, 1, 2)
     self.assertArrayEqual(result, expected)
Example #26
 def test_2d_discontiguous_mod_360_not_longitude(self):
     # Test that non-longitude coordinates are not adjusted by the 360
     # modulus when calculating the discontiguities in discontiguous bounds.
     coord = AuxCoord(
         [[-150, 350], [-150, 350]], standard_name='height',
         bounds=np.array([[[-400, 100, 100, -400], [200, 600, 600, 200]],
                          [[-400, 100, 100, -400], [200, 600, 600, 200]]])
         )
     contiguous, diffs = coord._discontiguity_in_bounds()
     diffs_along_x, diffs_along_y = diffs
     self.assertFalse(contiguous)
     self.assertArrayEqual(diffs_along_x, np.array([[True], [True]]))
     self.assertTrue(not diffs_along_y.any())
Example #27
    def test_lazy_nd_bounds(self):
        import dask.array as da

        self.setupTestArrays((3, 4))
        coord = AuxCoord(self.pts_real, bounds=self.bds_lazy)

        collapsed_coord = coord.collapsed()

        # Note that the new points get recalculated from the lazy bounds
        #  and so end up as lazy
        self.assertTrue(collapsed_coord.has_lazy_points())
        self.assertTrue(collapsed_coord.has_lazy_bounds())

        self.assertArrayEqual(collapsed_coord.points, np.array([55]))
        self.assertArrayEqual(collapsed_coord.bounds, da.array([[-2, 112]]))
Example #28
    def test_xy_dimensionality(self):
        u, v = uv_cubes()
        # Replace 1d lat with 2d lat.
        x = u.coord('grid_longitude').points
        y = u.coord('grid_latitude').points
        x2d, y2d = np.meshgrid(x, y)
        lat_2d = AuxCoord(y2d, 'grid_latitude', units='degrees',
                          coord_system=u.coord('grid_latitude').coord_system)
        for cube in (u, v):
            cube.remove_coord('grid_latitude')
            cube.add_aux_coord(lat_2d.copy(), (0, 1))

        with self.assertRaisesRegex(
                ValueError,
                'x and y coordinates must have the same number of dimensions'):
            rotate_winds(u, v, iris.coord_systems.OSGB())
Example #29
 def construct_new_coord_given_points(coord, points):
     # Handle what was previously a DimCoord which may no longer be
     # monotonic.
     try:
         return coord.copy(points)
     except ValueError:
         return AuxCoord.from_coord(coord).copy(points)
Example #30
 def test_no_slider_from_aux(self):
     coords = ('grid_longitude', 'grid_latitude')
     self.cube.remove_coord('time')
     fp = self.cube.coord('forecast_period')
     self.cube.remove_coord('forecast_period')
     aux = AuxCoord.from_coord(fp)
     self.cube.add_aux_coord(aux, 0)
     plot = Plot2D(self.cube, self.axes, coords=coords)
     self.assertEqual(plot._slider_dim_by_name, {})
Example #31
def _cf_height_from_name(z_coord, lower_bound=None, upper_bound=None):
    """
    Parser for the z component of field headings.

    This parser is specifically for handling the z component of NAME field
    headings, which include height above ground level, height above sea level
    and flight level etc.  This function returns an iris coordinate
    representing this field heading.

    Args:

    * z_coord (list):
        A field heading, specifically the z component.

    Returns:
        An instance of :class:`iris.coords.AuxCoord` representing the
        interpretation of the supplied field heading.

    """

    # NAMEII - integer/float support.
    # Match against height agl, asl and Pa.
    pattern = re.compile(r'^From\s*'
                         r'(?P<lower_bound>[0-9]+(\.[0-9]+)?)'
                         r'\s*-\s*'
                         r'(?P<upper_bound>[0-9]+(\.[0-9]+)?)'
                         r'\s*(?P<type>m\s*asl|m\s*agl|Pa)'
                         r'(?P<extra>.*)')

    # Match against flight level.
    pattern_fl = re.compile(r'^From\s*'
                            r'(?P<type>FL)'
                            r'(?P<lower_bound>[0-9]+(\.[0-9]+)?)'
                            r'\s*-\s*FL'
                            r'(?P<upper_bound>[0-9]+(\.[0-9]+)?)'
                            r'(?P<extra>.*)')

    # NAMEIII - integer/float support.
    # Match scalar against height agl, asl, Pa, FL
    pattern_scalar = re.compile(r'Z\s*=\s*'
                                r'(?P<point>[0-9]+(\.[0-9]+)?([eE][+-]?\d+)?)'
                                r'\s*(?P<type>m\s*agl|m\s*asl|FL|Pa)'
                                r'(?P<extra>.*)')

    type_name = {'magl': 'height', 'masl': 'altitude', 'FL': 'flight_level',
                 'Pa': 'air_pressure'}
    patterns = [pattern, pattern_fl, pattern_scalar]

    units = 'no-unit'
    points = z_coord
    bounds = None
    standard_name = None
    long_name = 'z'

    if upper_bound is not None and lower_bound is not None:
        match_ub = pattern_scalar.match(upper_bound)
        match_lb = pattern_scalar.match(lower_bound)

    for pattern in patterns:
        match = pattern.match(z_coord)
        if match:
            match = match.groupdict()
            # Do not interpret if there is additional information to the match
            if match['extra']:
                break
            units = match['type'].replace(' ', '')
            name = type_name[units]

            # Interpret points if present.
            if 'point' in match:
                points = float(match['point'])
                if upper_bound is not None and lower_bound is not None:
                    bounds = np.array([float(match_lb.groupdict()['point']),
                                       float(match_ub.groupdict()['point'])])
            # Interpret points from bounds.
            else:
                bounds = np.array([float(match['lower_bound']),
                                   float(match['upper_bound'])])
                points = bounds.sum() / 2.

            long_name = None
            if name == 'altitude':
                units = units[0]
                standard_name = name
                long_name = 'altitude above sea level'
            elif name == 'height':
                units = units[0]
                standard_name = name
                long_name = 'height above ground level'
            elif name == 'air_pressure':
                standard_name = name
            elif name == 'flight_level':
                long_name = name
            units = _parse_units(units)

            break

    coord = AuxCoord(points, units=units, standard_name=standard_name,
                     long_name=long_name, bounds=bounds)

    return coord
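To illustrate the NAMEIII scalar branch above, here is the same regular expression applied to an invented heading string:

import re

pattern_scalar = re.compile(r'Z\s*=\s*'
                            r'(?P<point>[0-9]+(\.[0-9]+)?([eE][+-]?\d+)?)'
                            r'\s*(?P<type>m\s*agl|m\s*asl|FL|Pa)'
                            r'(?P<extra>.*)')
groups = pattern_scalar.match('Z = 50.00000 m agl').groupdict()
# groups['point'] -> '50.00000', groups['type'] -> 'm agl', groups['extra'] -> ''
# so the function above builds a 'height' coordinate of 50.0 m above ground level.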
Example #32
def ocean_sigma_z():
    """
    Return a sample cube with an
    :class:`iris.aux_factory.OceanSigmaZFactory` vertical coordinate.

    This is a fairly small cube with real coordinate arrays.  The coordinate
    values are derived from the sample data linked at
    https://github.com/SciTools/iris/pull/509#issuecomment-23565381.

    """
    co_time = DimCoord([0.0, 1.0], standard_name="time", units="")
    co_lats = DimCoord(
        [-58.1, -52.7, -46.9], standard_name="latitude", units=Unit("degrees")
    )
    co_lons = DimCoord(
        [65.1, 72.9, 83.7, 96.5],
        standard_name="longitude",
        units=Unit("degrees"),
    )
    co_ssh = AuxCoord(
        [
            [
                [-0.63157895, -0.52631579, -0.42105263, -0.31578947],
                [-0.78947368, -0.68421053, -0.57894737, -0.47368421],
                [-0.94736842, -0.84210526, -0.73684211, -0.63157895],
            ],
            [
                [-0.84210526, -0.73684211, -0.63157895, -0.52631579],
                [-1.00000000, -0.89473684, -0.78947368, -0.68421053],
                [-1.15789474, -1.05263158, -0.94736842, -0.84210526],
            ],
        ],
        standard_name="sea_surface_height",
        units=Unit("m"),
    )

    co_sigma = AuxCoord(
        [0.0, -0.1, -0.6, -1.0, -1.0],
        standard_name="ocean_sigma_z_coordinate",
        units=Unit("1"),
        attributes={"positive": "up"},
    )

    co_zlay = AuxCoord(
        [-137.2, -137.3, -137.4, -368.4, -1495.6],
        long_name="layer_depth",
        units=Unit("m"),
    )
    co_depth = AuxCoord(
        [
            [1625.7, 3921.2, 4106.4, 5243.5],
            [3615.4, 4942.6, 3883.6, 4823.1],
            [3263.2, 2816.3, 2741.8, 3883.6],
        ],
        standard_name="depth",
        units=Unit("m"),
    )
    co_depthc = DimCoord(137.9, long_name="depth_c", units=Unit("m"))
    co_nsigma = DimCoord(3, long_name="nsigma")

    cube = Cube(np.zeros((2, 5, 3, 4)))
    cube.add_dim_coord(co_time, 0)
    cube.add_dim_coord(co_lats, 2)
    cube.add_dim_coord(co_lons, 3)
    cube.add_aux_coord(co_zlay, 1)
    cube.add_aux_coord(co_sigma, 1)
    cube.add_aux_coord(co_ssh, (0, 2, 3))
    cube.add_aux_coord(co_depth, (2, 3))
    cube.add_aux_coord(co_depthc)
    cube.add_aux_coord(co_nsigma)

    fact = iris.aux_factory.OceanSigmaZFactory(
        depth=co_depth,
        eta=co_ssh,
        depth_c=co_depthc,
        zlev=co_zlay,
        sigma=co_sigma,
        nsigma=co_nsigma,
    )
    cube.add_aux_factory(fact)
    return cube
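A quick usage sketch of the fixture above (assumes Iris is installed and the function and its imports are available); the aux factory exposes a derived vertical coordinate computed from the depth, eta, zlev, sigma, depth_c and nsigma coordinates:

cube = ocean_sigma_z()
print([factory.name() for factory in cube.aux_factories])
print([coord.name() for coord in cube.derived_coords])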
Example #33
def _generate_cubes(header,
                    column_headings,
                    coords,
                    data_arrays,
                    cell_methods=None):
    """
    Yield :class:`iris.cube.Cube` instances given
    the headers, column headings, coords and data_arrays extracted
    from a NAME file.

    """
    for i, data_array in enumerate(data_arrays):
        # Turn the dictionary of column headings with a list of header
        # information for each field into a dictionary of headings for
        # just this field.
        field_headings = {k: v[i] for k, v in column_headings.items()}

        # Make a cube.
        cube = iris.cube.Cube(data_array)

        # Determine the name and units.
        name = "{} {}".format(field_headings["Species"],
                              field_headings["Quantity"])
        name = name.upper().replace(" ", "_")
        cube.rename(name)

        # Some units are not in SI units, are missing spaces or typed
        # in the wrong case. _parse_units returns units that are
        # recognised by Iris.
        cube.units = _parse_units(field_headings["Units"])

        # Define and add the singular coordinates of the field (flight
        # level, time etc.)
        if "Z" in field_headings:
            # Optional bounds for the Z coordinate, if supplied.
            upper_bound = field_headings.get("... to [Z]")
            lower_bound = field_headings.get("... from [Z]")
            z_coord = _cf_height_from_name(
                field_headings["Z"],
                upper_bound=upper_bound,
                lower_bound=lower_bound,
            )
            cube.add_aux_coord(z_coord)

        # Define the time unit and use it to serialise the datetime for
        # the time coordinate.
        time_unit = cf_units.Unit("hours since epoch",
                                  calendar=cf_units.CALENDAR_GREGORIAN)

        # Build time, height, latitude and longitude coordinates.
        for coord in coords:
            pts = coord.values
            coord_sys = None
            if coord.name == "latitude" or coord.name == "longitude":
                coord_units = "degrees"
                coord_sys = iris.coord_systems.GeogCS(EARTH_RADIUS)
            if (coord.name == "projection_x_coordinate"
                    or coord.name == "projection_y_coordinate"):
                coord_units = "m"
                coord_sys = iris.coord_systems.OSGB()
            if coord.name == "height":
                coord_units = "m"
                long_name = "height above ground level"
                pts = coord.values
            if coord.name == "altitude":
                coord_units = "m"
                long_name = "altitude above sea level"
                pts = coord.values
            if coord.name == "air_pressure":
                coord_units = "Pa"
                pts = coord.values
            if coord.name == "flight_level":
                pts = coord.values
                long_name = "flight_level"
                coord_units = _parse_units("FL")
            if coord.name == "time":
                coord_units = time_unit
                pts = time_unit.date2num(coord.values)

            if coord.dimension is not None:
                if coord.name == "longitude":
                    circular = iris.util._is_circular(pts, 360.0)
                else:
                    circular = False
                if coord.name == "flight_level":
                    icoord = DimCoord(points=pts,
                                      units=coord_units,
                                      long_name=long_name)
                else:
                    icoord = DimCoord(
                        points=pts,
                        standard_name=coord.name,
                        units=coord_units,
                        coord_system=coord_sys,
                        circular=circular,
                    )
                if coord.name == "height" or coord.name == "altitude":
                    icoord.long_name = long_name
                if (coord.name == "time"
                        and "Av or Int period" in field_headings):
                    dt = coord.values - field_headings["Av or Int period"]
                    bnds = time_unit.date2num(np.vstack((dt, coord.values)).T)
                    icoord.bounds = bnds
                else:
                    icoord.guess_bounds()
                cube.add_dim_coord(icoord, coord.dimension)
            else:
                icoord = AuxCoord(
                    points=pts[i],
                    standard_name=coord.name,
                    coord_system=coord_sys,
                    units=coord_units,
                )
                if (coord.name == "time"
                        and "Av or Int period" in field_headings):
                    dt = coord.values - field_headings["Av or Int period"]
                    bnds = time_unit.date2num(np.vstack((dt, coord.values)).T)
                    icoord.bounds = bnds[i, :]
                cube.add_aux_coord(icoord)

        # Headings/column headings which are encoded elsewhere.
        headings = [
            "X",
            "Y",
            "Z",
            "Time",
            "T",
            "Units",
            "Av or Int period",
            "... from [Z]",
            "... to [Z]",
            "X grid origin",
            "Y grid origin",
            "X grid size",
            "Y grid size",
            "X grid resolution",
            "Y grid resolution",
            "Number of field cols",
            "Number of preliminary cols",
            "Number of fields",
            "Number of series",
            "Output format",
        ]

        # Add the Main Headings as attributes.
        for key, value in header.items():
            if value is not None and value != "" and key not in headings:
                cube.attributes[key] = value

        # Add the Column Headings as attributes
        for key, value in field_headings.items():
            if value is not None and value != "" and key not in headings:
                cube.attributes[key] = value

        if cell_methods is not None:
            cube.add_cell_method(cell_methods[i])

        yield cube
Example #34
 def test_coord_with_irregular_step(self):
     # Check that a `CoordinateNotRegularError` is captured.
     coord = AuxCoord(np.array([2, 5, 1, 4]))
     result = is_regular(coord)
     self.assertFalse(result)
Example #35
    def create_data_object(self, filenames, variable):
        logging.debug("Creating data object for variable " + variable)

        variables = [("ER2_IMU/Longitude", "x"), ("ER2_IMU/Latitude", "y"),
                     ("ER2_IMU/gps_time", "t"), ("State/Pressure", "p"),
                     ("DataProducts/Altitude", "z"), ("header/date", ""),
                     (variable, '')]

        logging.info("Listing coordinates: " + str(variables))

        var_data = read_many_files_individually(filenames,
                                                [v[0] for v in variables])

        date_times = []
        for times, date in zip(var_data['ER2_IMU/gps_time'],
                               var_data['header/date']):
            # Date is stored as an array (of length 92??) of floats with format: yyyymmdd
            date_str = str(int(date[0]))
            t_unit = Unit('hours since {}-{}-{} 00:00:00'.format(
                date_str[0:4], date_str[4:6], date_str[6:8]))
            date_times.append(
                t_unit.convert(get_data(times), cis_standard_time_unit))

        # time_data = utils.concatenate([get_data(i) for i in var_data['ER2_IMU/gps_time']])
        # date_str = str(int(var_data['header/date'][0][0]))
        # Flatten the data by taking the 0th column of the transpose
        time_coord = DimCoord(utils.concatenate(date_times).T[0],
                              standard_name='time',
                              units=cis_standard_time_unit)

        # TODO This won't work for multiple files since the altitude bins are different for each flight...
        alt_data = utils.concatenate(
            [get_data(i) for i in var_data["DataProducts/Altitude"]])
        alt_coord = DimCoord(alt_data[0], standard_name='altitude', units='m')

        pres_data = utils.concatenate(
            [get_data(i) for i in var_data["State/Pressure"]])
        pres_coord = AuxCoord(pres_data,
                              standard_name='air_pressure',
                              units='atm')
        # Fix the air-pressure units
        pres_coord.convert_units('hPa')

        lat_data = utils.concatenate(
            [get_data(i) for i in var_data['ER2_IMU/Latitude']])
        lat_coord = AuxCoord(lat_data.T[0], standard_name='latitude')

        lon_data = utils.concatenate(
            [get_data(i) for i in var_data['ER2_IMU/Longitude']])
        lon_coord = AuxCoord(lon_data.T[0], standard_name='longitude')

        data = utils.concatenate([get_data(i) for i in var_data[variable]])
        metadata = get_metadata(var_data[variable][0])

        cube = Cube(np.ma.masked_invalid(data),
                    long_name=metadata.misc['Description'],
                    units=self.clean_units(metadata.units),
                    dim_coords_and_dims=[(alt_coord, 1), (time_coord, 0)],
                    aux_coords_and_dims=[(lat_coord, (0, )),
                                         (lon_coord, (0, )),
                                         (pres_coord, (0, 1))])
        gd = GriddedData.make_from_cube(cube)
        return gd
Example #36
def test_get_allvars_fix():
    """Test getting of fix."""
    fix = Fix.get_fixes('CMIP5', 'FGOALS-s2', 'Amon', 'tas')
    assert fix == [AllVars(None)]


LAT_COORD = DimCoord(
    [-20.0, 0.0, 10.0],
    bounds=[[-30.0, -10.0], [-10.0, 5.0], [5.0, 20.0]],
    var_name='lat',
    standard_name='latitude',
)
LAT_COORD_MULT = AuxCoord(
    [[-20.0], [0.0], [10.0]],
    bounds=[[[-30.0, -10.0]], [[-10.0, 5.0]], [[5.0, 20.0]]],
    var_name='lat',
    standard_name='latitude',
)
LAT_COORD_SMALL = DimCoord([0.0],
                           bounds=[-45.0, 45.0],
                           var_name='lat',
                           standard_name='latitude')


def test_allvars_fix_metadata():
    """Test ``fix_metadata`` for all variables."""
    cubes = CubeList([
        Cube([1, 2, 3], dim_coords_and_dims=[(LAT_COORD.copy(), 0)]),
        Cube([[1], [2], [3]],
             aux_coords_and_dims=[(LAT_COORD_MULT.copy(), (0, 1))]),
        Cube([1], dim_coords_and_dims=[(LAT_COORD_SMALL.copy(), 0)]),
Example #37
 def test_2d_coord(self):
     coord = AuxCoord(np.arange(8).reshape(2, 4))
     exp_emsg = "Expected 1D coord"
     with self.assertRaisesRegex(CoordinateMultiDimError, exp_emsg):
         regular_step(coord)
Example #38
def set_up_spot_cube(point_data,
                     validity_time=1487311200,
                     forecast_period=0,
                     number_of_sites=3):
    """Set up a spot data cube at a given validity time and forecast period for
       a given number of sites.

       Produces a cube with dimension coordinates of time, percentile
       and index. There will be one point in the percentile and time
       coordinates, and as many points in the index coordinate as
       number_of_sites. The output cube will also have auxiliary
       coordinates for altitude, wmo_site, forecast_period, and
       forecast_reference_time.

       Args:
           point_data (float):
               The value for the data in the cube, which will be used for
               every site.
       Keyword Args:
           validity_time (float):
               The value for the validity time for your data, defaults to
               1487311200 i.e. 2017-02-17 06:00:00
           forecast_period (float):
               The forecast period for your cube in hours.
           number_of_sites (int):
               The number of sites you want in your output cube.
       Returns:
           cube (iris.cube.Cube):
               Example spot data cube.
    """
    # Set up a data array with all the values the same as point_data.
    data = np.ones((1, 1, number_of_sites)) * point_data
    # Set up dimension coordinates.
    time = DimCoord(np.array([validity_time]),
                    standard_name='time',
                    units=cf_units.Unit('seconds since 1970-01-01 00:00:00',
                                        calendar='gregorian'))
    percentile = DimCoord(np.array([50.]), long_name="percentile", units='%')
    indices = np.arange(number_of_sites)
    index = DimCoord(indices, units=cf_units.Unit('1'), long_name='index')
    # Set up auxiliary coordinates.
    latitudes = np.ones(number_of_sites) * 54
    latitude = AuxCoord(latitudes,
                        standard_name='latitude',
                        units='degrees',
                        coord_system=GeogCS(6371229.0))
    longitudes = np.arange(number_of_sites)
    longitude = AuxCoord(longitudes,
                         standard_name='longitude',
                         units='degrees',
                         coord_system=GeogCS(6371229.0))
    altitudes = np.arange(number_of_sites) + 100
    altitude = DimCoord(altitudes, standard_name='altitude', units='m')
    wmo_sites = np.arange(number_of_sites) + 1000
    wmo_site = AuxCoord(wmo_sites,
                        units=cf_units.Unit('1'),
                        long_name='wmo_site')
    forecast_period_coord = AuxCoord(np.array(forecast_period * 3600),
                                     standard_name='forecast_period',
                                     units='seconds')
    # Create cube
    cube = Cube(data,
                standard_name="air_temperature",
                dim_coords_and_dims=[
                    (time, 0),
                    (percentile, 1),
                    (index, 2),
                ],
                aux_coords_and_dims=[(latitude, 2), (longitude, 2),
                                     (altitude, 2), (wmo_site, 2),
                                     (forecast_period_coord, 0)],
                units="K")
    # Add scalar forecast_reference_time.
    cycle_time = validity_time - forecast_period * 3600
    forecast_reference_time = AuxCoord(np.array([cycle_time]),
                                       standard_name='forecast_reference_time',
                                       units=cf_units.Unit(
                                           'seconds since 1970-01-01 00:00:00',
                                           calendar='gregorian'))
    cube.add_aux_coord(forecast_reference_time)
    return cube
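A short usage sketch of the helper above (argument values are arbitrary):

spot_cube = set_up_spot_cube(280.0, forecast_period=6, number_of_sites=3)
# -> an air_temperature cube of shape (1, 1, 3): one time, one percentile and
#    three sites, each with latitude, longitude, altitude and wmo_site values,
#    plus forecast_period and forecast_reference_time coordinates.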
Example #39
 def test_mixture_default(self):
     token = "air temperature"  # includes space
     coord = AuxCoord(1, long_name=token)
     result = CellMethod(self.method, coords=[coord, token])
     expected = "{}: unknown, unknown".format(self.method)
     self.assertEqual(str(result), expected)
Example #40
def _make_cube(x, y, data, aux=None, offset=0, scalar=None):
    """
    A convenience test function that creates a custom 2D cube.

    Args:

    * x:
        A (start, stop, step) tuple for specifying the
        x-axis dimensional coordinate points. Bounds are
        automatically guessed.

    * y:
        A (start, stop, step) tuple for specifying the
        y-axis dimensional coordinate points. Bounds are
        automatically guessed.

    * data:
        The data payload for the cube.

    Kwargs:

    * aux:
        A CSV string specifying which points-only auxiliary
        coordinates to create. Accepts any of 'x', 'y', 'xy'.

    * offset:
        Offset value to be added to the 'xy' auxiliary coordinate
        points.

    * scalar:
        Create a 'height' scalar coordinate with the given value.

    Returns:
        The newly created 2D :class:`iris.cube.Cube`.

    """
    x_range = np.arange(*x, dtype=np.float32)
    y_range = np.arange(*y, dtype=np.float32)
    x_size = len(x_range)
    y_size = len(y_range)

    cube_data = np.empty((y_size, x_size), dtype=np.float32)
    cube_data[:] = data
    cube = iris.cube.Cube(cube_data)
    coord = DimCoord(y_range, long_name='y')
    coord.guess_bounds()
    cube.add_dim_coord(coord, 0)
    coord = DimCoord(x_range, long_name='x')
    coord.guess_bounds()
    cube.add_dim_coord(coord, 1)

    if aux is not None:
        aux = aux.split(',')
        if 'y' in aux:
            coord = AuxCoord(y_range * 10, long_name='y-aux')
            cube.add_aux_coord(coord, (0, ))
        if 'x' in aux:
            coord = AuxCoord(x_range * 10, long_name='x-aux')
            cube.add_aux_coord(coord, (1, ))
        if 'xy' in aux:
            payload = np.arange(y_size * x_size,
                                dtype=np.float32).reshape(y_size, x_size)
            coord = AuxCoord(payload * 100 + offset, long_name='xy-aux')
            cube.add_aux_coord(coord, (0, 1))

    if scalar is not None:
        data = np.array([scalar], dtype=np.float32)
        coord = AuxCoord(data, long_name='height', units='m')
        cube.add_aux_coord(coord, ())

    return cube
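For example, the helper above could be called like this (arguments are illustrative):

# A (3, 4) cube filled with 1.0, carrying 'x-aux', 'y-aux' and 'xy-aux'
# auxiliary coordinates and a scalar 'height' coordinate of 10 m.
cube = _make_cube(x=(0, 4, 1), y=(0, 3, 1), data=1.0,
                  aux='x,y,xy', offset=2, scalar=10)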
Example #41
def _make_cube_3d(x, y, z, data, aux=None, offset=0):
    """
    A convenience test function that creates a custom 3D cube.

    Args:

    * x:
        A (start, stop, step) tuple for specifying the
        x-axis dimensional coordinate points. Bounds are
        automatically guessed.

    * y:
        A (start, stop, step) tuple for specifying the
        y-axis dimensional coordinate points. Bounds are
        automatically guessed.

    * z:
        A (start, stop, step) tuple for specifying the
        z-axis dimensional coordinate points. Bounds are
        automatically guessed.

    * data:
        The data payload for the cube.

    Kwargs:

    * aux:
        A CSV string specifying which points-only auxiliary
        coordinates to create. Accepts any of 'x', 'y', 'z',
        'xy', 'xz', 'yz', 'xyz'.

    * offset:
        Offset value to be added to non-1D auxiliary coordinate
        points.

    Returns:
        The newly created 3D :class:`iris.cube.Cube`.

    """
    x_range = np.arange(*x, dtype=np.float32)
    y_range = np.arange(*y, dtype=np.float32)
    z_range = np.arange(*z, dtype=np.float32)
    x_size, y_size, z_size = len(x_range), len(y_range), len(z_range)

    cube_data = np.empty((x_size, y_size, z_size), dtype=np.float32)
    cube_data[:] = data
    cube = iris.cube.Cube(cube_data)
    coord = DimCoord(z_range, long_name='z')
    coord.guess_bounds()
    cube.add_dim_coord(coord, 0)
    coord = DimCoord(y_range, long_name='y')
    coord.guess_bounds()
    cube.add_dim_coord(coord, 1)
    coord = DimCoord(x_range, long_name='x')
    coord.guess_bounds()
    cube.add_dim_coord(coord, 2)

    if aux is not None:
        aux = aux.split(',')
        if 'z' in aux:
            coord = AuxCoord(z_range * 10, long_name='z-aux')
            cube.add_aux_coord(coord, (0, ))
        if 'y' in aux:
            coord = AuxCoord(y_range * 10, long_name='y-aux')
            cube.add_aux_coord(coord, (1, ))
        if 'x' in aux:
            coord = AuxCoord(x_range * 10, long_name='x-aux')
            cube.add_aux_coord(coord, (2, ))
        if 'xy' in aux:
            payload = np.arange(x_size * y_size,
                                dtype=np.float32).reshape(y_size, x_size)
            coord = AuxCoord(payload + offset, long_name='xy-aux')
            cube.add_aux_coord(coord, (1, 2))
        if 'xz' in aux:
            payload = np.arange(x_size * z_size,
                                dtype=np.float32).reshape(z_size, x_size)
            coord = AuxCoord(payload * 10 + offset, long_name='xz-aux')
            cube.add_aux_coord(coord, (0, 2))
        if 'yz' in aux:
            payload = np.arange(y_size * z_size,
                                dtype=np.float32).reshape(z_size, y_size)
            coord = AuxCoord(payload * 100 + offset, long_name='yz-aux')
            cube.add_aux_coord(coord, (0, 1))
        if 'xyz' in aux:
            payload = np.arange(x_size * y_size * z_size,
                                dtype=np.float32).reshape(
                                    z_size, y_size, x_size)
            coord = AuxCoord(payload * 1000 + offset, long_name='xyz-aux')
            cube.add_aux_coord(coord, (0, 1, 2))

    return cube
Example #42
def _convert_vertical_coords(lbcode,
                             lbvc,
                             blev,
                             lblev,
                             stash,
                             bhlev,
                             bhrlev,
                             brsvd1,
                             brsvd2,
                             brlev,
                             dim=None):
    """
    Encode scalar or vector vertical level values from PP headers as CM data
    components.

    Args:

    * lbcode:
        Scalar field :class:`iris.fileformats.pp.SplittableInt` value.

    * lbvc:
        Scalar field value.

    * blev:
        Scalar field value or :class:`numpy.ndarray` vector of field values.

    * lblev:
        Scalar field value or :class:`numpy.ndarray` vector of field values.

    * stash:
        Scalar field :class:`iris.fileformats.pp.STASH` value.

    * bhlev:
        Scalar field value or :class:`numpy.ndarray` vector of field values.

    * bhrlev:
        Scalar field value or :class:`numpy.ndarray` vector of field values.

    * brsvd1:
        Scalar field value or :class:`numpy.ndarray` vector of field values.

    * brsvd2:
        Scalar field value or :class:`numpy.ndarray` vector of field values.

    * brlev:
        Scalar field value or :class:`numpy.ndarray` vector of field values.

    Kwargs:

    * dim:
        Associated dimension of the vertical coordinate. Defaults to None.

    Returns:
        A tuple containing a list of coords_and_dims, and a list of factories.

    """
    factories = []
    coords_and_dims = []

    # See Word no. 33 (LBLEV) in section 4 of UM Model Docs (F3).
    BASE_RHO_LEVEL_LBLEV = 9999
    model_level_number = np.atleast_1d(lblev)
    model_level_number[model_level_number == BASE_RHO_LEVEL_LBLEV] = 0

    # Ensure to vectorise these arguments as arrays, as they participate
    # in the conditions of convert rules.
    blev = np.atleast_1d(blev)
    brsvd1 = np.atleast_1d(brsvd1)
    brlev = np.atleast_1d(brlev)

    # Height.
    if (lbvc == 1) and \
            str(stash) not in STASHCODE_IMPLIED_HEIGHTS and \
            np.all(blev != -1):
        coord = _dim_or_aux(blev,
                            standard_name='height',
                            units='m',
                            attributes={'positive': 'up'})
        coords_and_dims.append((coord, dim))

    if str(stash) in STASHCODE_IMPLIED_HEIGHTS:
        height = STASHCODE_IMPLIED_HEIGHTS[str(stash)]
        coord = DimCoord(height,
                         standard_name='height',
                         units='m',
                         attributes={'positive': 'up'})
        coords_and_dims.append((coord, None))

    # Model level number.
    if (len(lbcode) != 5) and \
            (lbvc == 2):
        coord = _dim_or_aux(model_level_number,
                            standard_name='model_level_number',
                            attributes={'positive': 'down'})
        coords_and_dims.append((coord, dim))

    # Depth - unbound.
    if (len(lbcode) != 5) and \
            (lbvc == 2) and \
            np.all(brsvd1 == brlev):
        coord = _dim_or_aux(blev,
                            standard_name='depth',
                            units='m',
                            attributes={'positive': 'down'})
        coords_and_dims.append((coord, dim))

    # Depth - bound.
    if (len(lbcode) != 5) and \
            (lbvc == 2) and \
            np.all(brsvd1 != brlev):
        coord = _dim_or_aux(blev,
                            standard_name='depth',
                            units='m',
                            bounds=np.vstack((brsvd1, brlev)).T,
                            attributes={'positive': 'down'})
        coords_and_dims.append((coord, dim))

    # Depth - unbound and bound (mixed).
    if (len(lbcode) != 5) and \
            (lbvc == 2) and \
            (np.any(brsvd1 == brlev) and np.any(brsvd1 != brlev)):
        lower = np.where(brsvd1 == brlev, blev, brsvd1)
        upper = np.where(brsvd1 == brlev, blev, brlev)
        coord = _dim_or_aux(blev,
                            standard_name='depth',
                            units='m',
                            bounds=np.vstack((lower, upper)).T,
                            attributes={'positive': 'down'})
        coords_and_dims.append((coord, dim))

    # Soil level/depth.
    if len(lbcode) != 5 and lbvc == 6:
        if np.all(brsvd1 == 0) and np.all(brlev == 0):
            # UM populates lblev, brsvd1 and brlev metadata INCORRECTLY,
            # so continue to treat as a soil level.
            coord = _dim_or_aux(model_level_number,
                                long_name='soil_model_level_number',
                                attributes={'positive': 'down'})
            coords_and_dims.append((coord, dim))
        elif np.any(brsvd1 != brlev):
            # UM populates metadata CORRECTLY,
            # so treat it as the expected (bounded) soil depth.
            coord = _dim_or_aux(blev,
                                standard_name='depth',
                                units='m',
                                bounds=np.vstack((brsvd1, brlev)).T,
                                attributes={'positive': 'down'})
            coords_and_dims.append((coord, dim))

    # Pressure.
    if (lbvc == 8) and \
            (len(lbcode) != 5 or (len(lbcode) == 5 and
                                  1 not in [lbcode.ix, lbcode.iy])):
        coord = _dim_or_aux(blev, long_name='pressure', units='hPa')
        coords_and_dims.append((coord, dim))

    # Air potential temperature.
    if (len(lbcode) != 5) and \
            (lbvc == 19):
        coord = _dim_or_aux(blev,
                            standard_name='air_potential_temperature',
                            units='K',
                            attributes={'positive': 'up'})
        coords_and_dims.append((coord, dim))

    # Hybrid pressure levels.
    if lbvc == 9:
        model_level_number = _dim_or_aux(model_level_number,
                                         standard_name='model_level_number',
                                         attributes={'positive': 'up'})
        level_pressure = _dim_or_aux(bhlev,
                                     long_name='level_pressure',
                                     units='Pa',
                                     bounds=np.vstack((bhrlev, brsvd2)).T)
        sigma = AuxCoord(blev,
                         long_name='sigma',
                         bounds=np.vstack((brlev, brsvd1)).T)
        coords_and_dims.extend([(model_level_number, dim),
                                (level_pressure, dim), (sigma, dim)])
        factories.append(
            Factory(HybridPressureFactory,
                    [{
                        'long_name': 'level_pressure'
                    }, {
                        'long_name': 'sigma'
                    },
                     Reference('surface_air_pressure')]))

    # Hybrid height levels.
    if lbvc == 65:
        model_level_number = _dim_or_aux(model_level_number,
                                         standard_name='model_level_number',
                                         attributes={'positive': 'up'})
        level_height = _dim_or_aux(blev,
                                   long_name='level_height',
                                   units='m',
                                   bounds=np.vstack((brlev, brsvd1)).T,
                                   attributes={'positive': 'up'})
        sigma = AuxCoord(bhlev,
                         long_name='sigma',
                         bounds=np.vstack((bhrlev, brsvd2)).T)
        coords_and_dims.extend([(model_level_number, dim), (level_height, dim),
                                (sigma, dim)])
        factories.append(
            Factory(HybridHeightFactory, [{
                'long_name': 'level_height'
            }, {
                'long_name': 'sigma'
            },
                                          Reference('orography')]))

    return coords_and_dims, factories
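The bounds construction used repeatedly above stacks two per-level header arrays into an (n, 2) bounds array, giving one pair of boundary values per level; a tiny standalone illustration with invented values:

import numpy as np

brsvd1 = np.array([0.0, 10.0, 30.0])    # one boundary value per level (invented)
brlev = np.array([10.0, 30.0, 60.0])    # the other boundary value per level
bounds = np.vstack((brsvd1, brlev)).T
# bounds -> [[ 0. 10.]
#            [10. 30.]
#            [30. 60.]]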
Example #43
def _all_other_rules(f):
    """
    This deals with all the other rules that have not been factored into any of
    the other convert_scalar_coordinate functions above.

    """
    references = []
    standard_name = None
    long_name = None
    units = None
    attributes = {}
    cell_methods = []
    dim_coords_and_dims = []
    aux_coords_and_dims = []

    # Season coordinates (--> scalar coordinates)
    if (f.lbtim.ib == 3 and f.lbtim.ic in [1, 2, 4]
            and (len(f.lbcode) != 5 or
                 (len(f.lbcode) == 5 and
                  (f.lbcode.ix not in [20, 21, 22, 23]
                   and f.lbcode.iy not in [20, 21, 22, 23]))) and f.lbmon == 12
            and f.lbdat == 1 and f.lbhr == 0 and f.lbmin == 0 and f.lbmond == 3
            and f.lbdatd == 1 and f.lbhrd == 0 and f.lbmind == 0):
        aux_coords_and_dims.append((AuxCoord('djf',
                                             long_name='season',
                                             units='no_unit'), None))

    if (f.lbtim.ib == 3 and f.lbtim.ic in [1, 2, 4]
            and ((len(f.lbcode) != 5) or
                 (len(f.lbcode) == 5 and f.lbcode.ix not in [20, 21, 22, 23]
                  and f.lbcode.iy not in [20, 21, 22, 23])) and f.lbmon == 3
            and f.lbdat == 1 and f.lbhr == 0 and f.lbmin == 0 and f.lbmond == 6
            and f.lbdatd == 1 and f.lbhrd == 0 and f.lbmind == 0):
        aux_coords_and_dims.append((AuxCoord('mam',
                                             long_name='season',
                                             units='no_unit'), None))

    if (f.lbtim.ib == 3 and f.lbtim.ic in [1, 2, 4]
            and ((len(f.lbcode) != 5) or
                 (len(f.lbcode) == 5 and f.lbcode.ix not in [20, 21, 22, 23]
                  and f.lbcode.iy not in [20, 21, 22, 23])) and f.lbmon == 6
            and f.lbdat == 1 and f.lbhr == 0 and f.lbmin == 0 and f.lbmond == 9
            and f.lbdatd == 1 and f.lbhrd == 0 and f.lbmind == 0):
        aux_coords_and_dims.append((AuxCoord('jja',
                                             long_name='season',
                                             units='no_unit'), None))

    if (f.lbtim.ib == 3 and f.lbtim.ic in [1, 2, 4]
            and ((len(f.lbcode) != 5) or
                 (len(f.lbcode) == 5 and f.lbcode.ix not in [20, 21, 22, 23]
                  and f.lbcode.iy not in [20, 21, 22, 23])) and f.lbmon == 9
            and f.lbdat == 1 and f.lbhr == 0 and f.lbmin == 0
            and f.lbmond == 12 and f.lbdatd == 1 and f.lbhrd == 0
            and f.lbmind == 0):
        aux_coords_and_dims.append((AuxCoord('son',
                                             long_name='season',
                                             units='no_unit'), None))

    # Special case where year is zero and months match.
    # Month coordinates (--> scalar coordinates)
    if (f.lbtim.ib == 2 and f.lbtim.ic in [1, 2, 4]
            and ((len(f.lbcode) != 5) or
                 (len(f.lbcode) == 5 and f.lbcode.ix not in [20, 21, 22, 23]
                  and f.lbcode.iy not in [20, 21, 22, 23])) and f.lbyr == 0
            and f.lbyrd == 0 and f.lbmon == f.lbmond):
        aux_coords_and_dims.append((AuxCoord(f.lbmon,
                                             long_name='month_number'), None))
        aux_coords_and_dims.append((AuxCoord(calendar.month_abbr[f.lbmon],
                                             long_name='month',
                                             units='no_unit'), None))
        aux_coords_and_dims.append((DimCoord(points=f.lbft,
                                             standard_name='forecast_period',
                                             units='hours'), None))

    # "Normal" (i.e. not cross-sectional) lats+lons (--> vector coordinates)
    if (f.bdx != 0.0 and f.bdx != f.bmdi and len(f.lbcode) != 5
            and f.lbcode[0] == 1):
        dim_coords_and_dims.append(
            (DimCoord.from_regular(f.bzx,
                                   f.bdx,
                                   f.lbnpt,
                                   standard_name=f._x_coord_name(),
                                   units='degrees',
                                   circular=(f.lbhem in [0, 4]),
                                   coord_system=f.coord_system()), 1))

    if (f.bdx != 0.0 and f.bdx != f.bmdi and len(f.lbcode) != 5
            and f.lbcode[0] == 2):
        dim_coords_and_dims.append(
            (DimCoord.from_regular(f.bzx,
                                   f.bdx,
                                   f.lbnpt,
                                   standard_name=f._x_coord_name(),
                                   units='degrees',
                                   circular=(f.lbhem in [0, 4]),
                                   coord_system=f.coord_system(),
                                   with_bounds=True), 1))

    if (f.bdy != 0.0 and f.bdy != f.bmdi and len(f.lbcode) != 5
            and f.lbcode[0] == 1):
        dim_coords_and_dims.append(
            (DimCoord.from_regular(f.bzy,
                                   f.bdy,
                                   f.lbrow,
                                   standard_name=f._y_coord_name(),
                                   units='degrees',
                                   coord_system=f.coord_system()), 0))

    if (f.bdy != 0.0 and f.bdy != f.bmdi and len(f.lbcode) != 5
            and f.lbcode[0] == 2):
        dim_coords_and_dims.append(
            (DimCoord.from_regular(f.bzy,
                                   f.bdy,
                                   f.lbrow,
                                   standard_name=f._y_coord_name(),
                                   units='degrees',
                                   coord_system=f.coord_system(),
                                   with_bounds=True), 0))

    if ((f.bdy == 0.0 or f.bdy == f.bmdi) and
        (len(f.lbcode) != 5 or (len(f.lbcode) == 5 and f.lbcode.iy == 10))):
        dim_coords_and_dims.append(
            (DimCoord(f.y,
                      standard_name=f._y_coord_name(),
                      units='degrees',
                      bounds=f.y_bounds,
                      coord_system=f.coord_system()), 0))

    if ((f.bdx == 0.0 or f.bdx == f.bmdi) and
        (len(f.lbcode) != 5 or (len(f.lbcode) == 5 and f.lbcode.ix == 11))):
        dim_coords_and_dims.append(
            (DimCoord(f.x,
                      standard_name=f._x_coord_name(),
                      units='degrees',
                      bounds=f.x_bounds,
                      circular=(f.lbhem in [0, 4]),
                      coord_system=f.coord_system()), 1))

    # Cross-sectional vertical level types (--> vector coordinates)
    if (len(f.lbcode) == 5 and f.lbcode.iy == 2
            and (f.bdy == 0 or f.bdy == f.bmdi)):
        dim_coords_and_dims.append((DimCoord(f.y,
                                             standard_name='height',
                                             units='km',
                                             bounds=f.y_bounds,
                                             attributes={'positive':
                                                         'up'}), 0))

    if (len(f.lbcode) == 5 and f.lbcode[-1] == 1 and f.lbcode.iy == 4):
        dim_coords_and_dims.append((DimCoord(f.y,
                                             standard_name='depth',
                                             units='m',
                                             bounds=f.y_bounds,
                                             attributes={'positive':
                                                         'down'}), 0))

    if (len(f.lbcode) == 5 and f.lbcode.ix == 10 and f.bdx != 0
            and f.bdx != f.bmdi):
        dim_coords_and_dims.append(
            (DimCoord.from_regular(f.bzx,
                                   f.bdx,
                                   f.lbnpt,
                                   standard_name=f._y_coord_name(),
                                   units='degrees',
                                   coord_system=f.coord_system()), 1))

    if (len(f.lbcode) == 5 and f.lbcode.iy == 1
            and (f.bdy == 0 or f.bdy == f.bmdi)):
        dim_coords_and_dims.append((DimCoord(f.y,
                                             long_name='pressure',
                                             units='hPa',
                                             bounds=f.y_bounds), 0))

    if (len(f.lbcode) == 5 and f.lbcode.ix == 1
            and (f.bdx == 0 or f.bdx == f.bmdi)):
        dim_coords_and_dims.append((DimCoord(f.x,
                                             long_name='pressure',
                                             units='hPa',
                                             bounds=f.x_bounds), 1))

    # Cross-sectional time values (--> vector coordinates)
    if (len(f.lbcode) == 5 and f.lbcode[-1] == 1 and f.lbcode.iy == 23):
        dim_coords_and_dims.append(
            (DimCoord(f.y,
                      standard_name='time',
                      units=cf_units.Unit('days since 0000-01-01 00:00:00',
                                          calendar=cf_units.CALENDAR_360_DAY),
                      bounds=f.y_bounds), 0))

    if (len(f.lbcode) == 5 and f.lbcode[-1] == 1 and f.lbcode.ix == 23):
        dim_coords_and_dims.append(
            (DimCoord(f.x,
                      standard_name='time',
                      units=cf_units.Unit('days since 0000-01-01 00:00:00',
                                          calendar=cf_units.CALENDAR_360_DAY),
                      bounds=f.x_bounds), 1))

    if (len(f.lbcode) == 5 and f.lbcode[-1] == 3 and f.lbcode.iy == 23
            and f.lbtim.ib == 2 and f.lbtim.ic == 2):
        epoch_days_unit = cf_units.Unit('days since 0000-01-01 00:00:00',
                                        calendar=cf_units.CALENDAR_360_DAY)
        t1_epoch_days = epoch_days_unit.date2num(f.t1)
        t2_epoch_days = epoch_days_unit.date2num(f.t2)
        # The end time is exclusive, not inclusive.
        dim_coords_and_dims.append((DimCoord(np.linspace(t1_epoch_days,
                                                         t2_epoch_days,
                                                         f.lbrow,
                                                         endpoint=False),
                                             standard_name='time',
                                             units=epoch_days_unit,
                                             bounds=f.y_bounds), 0))

    # Site number (--> vector coordinate)
    if (len(f.lbcode) == 5 and f.lbcode[-1] == 1 and f.lbcode.ix == 13
            and f.bdx != 0):
        dim_coords_and_dims.append(
            (DimCoord.from_regular(f.bzx,
                                   f.bdx,
                                   f.lbnpt,
                                   long_name='site_number',
                                   units='1'), 1))

    # Site number cross-sections (???)
    if (len(f.lbcode) == 5 and 13 in [f.lbcode.ix, f.lbcode.iy]
            and 11 not in [f.lbcode.ix, f.lbcode.iy]
            and hasattr(f, 'lower_x_domain') and hasattr(f, 'upper_x_domain')
            and all(f.lower_x_domain != -1.e+30)
            and all(f.upper_x_domain != -1.e+30)):
        aux_coords_and_dims.append((AuxCoord(
            (f.lower_x_domain + f.upper_x_domain) / 2.0,
            standard_name=f._x_coord_name(),
            units='degrees',
            bounds=np.array([f.lower_x_domain, f.upper_x_domain]).T,
            coord_system=f.coord_system()), 1 if f.lbcode.ix == 13 else 0))

    if (len(f.lbcode) == 5 and 13 in [f.lbcode.ix, f.lbcode.iy]
            and 10 not in [f.lbcode.ix, f.lbcode.iy]
            and hasattr(f, 'lower_y_domain') and hasattr(f, 'upper_y_domain')
            and all(f.lower_y_domain != -1.e+30)
            and all(f.upper_y_domain != -1.e+30)):
        aux_coords_and_dims.append((AuxCoord(
            (f.lower_y_domain + f.upper_y_domain) / 2.0,
            standard_name=f._y_coord_name(),
            units='degrees',
            bounds=np.array([f.lower_y_domain, f.upper_y_domain]).T,
            coord_system=f.coord_system()), 1 if f.lbcode.ix == 13 else 0))

    # LBPROC codings (--> cell method + attributes)
    unhandled_lbproc = True
    zone_method = None
    time_method = None
    if f.lbproc == 0:
        unhandled_lbproc = False
    elif f.lbproc == 64:
        zone_method = 'mean'
    elif f.lbproc == 128:
        time_method = 'mean'
    elif f.lbproc == 4096:
        time_method = 'minimum'
    elif f.lbproc == 8192:
        time_method = 'maximum'
    elif f.lbproc == 192:
        time_method = 'mean'
        zone_method = 'mean'

    if time_method is not None:
        if f.lbtim.ia != 0:
            intervals = '{} hour'.format(f.lbtim.ia)
        else:
            intervals = None

        if f.lbtim.ib == 2:
            # Aggregation over a period of time.
            cell_methods.append(
                CellMethod(time_method, coords='time', intervals=intervals))
            unhandled_lbproc = False
        elif f.lbtim.ib == 3 and f.lbproc == 128:
            # Aggregation over a period of time within a year, over a number
            # of years.
            # Only mean (lbproc of 128) is handled as the min/max
            # interpretation is ambiguous e.g. decadal mean of daily max,
            # decadal max of daily mean, decadal mean of max daily mean etc.
            cell_methods.append(
                CellMethod('{} within years'.format(time_method),
                           coords='time',
                           intervals=intervals))
            cell_methods.append(
                CellMethod('{} over years'.format(time_method), coords='time'))
            unhandled_lbproc = False
        else:
            # Generic cell method to indicate a time aggregation.
            cell_methods.append(CellMethod(time_method, coords='time'))
            unhandled_lbproc = False

    if zone_method is not None:
        if f.lbcode == 1:
            cell_methods.append(CellMethod(zone_method, coords='longitude'))
            for coord, _dim in dim_coords_and_dims:
                if coord.standard_name == 'longitude':
                    if len(coord.points) == 1:
                        coord.bounds = np.array([0., 360.], dtype=np.float32)
                    else:
                        coord.guess_bounds()
            unhandled_lbproc = False
        elif f.lbcode == 101:
            cell_methods.append(
                CellMethod(zone_method, coords='grid_longitude'))
            for coord, _dim in dim_coords_and_dims:
                if coord.standard_name == 'grid_longitude':
                    if len(coord.points) == 1:
                        coord.bounds = np.array([0., 360.], dtype=np.float32)
                    else:
                        coord.guess_bounds()
            unhandled_lbproc = False
        else:
            unhandled_lbproc = True

    if unhandled_lbproc:
        attributes["ukmo__process_flags"] = tuple(
            sorted([
                name for value, name in six.iteritems(LBPROC_MAP)
                if isinstance(value, int) and f.lbproc & value
            ]))

    if (f.lbsrce % 10000) == 1111:
        attributes['source'] = 'Data from Met Office Unified Model'
        # Also define MO-netCDF compliant UM version.
        um_major = (f.lbsrce // 10000) // 100
        if um_major != 0:
            um_minor = (f.lbsrce // 10000) % 100
            attributes['um_version'] = '{:d}.{:d}'.format(um_major, um_minor)
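        # Illustration: an lbsrce of 10051111 would encode model id 1111 and
        # UM version code 1005, i.e. major 10 and minor 5, giving
        # um_version == '10.5'.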

    if (f.lbuser[6] != 0 or (f.lbuser[3] // 1000) != 0
            or (f.lbuser[3] % 1000) != 0):
        attributes['STASH'] = f.stash

    if str(f.stash) in STASH_TO_CF:
        standard_name = STASH_TO_CF[str(f.stash)].standard_name
        units = STASH_TO_CF[str(f.stash)].units
        long_name = STASH_TO_CF[str(f.stash)].long_name

    if (not f.stash.is_valid and f.lbfc in LBFC_TO_CF):
        standard_name = LBFC_TO_CF[f.lbfc].standard_name
        units = LBFC_TO_CF[f.lbfc].units
        long_name = LBFC_TO_CF[f.lbfc].long_name

    # Orography reference field (--> reference target)
    if f.lbuser[3] == 33:
        references.append(ReferenceTarget('orography', None))

    # Surface pressure reference field (--> reference target)
    if f.lbuser[3] == 409 or f.lbuser[3] == 1:
        references.append(ReferenceTarget('surface_air_pressure', None))

    return (references, standard_name, long_name, units, attributes,
            cell_methods, dim_coords_and_dims, aux_coords_and_dims)
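
# A minimal, self-contained sketch of the LBPROC bit-flag decoding used above
# for the 'ukmo__process_flags' attribute.  The mapping below is a made-up
# illustrative subset: the bit values match the lbproc tests above, but the
# names are not the real LBPROC_MAP entries.
example_lbproc_map = {64: 'zonal_mean', 128: 'time_mean',
                      4096: 'time_minimum', 8192: 'time_maximum'}
example_lbproc = 192  # 64 + 128: a zonal mean of a time mean
example_flags = tuple(sorted(name for value, name in example_lbproc_map.items()
                             if example_lbproc & value))
# example_flags == ('time_mean', 'zonal_mean')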
Beispiel #44
0
def load_NAMEIII_trajectory(filename):
    """
    Load a NAME III trajectory file returning a
    generator of :class:`iris.cube.Cube` instances.

    Args:

    * filename (string):
        Name of file to load.

    Returns:
        A generator of :class:`iris.cube.Cube` instances.

    """
    time_unit = cf_units.Unit('hours since epoch',
                              calendar=cf_units.CALENDAR_GREGORIAN)

    with open(filename, 'r') as infile:
        header = read_header(infile)

        # read the column headings
        for line in infile:
            if line.startswith("    "):
                break
        headings = [heading.strip() for heading in line.split(",")]

        # read the columns
        columns = [[] for i in range(len(headings))]
        for line in infile:
            values = [v.strip() for v in line.split(",")]
            for c, v in enumerate(values):
                if "UTC" in v:
                    v = v.replace(":00 ", " ")  # Strip out the seconds.
                    v = datetime.datetime.strptime(v, NAMEIII_DATETIME_FORMAT)
                else:
                    try:
                        v = float(v)
                    except ValueError:
                        pass
                columns[c].append(v)

    # Where's the Z column?
    z_column = None
    for i, heading in enumerate(headings):
        if heading.startswith("Z "):
            z_column = i
            break
    if z_column is None:
        raise TranslationError("Expected a Z column")

    # Every column up to Z becomes a coordinate.
    coords = []
    for name, values in zip(headings[:z_column+1], columns[:z_column+1]):
        values = np.array(values)
        if np.all(values == values[0]):
            values = [values[0]]

        standard_name = long_name = units = None
        if isinstance(values[0], datetime.datetime):
            values = time_unit.date2num(values)
            units = time_unit
            if name == "Time":
                name = "time"
        elif " (Lat-Long)" in name:
            if name.startswith("X"):
                name = "longitude"
            elif name.startswith("Y"):
                name = "latitude"
            units = "degrees"
        elif name == "Z (m asl)":
            name = "altitude"
            units = "m"
            long_name = "altitude above sea level"
        elif name == "Z (m agl)":
            name = 'height'
            units = "m"
            long_name = "height above ground level"
        elif name == "Z (FL)":
            name = "flight_level"
            long_name = name

        try:
            coord = DimCoord(values, units=units)
        except ValueError:
            coord = AuxCoord(values, units=units)
        coord.rename(name)
        if coord.long_name is None and long_name is not None:
            coord.long_name = long_name
        coords.append(coord)

    # Every numerical column after the Z becomes a cube.
    for name, values in zip(headings[z_column+1:], columns[z_column+1:]):
        try:
            float(values[0])
        except ValueError:
            continue
        # units embedded in column heading?
        name, units = _split_name_and_units(name)
        cube = iris.cube.Cube(values, units=units)
        cube.rename(name)
        for coord in coords:
            dim = 0 if len(coord.points) > 1 else None
            if isinstance(coord, DimCoord) and coord.name() == "time":
                cube.add_dim_coord(coord.copy(), dim)
            else:
                cube.add_aux_coord(coord.copy(), dim)
        yield cube
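
# Hedged usage sketch: in the Iris source this loader comes from, it is
# importable as iris.fileformats.name_loaders.load_NAMEIII_trajectory (an
# assumption about your install); the filename is a placeholder for a real
# NAME III trajectory output file.
from iris.fileformats.name_loaders import load_NAMEIII_trajectory

for trajectory_cube in load_NAMEIII_trajectory('name_trajectory_output.txt'):
    print(trajectory_cube.name(), trajectory_cube.shape)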
Beispiel #45
0
 def test_coord_standard_name(self):
     token = "air_temperature"
     coord = AuxCoord(1, standard_name=token)
     self._check(token, coord)
Beispiel #46
0
 def test_mixture(self):
     token = "air_temperature"
     coord = AuxCoord(1, standard_name=token)
     result = CellMethod(self.method, coords=[coord, token])
     expected = "{}: {}, {}".format(self.method, token, token)
     self.assertEqual(str(result), expected)
Beispiel #47
0
def get_cube(name, lat=((0, 1), (2, 3)), lon=((0, 1), (2, 3))):
    cube = Cube(np.ones((2, 2)), name)
    cube.add_aux_coord(AuxCoord(lat, 'latitude'), (0, 1))
    cube.add_aux_coord(AuxCoord(lon, 'longitude'), (0, 1))
    return cube
Beispiel #48
0
 def test_coord_stash_default(self):
     token = "_stash"  # includes leading underscore
     coord = AuxCoord(1, attributes=dict(STASH=token))
     self._check(token, coord, default=True)
Beispiel #49
0
 def test_coord_with_irregular_step(self):
     name = "latitude"
     coord = AuxCoord(np.array([2, 5, 1, 4]), standard_name=name)
     exp_emsg = "{} is not regular".format(name)
     with self.assertRaisesRegex(CoordinateNotRegularError, exp_emsg):
         regular_step(coord)
Beispiel #50
0
 def test_coord_stash(self):
     token = "stash"
     coord = AuxCoord(1, attributes=dict(STASH=token))
     self._check(token, coord, default=True)
Beispiel #51
0
 def test_coord_with_string_points(self):
     # Check that a `TypeError` is captured.
     coord = AuxCoord(["a", "b", "c"])
     result = is_regular(coord)
     self.assertFalse(result)
Beispiel #52
0
 def test_coord_var_name_fail(self):
     token = "var name"  # includes space
     emsg = "is not a valid NetCDF variable name"
     with self.assertRaisesRegex(ValueError, emsg):
         AuxCoord(1, var_name=token)
Beispiel #53
0
 def test_aux_coord(self):
     dtype = np.int64
     points = np.array([1, 2, 3], dtype=dtype)
     aux_coord = AuxCoord(points)
     self._check_call(aux_coord, dtype)
Beispiel #54
0
 def test_coord_var_name(self):
     token = "var_name"
     coord = AuxCoord(1, var_name=token)
     self._check(token, coord)
Beispiel #55
0
def sample_mesh_cube(
    nomesh=False, n_z=2, with_parts=False, **meshcoord_kwargs
):
    """
    Create a 2d test cube with 1 'normal' and 1 unstructured dimension (with a Mesh).

    Result contains: dimcoords for both dims; an auxcoord on the unstructured dim; 2 mesh-coords.
    By default, the mesh is provided by :func:`sample_mesh`, so coordinates and connectivity are not realistic.

    Kwargs:
    * nomesh(bool):
        If set, don't add MeshCoords, so dim 1 is just a plain anonymous dim.
    * n_z (int):
        Length of the 'normal' dim.  If 0, it is *omitted*.
    * with_parts (bool):
        If set, return all the constituent component coords
    * meshcoord_kwargs (dict):
        Extra controls passed to :func:`sample_meshcoord` for MeshCoord creation, to allow user-specified
        location/mesh.  The 'axis' key is not available, as we always add both an 'x' and a 'y' MeshCoord.

    Returns:
    * cube  :  if with_parts not set
    * (cube, parts)  : if with_parts is set
        'parts' is (mesh, dim0-dimcoord, dim1-dimcoord, dim1-auxcoord, x-meshcoord [or None], y-meshcoord [or None]).

    """
    if nomesh:
        mesh = None
        n_faces = 5
    else:
        mesh = meshcoord_kwargs.pop("mesh", None)
        if mesh is None:
            mesh = sample_mesh()
        meshx, meshy = (
            sample_meshcoord(axis=axis, mesh=mesh, **meshcoord_kwargs)
            for axis in ("x", "y")
        )
        n_faces = meshx.shape[0]

    mesh_dimco = DimCoord(
        np.arange(n_faces), long_name="i_mesh_face", units="1"
    )

    auxco_x = AuxCoord(np.zeros(n_faces), long_name="mesh_face_aux", units="1")

    zco = DimCoord(np.arange(n_z), long_name="level", units="1")
    cube = Cube(np.zeros((n_z, n_faces)), long_name="mesh_phenom")
    cube.add_dim_coord(zco, 0)
    if nomesh:
        mesh_coords = []
    else:
        mesh_coords = [meshx, meshy]

    cube.add_dim_coord(mesh_dimco, 1)
    for co in mesh_coords + [auxco_x]:
        cube.add_aux_coord(co, 1)

    if not with_parts:
        result = cube
    else:
        if nomesh:
            meshx, meshy = None, None
        parts = (mesh, zco, mesh_dimco, auxco_x, meshx, meshy)
        result = (cube, parts)

    return result
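
# Minimal usage sketch for the factory above (assuming the companion helpers
# sample_mesh and sample_meshcoord that it references are importable from the
# same test-stock module):
demo_cube, demo_parts = sample_mesh_cube(with_parts=True)
demo_mesh, demo_zco, demo_dimco, demo_auxco, demo_meshx, demo_meshy = demo_parts
print(demo_cube.summary(shorten=True))  # e.g. 'mesh_phenom / (unknown) ...'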
Beispiel #56
0
 def test_coord_long_name_default(self):
     token = "long name"  # includes space
     coord = AuxCoord(1, long_name=token)
     self._check(token, coord, default=True)
Beispiel #57
0
def simple_3d_w_multidim_coords(with_bounds=True):
    """
    Returns an abstract, three-dimensional, optionally bounded, cube.

    >>> print(simple_3d_w_multidim_coords())
    thingness                           (wibble: 2; *ANONYMOUS*: 3; *ANONYMOUS*: 4)
         Dimension coordinates:
              wibble                           x               -               -
         Auxiliary coordinates:
              bar                              -               x               x
              foo                              -               x               x

    >>> print(simple_3d_w_multidim_coords().data)
    [[[ 0  1  2  3]
      [ 4  5  6  7]
      [ 8  9 10 11]]

     [[12 13 14 15]
      [16 17 18 19]
      [20 21 22 23]]]

    """
    cube = Cube(np.arange(24, dtype=np.int32).reshape((2, 3, 4)))
    cube.long_name = "thingness"
    cube.units = "1"

    y_points = np.array(
        [
            [2.5, 7.5, 12.5, 17.5],
            [10.0, 17.5, 27.5, 42.5],
            [15.0, 22.5, 32.5, 50.0],
        ]
    )
    y_bounds = np.array(
        [
            [[0, 5], [5, 10], [10, 15], [15, 20]],
            [[5, 15], [15, 20], [20, 35], [35, 50]],
            [[10, 20], [20, 25], [25, 40], [40, 60]],
        ],
        dtype=np.int32,
    )
    y_coord = AuxCoord(
        points=y_points,
        long_name="bar",
        units="1",
        bounds=y_bounds if with_bounds else None,
    )
    x_points = np.array(
        [
            [-7.5, 7.5, 22.5, 37.5],
            [-12.5, 4.0, 26.5, 47.5],
            [2.5, 14.0, 36.5, 44.0],
        ]
    )
    x_bounds = np.array(
        [
            [[-15, 0], [0, 15], [15, 30], [30, 45]],
            [[-25, 0], [0, 8], [8, 45], [45, 50]],
            [[-5, 10], [10, 18], [18, 55], [18, 70]],
        ],
        dtype=np.int32,
    )
    x_coord = AuxCoord(
        points=x_points,
        long_name="foo",
        units="1",
        bounds=x_bounds if with_bounds else None,
    )
    wibble_coord = DimCoord(
        np.array([10.0, 30.0], dtype=np.float32), long_name="wibble", units="1"
    )

    cube.add_dim_coord(wibble_coord, [0])
    cube.add_aux_coord(y_coord, [1, 2])
    cube.add_aux_coord(x_coord, [1, 2])
    return cube
Beispiel #58
0
 def test_coord_long_name(self):
     token = "long_name"
     coord = AuxCoord(1, long_name=token)
     self._check(token, coord)
Beispiel #59
0
def _generate_cubes(header, column_headings, coords, data_arrays,
                    cell_methods=None):
    """
    Yield :class:`iris.cube.Cube` instances given
    the headers, column headings, coords and data_arrays extracted
    from a NAME file.

    """
    for i, data_array in enumerate(data_arrays):
        # Turn the dictionary of column headings with a list of header
        # information for each field into a dictionary of headings for
        # just this field.
        field_headings = {k: v[i] for k, v in six.iteritems(column_headings)}

        # Make a cube.
        cube = iris.cube.Cube(data_array)

        # Determine the name and units.
        name = '{} {}'.format(field_headings['Species'],
                              field_headings['Quantity'])
        name = name.upper().replace(' ', '_')
        cube.rename(name)

        # Some units are not SI units, are missing spaces, or are typed
        # in the wrong case. _parse_units returns units that are
        # recognised by Iris.
        cube.units = _parse_units(field_headings['Units'])

        # Define and add the singular coordinates of the field (flight
        # level, time etc.)
        if 'Z' in field_headings:
            upper_bound, = [field_headings['... to [Z]']
                            if '... to [Z]' in field_headings else None]
            lower_bound, = [field_headings['... from [Z]']
                            if '... from [Z]' in field_headings else None]
            z_coord = _cf_height_from_name(field_headings['Z'],
                                           upper_bound=upper_bound,
                                           lower_bound=lower_bound)
            cube.add_aux_coord(z_coord)

        # Define the time unit and use it to serialise the datetime for
        # the time coordinate.
        time_unit = cf_units.Unit(
            'hours since epoch', calendar=cf_units.CALENDAR_GREGORIAN)

        # Build time, height, latitude and longitude coordinates.
        for coord in coords:
            pts = coord.values
            coord_sys = None
            if coord.name == 'latitude' or coord.name == 'longitude':
                coord_units = 'degrees'
                coord_sys = iris.coord_systems.GeogCS(EARTH_RADIUS)
            if coord.name == 'height':
                coord_units = 'm'
                long_name = 'height above ground level'
                pts = coord.values
            if coord.name == 'altitude':
                coord_units = 'm'
                long_name = 'altitude above sea level'
                pts = coord.values
            if coord.name == 'air_pressure':
                coord_units = 'Pa'
                pts = coord.values
            if coord.name == 'flight_level':
                pts = coord.values
                long_name = 'flight_level'
                coord_units = _parse_units('FL')
            if coord.name == 'time':
                coord_units = time_unit
                pts = time_unit.date2num(coord.values)

            if coord.dimension is not None:
                if coord.name == 'longitude':
                    circular = iris.util._is_circular(pts, 360.0)
                else:
                    circular = False
                if coord.name == 'flight_level':
                    icoord = DimCoord(points=pts,
                                      units=coord_units,
                                      long_name=long_name,)
                else:
                    icoord = DimCoord(points=pts,
                                      standard_name=coord.name,
                                      units=coord_units,
                                      coord_system=coord_sys,
                                      circular=circular)
                if coord.name == 'height' or coord.name == 'altitude':
                    icoord.long_name = long_name
                if coord.name == 'time' and 'Av or Int period' in \
                        field_headings:
                    dt = coord.values - \
                        field_headings['Av or Int period']
                    bnds = time_unit.date2num(
                        np.vstack((dt, coord.values)).T)
                    icoord.bounds = bnds
                else:
                    icoord.guess_bounds()
                cube.add_dim_coord(icoord, coord.dimension)
            else:
                icoord = AuxCoord(points=pts[i],
                                  standard_name=coord.name,
                                  coord_system=coord_sys,
                                  units=coord_units)
                if coord.name == 'time' and 'Av or Int period' in \
                        field_headings:
                    dt = coord.values - \
                        field_headings['Av or Int period']
                    bnds = time_unit.date2num(
                        np.vstack((dt, coord.values)).T)
                    icoord.bounds = bnds[i, :]
                cube.add_aux_coord(icoord)

        # Headings/column headings which are encoded elsewhere.
        headings = ['X', 'Y', 'Z', 'Time', 'T', 'Units',
                    'Av or Int period',
                    '... from [Z]', '... to [Z]',
                    'X grid origin', 'Y grid origin',
                    'X grid size', 'Y grid size',
                    'X grid resolution', 'Y grid resolution',
                    'Number of field cols', 'Number of preliminary cols',
                    'Number of fields', 'Number of series',
                    'Output format', ]

        # Add the Main Headings as attributes.
        for key, value in six.iteritems(header):
            if value is not None and value != '' and \
                    key not in headings:
                cube.attributes[key] = value

        # Add the Column Headings as attributes
        for key, value in six.iteritems(field_headings):
            if value is not None and value != '' and \
                    key not in headings:
                cube.attributes[key] = value

        if cell_methods is not None:
            cube.add_cell_method(cell_methods[i])

        yield cube
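
# Hedged usage note: this private generator comes from
# iris.fileformats.name_loaders in the Iris source and is not called directly.
# NAME output is normally loaded through the public API, which dispatches to
# the NAME format handlers and ultimately yields the cubes built here (the
# filename is a placeholder):
import iris

name_cubes = iris.load('name_output_fields.txt')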
Beispiel #60
0
def _cf_height_from_name(z_coord, lower_bound=None, upper_bound=None):
    """
    Parser for the z component of field headings.

    This parser is specifically for handling the z component of NAME field
    headings, which include height above ground level, height above sea level,
    flight level, etc.  This function returns an iris coordinate
    representing this field heading.

    Args:

    * z_coord (string):
        A field heading, specifically the z component.

    Returns:
        An instance of :class:`iris.coords.AuxCoord` representing the
        interpretation of the supplied field heading.

    """

    # NAMEII - integer/float support.
    # Match against height agl, asl and Pa.
    pattern = re.compile(r"^From\s*"
                         r"(?P<lower_bound>[0-9]+(\.[0-9]+)?)"
                         r"\s*-\s*"
                         r"(?P<upper_bound>[0-9]+(\.[0-9]+)?)"
                         r"\s*(?P<type>m\s*asl|m\s*agl|Pa)"
                         r"(?P<extra>.*)")

    # Match against flight level.
    pattern_fl = re.compile(r"^From\s*"
                            r"(?P<type>FL)"
                            r"(?P<lower_bound>[0-9]+(\.[0-9]+)?)"
                            r"\s*-\s*FL"
                            r"(?P<upper_bound>[0-9]+(\.[0-9]+)?)"
                            r"(?P<extra>.*)")

    # NAMEIII - integer/float support.
    # Match scalar against height agl, asl, Pa, FL
    pattern_scalar = re.compile(r"Z\s*=\s*"
                                r"(?P<point>[0-9]+(\.[0-9]+)?([eE][+-]?\d+)?)"
                                r"\s*(?P<type>m\s*agl|m\s*asl|FL|Pa)"
                                r"(?P<extra>.*)")

    type_name = {
        "magl": "height",
        "masl": "altitude",
        "FL": "flight_level",
        "Pa": "air_pressure",
    }
    patterns = [pattern, pattern_fl, pattern_scalar]

    units = "no-unit"
    points = z_coord
    bounds = None
    standard_name = None
    long_name = "z"

    if upper_bound is not None and lower_bound is not None:
        match_ub = pattern_scalar.match(upper_bound)
        match_lb = pattern_scalar.match(lower_bound)

    for pattern in patterns:
        match = pattern.match(z_coord)
        if match:
            match = match.groupdict()
            # Do not interpret if there is additional information beyond the match.
            if match["extra"]:
                break
            units = match["type"].replace(" ", "")
            name = type_name[units]

            # Interpret points if present.
            if "point" in match:
                points = float(match["point"])
                if upper_bound is not None and lower_bound is not None:
                    bounds = np.array([
                        float(match_lb.groupdict()["point"]),
                        float(match_ub.groupdict()["point"]),
                    ])
            # Interpret points from bounds.
            else:
                bounds = np.array(
                    [float(match["lower_bound"]),
                     float(match["upper_bound"])])
                points = bounds.sum() / 2.0

            long_name = None
            if name == "altitude":
                units = units[0]
                standard_name = name
                long_name = "altitude above sea level"
            elif name == "height":
                units = units[0]
                standard_name = name
                long_name = "height above ground level"
            elif name == "air_pressure":
                standard_name = name
            elif name == "flight_level":
                long_name = name
            units = _parse_units(units)

            break

    coord = AuxCoord(
        points,
        units=units,
        standard_name=standard_name,
        long_name=long_name,
        bounds=bounds,
    )

    return coord
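
# Hedged usage sketch (assumes this parser is available as
# iris.fileformats.name_loaders._cf_height_from_name, as in the Iris source it
# was taken from; the heading strings are examples of the formats matched by
# the regular expressions above):
from iris.fileformats.name_loaders import _cf_height_from_name

height_coord = _cf_height_from_name('Z = 50.00000 m agl')
# -> scalar AuxCoord: standard_name='height', units='m', point 50.0,
#    long_name='height above ground level'
layer_coord = _cf_height_from_name('From     0 -   100m asl')
# -> AuxCoord: standard_name='altitude', units='m', point at the midpoint
#    (50.0) with bounds [0.0, 100.0]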