Example No. 1
 def test_unhandled_vertical(self):
     # unhandled level type
     cube = self._load_basic()
     cube.coord("pressure").rename("not the messiah")
     saved_grib = iris.util.create_temp_filename(suffix='.grib2')
     self.assertRaises(iris.exceptions.TranslationError, iris.save, cube, saved_grib)
     os.remove(saved_grib)
Example No. 2
    def test_contrived_differential1(self):
        # testing :
        # F = ( cos(lat) cos(lon) )
        # dF/dLon = - sin(lon) cos(lat)     (and to simplify /cos(lat) )
        cube = build_cube(numpy.empty((30, 60)), spherical=True)

        x = cube.coord('longitude')
        y = cube.coord('latitude')
        y_dim = cube.coord_dims(y)[0]

        cos_x_pts = numpy.cos(numpy.radians(x.points)).reshape(1, x.shape[0])
        cos_y_pts = numpy.cos(numpy.radians(y.points)).reshape(y.shape[0], 1)
    
        cube.data = cos_y_pts * cos_x_pts
    
        lon_coord = x.unit_converted('radians')
        lat_coord = y.unit_converted('radians')
        cos_lat_coord = iris.coords.AuxCoord.from_coord(lat_coord)
        cos_lat_coord.points = numpy.cos(lat_coord.points)
        cos_lat_coord.units = '1'
        cos_lat_coord.rename('cos({})'.format(lat_coord.name()))
        
        temp = iris.analysis.calculus.differentiate(cube, lon_coord)
        df_dlon = iris.analysis.maths.divide(temp, cos_lat_coord, y_dim)

        x = df_dlon.coord('longitude')
        y = df_dlon.coord('latitude')
        
        sin_x_pts = numpy.sin(numpy.radians(x.points)).reshape(1, x.shape[0])
        y_ones = numpy.ones((y.shape[0] , 1))
        
        data = - sin_x_pts * y_ones
        result = df_dlon.copy(data=data)
        
        numpy.testing.assert_array_almost_equal(result.data, df_dlon.data, decimal=3)
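For reference, the identity behind the comments in test_contrived_differential1 above, with longitude \lambda and latitude \varphi:

    F(\lambda, \varphi) = \cos\varphi \, \cos\lambda
    \frac{\partial F}{\partial \lambda} = -\cos\varphi \, \sin\lambda
    \frac{1}{\cos\varphi} \, \frac{\partial F}{\partial \lambda} = -\sin\lambda

so after dividing by cos(lat) the expected field is simply -sin(lon), which is exactly the array the test constructs.
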
    def get_coord_pts(self, cube):
        """return (x_pts, x_ones, y_pts, y_ones, z_pts, z_ones) for the given cube."""
        x = cube.coord(axis='X')
        y = cube.coord(axis='Y')
        z = cube.coord(axis='Z')

        if z and z.shape[0] > 1:
            x_shp = (1, 1, x.shape[0])
            y_shp = (1, y.shape[0], 1)
            z_shp = (z.shape[0], 1, 1)
        else:
            x_shp = (1, x.shape[0])
            y_shp = (y.shape[0], 1)
            z_shp = None

        x_pts = x.points.reshape(x_shp)
        y_pts = y.points.reshape(y_shp)

        x_ones = np.ones(x_shp)
        y_ones = np.ones(y_shp)

        if z_shp:
            z_pts = z.points.reshape(z_shp)
            z_ones = np.ones(z_shp)
        else:
            z_pts = None
            z_ones = None

        return (x_pts, x_ones, y_pts, y_ones, z_pts, z_ones)
Example No. 4
 def test_scalar_int32_pressure(self):
     # Make sure we can save a scalar int32 coordinate with unit conversion.
     cube = self._load_basic()
     cube.coord("pressure").points = np.array([200], dtype=np.int32)
     cube.coord("pressure").units = "hPa"
     with self.temp_filename(".grib2") as testfile:
         iris.save(cube, testfile)
    def test_contrived_spherical_curl1(self):
        # testing:
        # F(lon, lat, r) = (- r sin(lon), -r cos(lon) sin(lat), 0)
        # curl( F(x, y, z) ) = (0, 0, 0)
        cube = build_cube(np.empty((30, 60)), spherical=True)
        radius = iris.analysis.cartography.DEFAULT_SPHERICAL_EARTH_RADIUS

        x = cube.coord('longitude')
        y = cube.coord('latitude')

        cos_x_pts = np.cos(np.radians(x.points)).reshape(1, x.shape[0])
        sin_x_pts = np.sin(np.radians(x.points)).reshape(1, x.shape[0])
        cos_y_pts = np.cos(np.radians(y.points)).reshape(y.shape[0], 1)
        sin_y_pts = np.sin(np.radians(y.points)).reshape(y.shape[0], 1)
        y_ones = np.ones((cube.shape[0], 1))

        u = cube.copy(data=-sin_x_pts * y_ones * radius)
        v = cube.copy(data=-cos_x_pts * sin_y_pts * radius)
        u.rename('u_wind')
        v.rename('v_wind')

        r = iris.analysis.calculus.curl(u, v)[2]

        result = r.copy(data=r.data * 0)

        # Note: This numerical comparison was created when the radius was 1000 times smaller
        np.testing.assert_array_almost_equal(result.data[5:-5], r.data[5:-5]/1000.0, decimal=1)
        self.assertCML(r, ('analysis', 'calculus', 'grad_contrived1.cml'), checksum=False)
Example No. 6
 def test_string_a_b(self):
     templates = (("a", "0"), ("b", "1"), ("c", "2"), ("d", "3"))
     cubes = [self._make_cube(a, b) for a, b in templates]
     cube = iris.cube.CubeList(cubes).merge()[0]
     self.assertCML(cube, ("merge", "string_a_b.cml"), checksum=False)
     self.assertIsInstance(cube.coord("a"), AuxCoord)
     self.assertIsInstance(cube.coord("b"), AuxCoord)
Example No. 7
 def test_string_b_with_aux(self):
     templates = ((0, "a"), (1, "b"), (2, "c"), (3, "d"))
     cubes = [self._make_cube(a, b, a_dim=True) for a, b in templates]
     cube = iris.cube.CubeList(cubes).merge()[0]
     self.assertCML(cube, ("merge", "string_b_with_dim.cml"), checksum=False)
     self.assertIsInstance(cube.coord("a"), DimCoord)
     self.assertTrue(cube.coord("a") in cube.dim_coords)
     self.assertIsInstance(cube.coord("b"), AuxCoord)
 def test_bounded_coordinate(self):
     # The results should be exactly the same as for the
     # non-bounded case.
     cube = self.simple2d_cube
     cube.coord('dim1').guess_bounds()
     r = iris.analysis.interpolate.linear(cube, [('dim1', [4, 5])])
     np.testing.assert_array_equal(r.data, np.array([[ 1.5,  2.5,  3.5], [ 3. ,  4. ,  5. ]]))
     self.assertCML(r, ('analysis', 'interpolation', 'linear', 'simple_multiple_points.cml'))
Example No. 9
 def test_string_a_b(self):
     templates = (('a', '0'), ('b', '1'), ('c', '2'), ('d', '3'))
     cubes = [self._make_cube(a, b) for a, b in templates]
     cube = iris.cube.CubeList(cubes).merge()[0]
     self.assertCML(cube, ('merge', 'string_a_b.cml'),
                    checksum=False)
     self.assertTrue(isinstance(cube.coord('a'), AuxCoord))
     self.assertTrue(isinstance(cube.coord('b'), AuxCoord))
Example No. 10
 def test_no_coord_system(self):
     cube = iris.load_cube(tests.get_data_path(('PP', 'aPPglob1', 'global.pp')))
     cube.coord('longitude').coord_system = None
     cube.coord('latitude').coord_system = None
     new_cube, extent = iris.analysis.cartography.project(cube,
                                                          self.target_proj)
     self.assertCML(new_cube,
                    ('analysis', 'project', 'default_source_cs.cml'))
Example No. 11
 def test_string_a_with_dim(self):
     templates = (("a", 0), ("b", 1), ("c", 2), ("d", 3))
     cubes = [self._make_cube(a, b, b_dim=True) for a, b in templates]
     cube = iris.cube.CubeList(cubes).merge()[0]
     self.assertCML(cube, ("merge", "string_a_with_dim.cml"), checksum=False)
     self.assertIsInstance(cube.coord("a"), AuxCoord)
     self.assertIsInstance(cube.coord("b"), DimCoord)
     self.assertTrue(cube.coord("b") in cube.dim_coords)
Example No. 12
 def test_a_dim_b_dim(self):
     templates = ((0, 10), (1, 11), (2, 12), (3, 13))
     cubes = [self._make_cube(a, b, a_dim=True, b_dim=True) for a, b in templates]
     cube = iris.cube.CubeList(cubes).merge()[0]
     self.assertCML(cube, ("merge", "a_dim_b_dim.cml"), checksum=False)
     self.assertIsInstance(cube.coord("a"), DimCoord)
     self.assertTrue(cube.coord("a") in cube.dim_coords)
     self.assertIsInstance(cube.coord("b"), DimCoord)
     self.assertTrue(cube.coord("b") in cube.aux_coords)
Example No. 13
 def test_string_a_with_dim(self):
     templates = (('a', 0), ('b', 1), ('c', 2), ('d', 3))
     cubes = [self._make_cube(a, b, b_dim=True) for a, b in templates]
     cube = iris.cube.CubeList(cubes).merge()[0]
     self.assertCML(cube, ('merge', 'string_a_with_dim.cml'),
                    checksum=False)
     self.assertTrue(isinstance(cube.coord('a'), AuxCoord))
     self.assertTrue(isinstance(cube.coord('b'), DimCoord))
     self.assertTrue(cube.coord('b') in cube.dim_coords)
Example No. 14
 def test_string_b_with_aux(self):
     templates = ((0, 'a'), (1, 'b'), (2, 'c'), (3, 'd'))
     cubes = [self._make_cube(a, b, a_dim=True) for a, b in templates]
     cube = iris.cube.CubeList(cubes).merge()[0]
     self.assertCML(cube, ('merge', 'string_b_with_dim.cml'),
                    checksum=False)
     self.assertTrue(isinstance(cube.coord('a'), DimCoord))
     self.assertTrue(cube.coord('a') in cube.dim_coords)
     self.assertTrue(isinstance(cube.coord('b'), AuxCoord))
Example No. 15
 def test_no_coord_system(self):
     cube = low_res_4d()
     cube.coord('grid_longitude').coord_system = None
     cube.coord('grid_latitude').coord_system = None
     with iris.tests.mock.patch('warnings.warn') as warn:
         _, _ = project(cube, ROBINSON)
     warn.assert_called_once_with('Coordinate system of latitude and '
                                  'longitude coordinates is not specified. '
                                  'Assuming WGS84 Geodetic.')
Example No. 16
    def test_fully_wrapped_not_circular(self):
        cube = stock.lat_lon_cube()
        new_long = cube.coord('longitude').copy(
            cube.coord('longitude').points + 710)
        cube.remove_coord('longitude')
        cube.add_dim_coord(new_long, 1)

        interpolator = LinearInterpolator(cube, ['longitude'])
        res = interpolator([-10])
        self.assertArrayEqual(res.data, cube[:, 1].data)
Example No. 17
 def test_a_dim_b_aux(self):
     templates = ((0, 10), (1, 11), (2, 12), (3, 13))
     cubes = [self._make_cube(a, b, a_dim=True) for a, b in templates]
     cube = iris.cube.CubeList(cubes).merge()[0]
     self.assertCML(cube, ('merge', 'a_dim_b_aux.cml'),
                    checksum=False)
     self.assertTrue(isinstance(cube.coord('a'), DimCoord))
     self.assertTrue(cube.coord('a') in cube.dim_coords)
     self.assertTrue(isinstance(cube.coord('b'), DimCoord))
     self.assertTrue(cube.coord('b') in cube.aux_coords)
Example No. 18
 def test_time_non_dim_coord(self):
     # => rt: 1 fp, t (bounded): 2
     triples = ((5, 0, 2.5), (10, 0, 5))
     cubes = [self._make_cube(fp, rt, t) for fp, rt, t in triples]
     for end_time, cube in zip([5, 10], cubes):
         cube.coord("time").bounds = [0, end_time]
     cube, = iris.cube.CubeList(cubes).merge()
     self.assertCML(cube, ("merge", "time_triple_time_non_dim_coord.cml"), checksum=False)
     # make sure that forecast_period is the dimensioned coordinate (as time becomes an AuxCoord)
     self.assertEqual(cube.coord(dimensions=0, dim_coords=True).name(), "forecast_period")
Example No. 19
def _pretend_unrotated(cube):
    lat = cube.coord('grid_latitude')
    lon = cube.coord('grid_longitude')
    lat.coord_system.n_pole = iris.coord_systems.GeoPosition(90, 0)
    lon.coord_system.n_pole = iris.coord_systems.GeoPosition(90, 0)
    lat.standard_name = "latitude"
    lon.standard_name = "longitude"
    
    lon.points = lon.points - 360
    if lon.bounds is not None:
        lon.bounds = lon.bounds - 360
Example No. 20
 def test_coord_attributes(self):
     def custom_coord_callback(cube, field, filename):
         cube.coord('time').attributes['monty'] = 'python'
         cube.coord('time').attributes['brain'] = 'hurts'
     
     # Load slices, decorating a coord with custom attributes
     cubes = iris._load_cubes(self._data_path, callback=custom_coord_callback)
     # Merge
     merged = iris.cube.CubeList._extract_and_merge(cubes, constraints=None, strict=False, merge_unique=False)
     # Check the custom attributes are in the merged cube
     for cube in merged:
         assert(cube.coord('time').attributes['monty'] == 'python')
         assert(cube.coord('time').attributes['brain'] == 'hurts')
Example No. 21
    def test_coord_attributes(self):
        def custom_coord_callback(cube, field, filename):
            cube.coord("time").attributes["monty"] = "python"
            cube.coord("time").attributes["brain"] = "hurts"

        # Load slices, decorating a coord with custom attributes
        cubes = iris.load_raw(self._data_path, callback=custom_coord_callback)
        # Merge
        merged = iris.cube.CubeList._extract_and_merge(cubes, constraints=None, strict=False, merge_unique=False)
        # Check the custom attributes are in the merged cube
        for cube in merged:
            assert cube.coord("time").attributes["monty"] == "python"
            assert cube.coord("time").attributes["brain"] == "hurts"
Example No. 22
 def test_concat_2x2d_aux_x_bounds(self):
     cubes = []
     y = (0, 2)
     cube = _make_cube((0, 4), y, 1, aux="x")
     cube.coord("x-aux").guess_bounds()
     cubes.append(cube)
     cube = _make_cube((4, 6), y, 2, aux="x")
     cube.coord("x-aux").guess_bounds()
     cubes.append(cube)
     result = concatenate(cubes)
     self.assertCML(result, ("concatenate", "concat_2x2d_aux_x_bounds.cml"))
     self.assertEqual(len(result), 1)
     self.assertEqual(result[0].shape, (2, 6))
Example No. 23
    def test_weighted_mean_little(self):
        data = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.float32)
        weights = np.array([[9, 8, 7], [6, 5, 4], [3, 2, 1]], dtype=np.float32)

        cube = iris.cube.Cube(data, long_name="test_data", units="1")
        hcs = iris.coord_systems.GeogCS(6371229)
        lat_coord = iris.coords.DimCoord(np.array([1, 2, 3], dtype=np.float32), long_name="lat", units="1", coord_system=hcs)
        lon_coord = iris.coords.DimCoord(np.array([1, 2, 3], dtype=np.float32), long_name="lon", units="1", coord_system=hcs)
        cube.add_dim_coord(lat_coord, 0)
        cube.add_dim_coord(lon_coord, 1)
        cube.add_aux_coord(iris.coords.AuxCoord(np.arange(3, dtype=np.float32), long_name="dummy", units=1), 1)
        self.assertCML(cube, ('analysis', 'weighted_mean_source.cml'))

        a = cube.collapsed('lat', iris.analysis.MEAN, weights=weights)
        # np.ma.average doesn't apply type promotion rules in some versions,
        # and instead makes the result type float64. To ignore that case we
        # fix up the dtype here if it is promotable from float32. We still want
        # to catch cases where there is a loss of precision however.
        if a.dtype > np.float32:
            cast_data = a.data.astype(np.float32)
            a.replace(cast_data, fill_value=a.fill_value)
        self.assertCMLApproxData(a, ('analysis', 'weighted_mean_lat.cml'))

        b = cube.collapsed(lon_coord, iris.analysis.MEAN, weights=weights)
        if b.dtype > np.float32:
            cast_data = b.data.astype(np.float32)
            b.replace(cast_data, fill_value=b.fill_value)
        b.data = np.asarray(b.data)
        self.assertCMLApproxData(b, ('analysis', 'weighted_mean_lon.cml'))
        self.assertEqual(b.coord('dummy').shape, (1, ))

        # test collapsing multiple coordinates (and the fact that one of the coordinates isn't the same coordinate instance as on the cube)
        c = cube.collapsed([lat_coord[:], lon_coord], iris.analysis.MEAN, weights=weights)
        if c.dtype > np.float32:
            cast_data = c.data.astype(np.float32)
            c.replace(cast_data, fill_value=c.fill_value)
        self.assertCMLApproxData(c, ('analysis', 'weighted_mean_latlon.cml'))
        self.assertEqual(c.coord('dummy').shape, (1, ))

        # Check new coord bounds - made from points
        self.assertArrayEqual(c.coord('lat').bounds, [[1, 3]])

        # Check new coord bounds - made from bounds
        cube.coord('lat').bounds = [[0.5, 1.5], [1.5, 2.5], [2.5, 3.5]]
        c = cube.collapsed(['lat', 'lon'], iris.analysis.MEAN, weights=weights)
        self.assertArrayEqual(c.coord('lat').bounds, [[0.5, 3.5]])
        cube.coord('lat').bounds = None

        # Check there was no residual change
        self.assertCML(cube, ('analysis', 'weighted_mean_source.cml'))
Example No. 24
File: plot.py  Project: bblay/iris
def _fill_orography(cube, coords, mode, vert_plot, horiz_plot, style_args):
    # Find the orography coordinate.
    orography = cube.coord("surface_altitude")

    if coords is not None:
        plot_defn = _get_plot_defn_custom_coords_picked(cube, coords, mode, ndims=2)
    else:
        plot_defn = _get_plot_defn(cube, mode, ndims=2)
    v_coord, u_coord = plot_defn.coords

    # Find which plot coordinate corresponds to the derived altitude, so that
    # we can replace altitude with the surface altitude.
    if v_coord and v_coord.standard_name == "altitude":
        # v is altitude, so plot u and orography with orog in the y direction.
        result = vert_plot(u_coord, orography, style_args)
    elif u_coord and u_coord.standard_name == "altitude":
        # u is altitude, so plot v and orography with orog in the x direction.
        result = horiz_plot(v_coord, orography, style_args)
    else:
        raise ValueError(
            "Plot does not use hybrid height. One of the "
            "coordinates to plot must be altitude, but %s and %s "
            "were given." % (u_coord.name(), v_coord.name())
        )
    return result
Example No. 25
def _dereference_args(factory, reference_targets, regrid_cache, cube):
    """Converts all the arguments for a factory into concrete coordinates."""
    args = []
    for arg in factory.args:
        if isinstance(arg, Reference):
            if arg.name in reference_targets:
                src = reference_targets[arg.name].as_cube()
                # If necessary, regrid the reference cube to
                # match the grid of this cube.
                src = _ensure_aligned(regrid_cache, src, cube)
                if src is not None:
                    new_coord = iris.coords.AuxCoord(src.data,
                                                     src.standard_name,
                                                     src.long_name,
                                                     src.var_name,
                                                     src.units,
                                                     attributes=src.attributes)
                    dims = [cube.coord_dims(src_coord)[0]
                                for src_coord in src.dim_coords]
                    cube.add_aux_coord(new_coord, dims)
                    args.append(new_coord)
                else:
                    raise _ReferenceError('Unable to regrid reference for'
                                          ' {!r}'.format(arg.name))
            else:
                raise _ReferenceError("The file(s) {{filenames}} don't contain"
                                      " field(s) for {!r}.".format(arg.name))
        else:
            # If it wasn't a Reference, then arg is a dictionary
            # of keyword arguments for cube.coord(...).
            args.append(cube.coord(**arg))
    return args
Example No. 26
 def test_xy_range_geog_cs_regional(self):
     cube = iris.tests.stock.global_pp()
     cube = cube[10:20, 20:30]
     self.assertFalse(cube.coord('longitude').circular)
     result = iris.analysis.cartography._xy_range(cube)
     np.testing.assert_array_almost_equal(
         result, ((75, 108.75), (42.5, 65)), decimal=0)
Example No. 27
 def test_forecast_period(self):
     # unhandled unit
     cube = self._load_basic()
     cube.coord("forecast_period").units = cf_units.Unit("years")
     saved_grib = iris.util.create_temp_filename(suffix='.grib2')
     self.assertRaises(iris.exceptions.TranslationError, iris.save, cube, saved_grib)
     os.remove(saved_grib)
Example No. 28
def _map_common(draw_method_name, arg_func, mode, cube, data, *args, **kwargs):
    """
    Draw the given cube on a map using its points or bounds.

    "Mode" parameter will switch functionality between POINT or BOUND plotting.

    """
    # get the 2d x and 2d y from the CS
    if mode == iris.coords.POINT_MODE:
        x, y = cartography.get_xy_grids(cube)
    else:
        try:
            x, y = cartography.get_xy_contiguous_bounded_grids(cube)
        # Exception translation.
        except iris.exceptions.CoordinateMultiDimError:
            raise ValueError("Could not get XY grid from bounds. "
                             "X or Y coordinate not 1D.")
        except ValueError:
            raise ValueError("Could not get XY grid from bounds. "
                             "X or Y coordinate doesn't have 2 bounds "
                             "per point.")

    # take a copy of the data so that we can make modifications to it
    data = data.copy()

    # If we are global, then append the first column of the data array to the
    # last (and add 360 degrees). NOTE: if it is found that this block of code
    # is useful in anywhere other than this plotting routine, it may be better
    # placed in the CS.
    x_coord = cube.coord(axis="X")
    if getattr(x_coord, 'circular', False):
        _, direction = iris.util.monotonic(x_coord.points,
                                           return_direction=True)
        y = np.append(y, y[:, 0:1], axis=1)
        x = np.append(x, x[:, 0:1] + 360 * direction, axis=1)
        data = ma.concatenate([data, data[:, 0:1]], axis=1)

    # Replace non-cartopy subplot/axes with a cartopy alternative.
    cs = cube.coord_system('CoordSystem')
    if cs:
        cartopy_proj = cs.as_cartopy_projection()
    else:
        cartopy_proj = cartopy.crs.PlateCarree()
    ax = _get_cartopy_axes(cartopy_proj)

    draw_method = getattr(ax, draw_method_name)

    # Set the "from transform" keyword.
    # NB. While cartopy doesn't support spherical contours, just use the
    # projection as the source CRS.
    assert 'transform' not in kwargs, 'Transform keyword is not allowed.'
    kwargs['transform'] = cartopy_proj

    if arg_func is not None:
        new_args, kwargs = arg_func(x, y, data, *args, **kwargs)
    else:
        new_args = (x, y, data) + args

    # Draw the contour lines/filled contours.
    return draw_method(*new_args, **kwargs)
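
A minimal, standalone sketch of the circular wrap-around step used above (hypothetical toy arrays, not iris objects): the first column of the grid and data is appended on the right, with the x values shifted by one modulus so plots close across the seam.

import numpy as np
import numpy.ma as ma

# Hypothetical 2x3 global grid in degrees (values are illustrative only).
x = np.array([[0., 120., 240.],
              [0., 120., 240.]])
y = np.array([[-45., -45., -45.],
              [45., 45., 45.]])
data = ma.masked_array(np.arange(6.).reshape(2, 3))

direction = 1  # longitudes increase monotonically
y = np.append(y, y[:, 0:1], axis=1)
x = np.append(x, x[:, 0:1] + 360 * direction, axis=1)
data = ma.concatenate([data, data[:, 0:1]], axis=1)
# x now runs 0..360 and the first data column is repeated at the end.
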
Example No. 29
 def test_scalar_mask(self):
     # Testing the bug raised in https://github.com/SciTools/iris/pull/123#issuecomment-9309872
     # (the workaround for the np.append bug failed for scalar masks)
     cube = tests.stock.realistic_4d_w_missing_data()
     cube.data = ma.arange(np.product(cube.shape), dtype=np.float32).reshape(cube.shape)
     cube.coord('grid_longitude').circular = True
     # There's no result to test, just make sure we don't cause an exception with the scalar mask.
     _ = iris.analysis.interpolate.linear(cube, [('grid_longitude', 0), ('grid_latitude', 0)])
Example No. 30
 def test_fancy_indexing_bool_array(self):
     cube = self.cube
     cube.data = np.ma.masked_array(cube.data, mask=cube.data > 100000)
     r = cube[:, cube.coord('grid_latitude').points > 1]
     self.assertEqual(r.shape, (10, 218, 720))
     data = cube.data[:, self.cube.coord('grid_latitude').points > 1, :]
     np.testing.assert_array_equal(data, r.data)
     np.testing.assert_array_equal(data.mask, r.data.mask)
Example No. 31
def linear(cube, sample_points, extrapolation_mode='linear'):
    """
    Return a cube of the linearly interpolated points given the desired
    sample points.
    
    Given a list of tuple pairs mapping coordinates to their desired
    values, return a cube with linearly interpolated values. If more
    than one coordinate is specified, the linear interpolation will be
    carried out in sequence, thus providing n-linear interpolation
    (bi-linear, tri-linear, etc.).
    
    .. note::

        By definition, linear interpolation requires all coordinates to
        be 1-dimensional.

    .. note::

        If a specified coordinate is single valued its value will be
        extrapolated to the desired sample points by assuming a gradient of
        zero.
    
    Args:
    
    * cube
        The cube to be interpolated.
        
    * sample_points
        List of one or more tuple pairs mapping coordinate to desired
        points to interpolate. Points may be a scalar or a numpy array
        of values.
    
    Kwargs:
    
    * extrapolation_mode - string - one of 'linear', 'nan' or 'error'
    
        * If 'linear' the point will be calculated by extending the
          gradient of the closest two points.
        * If 'nan' the extrapolated points will be set to NaN.
        * If 'error' a value error will be raised notifying of the
          attempted extrapolation.
    
    .. note::

        If the source cube's data, or any of its resampled coordinates,
        have an integer data type they will be promoted to a floating
        point data type in the result.
     
    """
    if not isinstance(cube, iris.cube.Cube):
        raise ValueError('Expecting a cube instance, got %s' % type(cube))

    if isinstance(sample_points, dict):
        warnings.warn(
            'Providing a dictionary to specify points is deprecated. Please provide a list of (coordinate, values) pairs.'
        )
        sample_points = sample_points.items()

    # catch the case where a user passes a single (coord/name, value) pair rather than a list of pairs
    if sample_points and not (
            isinstance(sample_points[0], collections.Container)
            and not isinstance(sample_points[0], basestring)):
        raise TypeError(
            'Expecting the sample points to be a list of tuple pairs representing (coord, points), got a list of %s.'
            % type(sample_points[0]))

    points = []
    for (coord, values) in sample_points:
        if isinstance(coord, basestring):
            coord = cube.coord(coord)
        else:
            coord = cube.coord(coord=coord)
        points.append((coord, values))
    sample_points = points

    if len(sample_points) == 0:
        raise ValueError(
            'Expecting a non-empty list of coord value pairs, got %r.' %
            sample_points)

    if cube.data.dtype.kind == 'i':
        raise ValueError(
            "Cannot linearly interpolate a cube which has integer type data. Consider casting the "
            "cube's data to floating points in order to continue.")

    bounds_error = (extrapolation_mode == 'error')

    # Handle an over-specified points_dict or a specification which does not describe a data dimension
    data_dimensions_requested = []
    for coord, values in sample_points:
        if coord.ndim > 1:
            raise ValueError('Cannot linearly interpolate over {!r} as it is'
                             ' multi-dimensional.'.format(coord.name()))
        data_dim = cube.coord_dims(coord)
        if not data_dim:
            raise ValueError('Requested a point over a coordinate which does'
                             ' not describe a dimension: {!r}.'.format(
                                 coord.name()))
        else:
            data_dim = data_dim[0]
        if data_dim in data_dimensions_requested:
            raise ValueError('Requested a point which over specifies a'
                             ' dimension: {!r}. '.format(coord.name()))
        data_dimensions_requested.append(data_dim)

    # Iterate over all of the requested keys in the given points_dict calling this routine repeatedly.
    if len(sample_points) > 1:
        result = cube
        for coord, cells in sample_points:
            result = linear(result, [(coord, cells)],
                            extrapolation_mode=extrapolation_mode)
        return result

    else:
        # Now we must be down to a single sample coordinate and its
        # values.
        src_coord, requested_points = sample_points[0]
        sample_values = np.array(requested_points)

        # 1) Define the interpolation characteristics.

        # Get the sample dimension (which we have already tested is not None)
        sample_dim = cube.coord_dims(src_coord)[0]

        # Construct source data & source coordinate values suitable for
        # SciPy's interp1d.
        if getattr(src_coord, 'circular', False):
            coord_slice_in_cube = [slice(None, None)] * cube.data.ndim
            coord_slice_in_cube[sample_dim] = slice(0, 1)
            modulus = np.array(src_coord.units.modulus or 0,
                               dtype=src_coord.dtype)
            src_points = np.append(src_coord.points,
                                   src_coord.points[0] + modulus)

            # TODO: Restore this code after resolution of the following issue:
            # https://github.com/numpy/numpy/issues/478
            #            data = np.append(cube.data,
            #                             cube.data[tuple(coord_slice_in_cube)],
            #                             axis=sample_dim)
            # This is the alternative, temporary workaround.
            # It doesn't use append on an nD mask.
            if (not isinstance(cube.data, ma.MaskedArray)
                    or not isinstance(cube.data.mask, np.ndarray)
                    or len(cube.data.mask.shape) == 0):
                data = np.append(cube.data,
                                 cube.data[tuple(coord_slice_in_cube)],
                                 axis=sample_dim)
            else:
                new_data = np.append(
                    cube.data.data,
                    cube.data.data[tuple(coord_slice_in_cube)],
                    axis=sample_dim)
                new_mask = np.append(
                    cube.data.mask,
                    cube.data.mask[tuple(coord_slice_in_cube)],
                    axis=sample_dim)
                data = ma.array(new_data, mask=new_mask)
        else:
            src_points = src_coord.points
            data = cube.data

        # Map all the requested values into the range of the source
        # data (centered over the centre of the source data to allow
        # extrapolation where required).
        src_axis = iris.util.guess_coord_axis(src_coord)
        if src_axis == 'X' and src_coord.units.modulus:
            modulus = src_coord.units.modulus
            offset = (src_points.max() + src_points.min() - modulus) * 0.5
            sample_values = ((sample_values - offset) % modulus) + offset

        if len(src_points) == 1:
            if extrapolation_mode == 'error' and \
                    np.any(sample_values != src_points):
                raise ValueError('Attempting to extrapolate from a single '
                                 'point with extrapolation mode set '
                                 'to {!r}.'.format(extrapolation_mode))
            direction = 0

            def interpolate(fx, new_x, axis=None, **kwargs):
                # All kwargs other than axis are ignored.
                if axis is None:
                    axis = -1
                new_x = np.array(new_x)
                new_shape = list(fx.shape)
                new_shape[axis] = new_x.size
                fx = np.broadcast_arrays(fx, np.empty(new_shape))[0].copy()
                if extrapolation_mode == 'nan':
                    indices = [slice(None)] * fx.ndim
                    indices[axis] = new_x != src_points
                    fx[tuple(indices)] = np.nan
                # If new_x is a scalar, then remove the dimension from fx.
                if not new_x.shape:
                    del new_shape[axis]
                    fx.shape = new_shape
                return fx
        else:
            monotonic, direction = iris.util.monotonic(src_points,
                                                       return_direction=True)
            if not monotonic:
                raise ValueError('Unable to linearly interpolate this '
                                 'cube as the coordinate {!r} is not '
                                 'monotonic'.format(src_coord.name()))

            # SciPy's interp1d requires monotonic increasing coord values.
            if direction == -1:
                src_points = iris.util.reverse(src_points, axes=0)
                data = iris.util.reverse(data, axes=sample_dim)

            # Wrap it all up in a function which makes the right kind of
            # interpolator/extrapolator.
            # NB. This uses a closure to capture the values of src_points,
            # bounds_error, and extrapolation_mode.
            def interpolate(fx, new_x, **kwargs):
                # SciPy's interp1d needs float values, so if we're given
                # integer values, convert them to the smallest possible
                # float dtype that can accurately preserve the values.
                if fx.dtype.kind == 'i':
                    fx = fx.astype(np.promote_types(fx.dtype, np.float16))
                x = src_points.astype(fx.dtype)
                interpolator = interp1d(x,
                                        fx,
                                        kind='linear',
                                        bounds_error=bounds_error,
                                        **kwargs)
                if extrapolation_mode == 'linear':
                    interpolator = Linear1dExtrapolator(interpolator)
                new_fx = interpolator(np.array(new_x, dtype=fx.dtype))
                return new_fx

        # 2) Interpolate the data and produce our new Cube.
        data = interpolate(data, sample_values, axis=sample_dim, copy=False)
        new_cube = iris.cube.Cube(data)
        new_cube.metadata = cube.metadata

        # If requested_points is an array scalar then `new_cube` will
        # have one less dimension than `cube`. (The `sample_dim`
        # dimension will vanish.) In which case we build a mapping from
        # `cube` dimensions to `new_cube` dimensions.
        dim_mapping = None
        if new_cube.ndim != cube.ndim:
            dim_mapping = {i: i for i in range(sample_dim)}
            dim_mapping[sample_dim] = None
            for i in range(sample_dim + 1, cube.ndim):
                dim_mapping[i] = i - 1

        # 2) Copy/interpolate the coordinates.
        for dim_coord in cube.dim_coords:
            dims = cube.coord_dims(dim_coord)
            if sample_dim in dims:
                new_coord = _resample_coord(dim_coord, src_coord, direction,
                                            requested_points, interpolate)
            else:
                new_coord = dim_coord.copy()
            if dim_mapping:
                dims = [
                    dim_mapping[dim] for dim in dims
                    if dim_mapping[dim] is not None
                ]
            if isinstance(new_coord, iris.coords.DimCoord) and dims:
                new_cube.add_dim_coord(new_coord, dims)
            else:
                new_cube.add_aux_coord(new_coord, dims)

        for coord in cube.aux_coords:
            dims = cube.coord_dims(coord)
            if sample_dim in dims:
                new_coord = _resample_coord(coord, src_coord, direction,
                                            requested_points, interpolate)
            else:
                new_coord = coord.copy()
            if dim_mapping:
                dims = [
                    dim_mapping[dim] for dim in dims
                    if dim_mapping[dim] is not None
                ]
            new_cube.add_aux_coord(new_coord, dims)

        return new_cube
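
A brief usage sketch for the routine above, assuming a cube with 1-D 'latitude' and 'longitude' dimension coordinates (the filename and coordinate names are placeholders):

import iris
import iris.analysis.interpolate

# Hypothetical file and coordinate names, for illustration only.
cube = iris.load_cube('air_temperature.nc')

# Bi-linear interpolation: the two coordinates are interpolated in sequence.
result = iris.analysis.interpolate.linear(
    cube,
    [('latitude', [10.0, 20.0]), ('longitude', 45.0)],
    extrapolation_mode='nan')
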
Example No. 32
 def as_coord(coord):
     coord = cube.coord(coord)
     return coord
Example No. 33
 def as_coord(coord):
     if isinstance(coord, basestring):
         coord = cube.coord(name=coord)
     else:
         coord = cube.coord(coord=coord)
     return coord
Example No. 34
def regrid_time(cube, frequency):
    """
    Align time axis for cubes so they can be subtracted.

    Operations on time units, time points and auxiliary
    coordinates so that any cube from cubes can be subtracted from any
    other cube from cubes. Currently this function supports
    yearly (frequency=yr), monthly (frequency=mon),
    daily (frequency=day), 6-hourly (frequency=6hr),
    3-hourly (frequency=3hr) and hourly (frequency=1hr) data time frequencies.

    Parameters
    ----------
    cube: iris.cube.Cube
        input cube.
    frequency: str
        data frequency: yr, mon, day, 1hr, 3hr or 6hr

    Returns
    -------
    iris.cube.Cube
        cube with converted time axis and units.
    """
    # standardize time points
    time_c = [cell.point for cell in cube.coord('time').cells()]
    if frequency == 'yr':
        time_cells = [
            datetime.datetime(t.year, 7, 1, 0, 0, 0) for t in time_c
        ]
    elif frequency == 'mon':
        time_cells = [
            datetime.datetime(t.year, t.month, 15, 0, 0, 0) for t in time_c
        ]
    elif frequency == 'day':
        time_cells = [
            datetime.datetime(t.year, t.month, t.day, 0, 0, 0) for t in time_c
        ]
    elif frequency == '1hr':
        time_cells = [
            datetime.datetime(t.year, t.month, t.day, t.hour, 0, 0)
            for t in time_c
        ]
    elif frequency == '3hr':
        time_cells = [
            datetime.datetime(t.year, t.month, t.day, t.hour - t.hour % 3, 0,
                              0) for t in time_c
        ]
    elif frequency == '6hr':
        time_cells = [
            datetime.datetime(t.year, t.month, t.day, t.hour - t.hour % 6, 0,
                              0) for t in time_c
        ]

    cube.coord('time').points = [
        cube.coord('time').units.date2num(cl)
        for cl in time_cells]

    # uniformize bounds
    cube.coord('time').bounds = None
    cube.coord('time').guess_bounds()

    # remove aux coords that will differ
    reset_aux = ['day_of_month', 'day_of_year']
    for auxcoord in cube.aux_coords:
        if auxcoord.long_name in reset_aux:
            cube.remove_coord(auxcoord)

    # re-add the converted aux coords
    iris.coord_categorisation.add_day_of_month(cube,
                                               cube.coord('time'),
                                               name='day_of_month')
    iris.coord_categorisation.add_day_of_year(cube,
                                              cube.coord('time'),
                                              name='day_of_year')

    return cube
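
A short usage sketch, assuming two monthly cubes whose time points originally fall on different days of the month:

# Sketch: cube_a and cube_b are assumed to be already-loaded monthly cubes.
# Align both onto mid-month time points so they can be subtracted.
cube_a = regrid_time(cube_a, frequency='mon')
cube_b = regrid_time(cube_b, frequency='mon')
difference = cube_a - cube_b
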
Example No. 35
def extract_time(cube, start_year, start_month, start_day, end_year, end_month,
                 end_day):
    """
    Extract a time range from a cube.

    Given a time range passed in as a series of years, months and days, it
    returns a time-extracted cube with data only within the specified
    time range.

    Parameters
    ----------
    cube: iris.cube.Cube
        input cube.
    start_year: int
        start year
    start_month: int
        start month
    start_day: int
        start day
    end_year: int
        end year
    end_month: int
        end month
    end_day: int
        end day

    Returns
    -------
    iris.cube.Cube
        Sliced cube.

    Raises
    ------
    ValueError
        if time ranges are outside the cube time limits

    """
    time_coord = cube.coord('time')
    time_units = time_coord.units
    if time_units.calendar == '360_day':
        if start_day > 30:
            start_day = 30
        if end_day > 30:
            end_day = 30
    t_1 = PartialDateTime(
        year=int(start_year), month=int(start_month), day=int(start_day))
    t_2 = PartialDateTime(
        year=int(end_year), month=int(end_month), day=int(end_day))

    constraint = iris.Constraint(
        time=lambda t: t_1 <= t.point < t_2)

    cube_slice = cube.extract(constraint)
    if cube_slice is None:
        raise ValueError(
            f"Time slice {start_year:0>4d}-{start_month:0>2d}-{start_day:0>2d}"
            f" to {end_year:0>4d}-{end_month:0>2d}-{end_day:0>2d} is outside "
            f"cube time bounds {time_coord.cell(0)} to {time_coord.cell(-1)}."
        )

    # Handle the case where the time dimension was removed because only one point was selected.
    if cube_slice.ndim != cube.ndim:
        if cube_slice.coord('time') == time_coord:
            logger.debug('No change needed to time.')
            return cube

    return cube_slice
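
A minimal usage sketch for the function above:

# Sketch: keep data from 2000-01-01 (inclusive) up to 2010-01-01 (exclusive),
# assuming `cube` is an already-loaded iris cube covering that period.
decade = extract_time(cube, 2000, 1, 1, 2010, 1, 1)
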
Example No. 36
def cube_delta(cube, coord):
    """
    Given a cube, calculate the difference between successive values along
    the given coord's direction.


    Args:

    * coord
        either a Coord instance or the unique name of a coordinate in the cube.
        If a Coord instance is provided, it does not necessarily have to
        exist in the cube.

    Example usage::

        change_in_temperature_wrt_pressure = \
cube_delta(temperature_cube, 'pressure')

    .. note:: Missing data support not yet implemented.

    """
    # handle the case where a user passes a coordinate name
    if isinstance(coord, basestring):
        coord = cube.coord(coord)

    if coord.ndim != 1:
        raise iris.exceptions.CoordinateMultiDimError(coord)

    # Try and get a coord dim
    delta_dims = cube.coord_dims(coord.name())
    if ((coord.shape[0] == 1 and not getattr(coord, 'circular', False))
            or not delta_dims):
        raise ValueError('Cannot calculate delta over {!r} as it has '
                         'length of 1.'.format(coord.name()))
    delta_dim = delta_dims[0]

    # Calculate the actual delta, taking into account whether the given
    # coordinate is circular.
    delta_cube_data = delta(cube.data,
                            delta_dim,
                            circular=getattr(coord, 'circular', False))

    # If the coord/dim is circular there is no change in cube shape
    if getattr(coord, 'circular', False):
        delta_cube = cube.copy(data=delta_cube_data)
    else:
        # Subset the cube to the appropriate new shape by knocking off
        # the last row of the delta dimension.
        subset_slice = [slice(None, None)] * cube.ndim
        subset_slice[delta_dim] = slice(None, -1)
        delta_cube = cube[tuple(subset_slice)]
        delta_cube.data = delta_cube_data

    # Replace the delta_dim coords with midpoints
    # (no shape change if circular).
    for cube_coord in cube.coords(dimensions=delta_dim):
        delta_cube.replace_coord(
            _construct_midpoint_coord(cube_coord,
                                      circular=getattr(coord, 'circular',
                                                       False)))

    delta_cube.rename('change_in_{}_wrt_{}'.format(delta_cube.name(),
                                                   coord.name()))

    return delta_cube
Example No. 37
    def test_weighted_mean_little(self):
        data = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.float32)
        weights = np.array([[9, 8, 7], [6, 5, 4], [3, 2, 1]], dtype=np.float32)

        cube = iris.cube.Cube(data, long_name="test_data", units="1")
        hcs = iris.coord_systems.GeogCS(6371229)
        lat_coord = iris.coords.DimCoord(np.array([1, 2, 3], dtype=np.float32),
                                         long_name="lat",
                                         units="1",
                                         coord_system=hcs)
        lon_coord = iris.coords.DimCoord(np.array([1, 2, 3], dtype=np.float32),
                                         long_name="lon",
                                         units="1",
                                         coord_system=hcs)
        cube.add_dim_coord(lat_coord, 0)
        cube.add_dim_coord(lon_coord, 1)
        cube.add_aux_coord(
            iris.coords.AuxCoord(np.arange(3, dtype=np.float32),
                                 long_name="dummy",
                                 units=1), 1)
        self.assertCML(cube, ('analysis', 'weighted_mean_source.cml'))

        a = cube.collapsed('lat', iris.analysis.MEAN, weights=weights)
        # np.ma.average doesn't apply type promotion rules in some versions,
        # and instead makes the result type float64. To ignore that case we
        # fix up the dtype here if it is promotable from float32. We still want
        # to catch cases where there is a loss of precision however.
        if a.dtype > np.float32:
            cast_data = a.data.astype(np.float32)
            a.replace(cast_data, fill_value=a.fill_value)
        self.assertCMLApproxData(a, ('analysis', 'weighted_mean_lat.cml'))

        b = cube.collapsed(lon_coord, iris.analysis.MEAN, weights=weights)
        if b.dtype > np.float32:
            cast_data = b.data.astype(np.float32)
            b.replace(cast_data, fill_value=b.fill_value)
        b.data = np.asarray(b.data)
        self.assertCMLApproxData(b, ('analysis', 'weighted_mean_lon.cml'))
        self.assertEqual(b.coord('dummy').shape, (1, ))

        # test collapsing multiple coordinates (and the fact that one of the coordinates isn't the same coordinate instance as on the cube)
        c = cube.collapsed([lat_coord[:], lon_coord],
                           iris.analysis.MEAN,
                           weights=weights)
        if c.dtype > np.float32:
            cast_data = c.data.astype(np.float32)
            c.replace(cast_data, fill_value=c.fill_value)
        self.assertCMLApproxData(c, ('analysis', 'weighted_mean_latlon.cml'))
        self.assertEqual(c.coord('dummy').shape, (1, ))

        # Check new coord bounds - made from points
        self.assertArrayEqual(c.coord('lat').bounds, [[1, 3]])

        # Check new coord bounds - made from bounds
        cube.coord('lat').bounds = [[0.5, 1.5], [1.5, 2.5], [2.5, 3.5]]
        c = cube.collapsed(['lat', 'lon'], iris.analysis.MEAN, weights=weights)
        self.assertArrayEqual(c.coord('lat').bounds, [[0.5, 3.5]])
        cube.coord('lat').bounds = None

        # Check there was no residual change
        self.assertCML(cube, ('analysis', 'weighted_mean_source.cml'))
Example No. 38
def _nearest_neighbour_indices_ndcoords(cube, sample_point, cache=None):
    """
    See documentation for :func:`iris.analysis.interpolate.nearest_neighbour_indices`.

    This function is adapted for points sampling a multi-dimensional coord,
    and can currently only do nearest neighbour interpolation.

    Because this function can be slow for multidimensional coordinates,
    a 'cache' dictionary can be provided by the calling code.

    """

    # Developer notes:
    # A "sample space cube" is made which only has the coords and dims we are sampling on.
    # We get the nearest neighbour using this sample space cube.

    if isinstance(sample_point, dict):
        warnings.warn('Providing a dictionary to specify points is deprecated. Please provide a list of (coordinate, values) pairs.')
        sample_point = sample_point.items()

    if sample_point:
        try:
            coord, value = sample_point[0]
        except ValueError:
            raise ValueError('Sample points must be a list of (coordinate, value) pairs. Got %r.' % sample_point)

    # Convert names to coords in sample_point
    point = []
    ok_coord_ids = set(map(id, cube.dim_coords + cube.aux_coords))
    for coord, value in sample_point:
        if isinstance(coord, basestring):
            coord = cube.coord(coord)
        else:
            coord = cube.coord(coord)
        if id(coord) not in ok_coord_ids:
            msg = ('Invalid sample coordinate {!r}: derived coordinates are'
                   ' not allowed.'.format(coord.name()))
            raise ValueError(msg)
        point.append((coord, value))

    # Reformat sample_point for use in _cartesian_sample_points(), below.
    sample_point = np.array([[value] for coord, value in point])
    sample_point_coords = [coord for coord, value in point]
    sample_point_coord_names = [coord.name() for coord, value in point]

    # Which dims are we sampling?
    sample_dims = set()
    for coord in sample_point_coords:
        for dim in cube.coord_dims(coord):
            sample_dims.add(dim)
    sample_dims = sorted(list(sample_dims))

    # Extract a sub cube that lives in just the sampling space.
    sample_space_slice = [0] * cube.ndim
    for sample_dim in sample_dims:
        sample_space_slice[sample_dim] = slice(None, None)
    sample_space_slice = tuple(sample_space_slice)
    sample_space_cube = cube[sample_space_slice]

    #...with just the sampling coords
    for coord in sample_space_cube.coords():
        if not coord.name() in sample_point_coord_names:
            sample_space_cube.remove_coord(coord)

    # Order the sample point coords according to the sample space cube coords
    sample_space_coord_names = [coord.name() for coord in sample_space_cube.coords()]
    new_order = [sample_space_coord_names.index(name) for name in sample_point_coord_names]
    sample_point = np.array([sample_point[i] for i in new_order])
    sample_point_coord_names = [sample_point_coord_names[i] for i in new_order]

    # Convert the sample point to cartesian coords.
    # If there is no latlon within the coordinate there will be no change.
    # Otherwise, geographic latlon is replaced with cartesian xyz.
    cartesian_sample_point = _cartesian_sample_points(sample_point, sample_point_coord_names)[0]

    sample_space_coords = sample_space_cube.dim_coords + sample_space_cube.aux_coords
    sample_space_coords_and_dims = [(coord, sample_space_cube.coord_dims(coord)) for coord in sample_space_coords]

    if cache is not None and cube in cache:
        kdtree = cache[cube]
    else:
        # Create a "sample space position" for each datum: sample_space_data_positions[coord_index][datum_index]
        sample_space_data_positions = np.empty((len(sample_space_coords_and_dims), sample_space_cube.data.size), dtype=float)
        for d, ndi in enumerate(np.ndindex(sample_space_cube.data.shape)):
            for c, (coord, coord_dims) in enumerate(sample_space_coords_and_dims):
                # Index of this datum along this coordinate (could be nD).
                keys = tuple(ndi[ind] for ind in coord_dims) if coord_dims else slice(None, None)
                # Position of this datum along this coordinate.
                sample_space_data_positions[c][d] = coord.points[keys]

        # Convert to cartesian coordinates. Flatten for kdtree compatibility.
        cartesian_space_data_coords = _cartesian_sample_points(sample_space_data_positions, sample_point_coord_names)

        # Get the nearest datum index to the sample point. This is the goal of the function.
        kdtree = scipy.spatial.cKDTree(cartesian_space_data_coords)

    cartesian_distance, datum_index = kdtree.query(cartesian_sample_point)
    sample_space_ndi = np.unravel_index(datum_index, sample_space_cube.data.shape)

    # Turn sample_space_ndi into a main cube slice.
    # Map sample cube to main cube dims and leave the rest as a full slice.
    main_cube_slice = [slice(None, None)] * cube.ndim
    for sample_coord, sample_coord_dims in sample_space_coords_and_dims:
        # Find the coord in the main cube
        main_coord = cube.coord(sample_coord.name())
        main_coord_dims = cube.coord_dims(main_coord)
        # Mark the nearest data index/indices with respect to this coord
        for sample_i, main_i in zip(sample_coord_dims, main_coord_dims):
            main_cube_slice[main_i] = sample_space_ndi[sample_i]


    # Update cache
    if cache is not None:
        cache[cube] = kdtree

    return tuple(main_cube_slice)
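
The heart of the lookup above is a nearest-neighbour query against flattened coordinate positions; a self-contained sketch of that pattern with scipy (toy numbers, unrelated to any cube):

import numpy as np
import scipy.spatial

# Toy "sample space": six data points described by two coordinates each.
positions = np.array([[0., 0.], [0., 1.], [1., 0.],
                      [1., 1.], [2., 0.], [2., 1.]])
kdtree = scipy.spatial.cKDTree(positions)

# Nearest datum to the query position (0.9, 0.2).
distance, datum_index = kdtree.query([0.9, 0.2])
grid_index = np.unravel_index(datum_index, (3, 2))  # back to (3, 2) grid indices
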
Example No. 39
def differentiate(cube, coord_to_differentiate):
    r"""
    Calculate the differential of a given cube with respect to the
    coord_to_differentiate.

    Args:

    * coord_to_differentiate:
        Either a Coord instance or the unique name of a coordinate which
        exists in the cube.
        If a Coord instance is provided, it does not necessarily have to
        exist on the cube.

    Example usage::

        u_wind_acceleration = differentiate(u_wind_cube, 'forecast_time')

    The algorithm used is equivalent to:

    .. math::

        d_i = \frac{v_{i+1}-v_i}{c_{i+1}-c_i}

    Where ``d`` is the differential, ``v`` is the data value, ``c`` is
    the coordinate value and ``i`` is the index in the differential
    direction. Hence, in a normal situation if a cube has a shape
    (x: n; y: m) differentiating with respect to x will result in a cube
    of shape (x: n-1; y: m) and differentiating with respect to y will
    result in (x: n; y: m-1). If the coordinate to differentiate is
    :attr:`circular <iris.coords.DimCoord.circular>` then the resultant
    shape will be the same as the input cube.

    In the returned cube the `coord_to_differentiate` object is
    redefined such that the output coordinate values are set to the
    averages of the original coordinate values (i.e. the mid-points).
    Similarly, the output lower bounds values are set to the averages of
    the original lower bounds values and the output upper bounds values
    are set to the averages of the original upper bounds values. In more
    formal terms:

    * `C[i] = (c[i] + c[i+1]) / 2`
    * `B[i, 0] = (b[i, 0] + b[i+1, 0]) / 2`
    * `B[i, 1] = (b[i, 1] + b[i+1, 1]) / 2`

    where `c` and `b` represent the input coordinate values and bounds,
    and `C` and `B` the output coordinate values and bounds.

    .. note:: Difference method used is the same as :func:`cube_delta`
    and therefore has the same limitations.

    .. note:: Spherical differentiation does not occur in this routine.

    """
    # Get the delta cube in the required differential direction.
    # This operation results in a copy of the original cube.
    delta_cube = cube_delta(cube, coord_to_differentiate)

    if isinstance(coord_to_differentiate, basestring):
        coord = cube.coord(coord_to_differentiate)
    else:
        coord = coord_to_differentiate

    delta_coord = _construct_delta_coord(coord)
    delta_dim = cube.coord_dims(coord.name())[0]

    # calculate delta_cube / delta_coord to give the differential.
    delta_cube = iris.analysis.maths.divide(delta_cube, delta_coord, delta_dim)

    # Update the standard name
    delta_cube.rename('derivative_of_{}_wrt_{}'.format(cube.name(),
                                                       coord.name()))
    return delta_cube
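
A small numeric illustration of the formula in the docstring above, on plain numpy arrays rather than a cube:

import numpy as np

v = np.array([0., 1., 4., 9.])   # data values
c = np.array([0., 1., 2., 3.])   # coordinate values
d = np.diff(v) / np.diff(c)      # d_i = (v[i+1] - v[i]) / (c[i+1] - c[i])
mid = (c[:-1] + c[1:]) / 2.0     # C[i] = (c[i] + c[i+1]) / 2
# d -> [1., 3., 5.], located at the mid-point coordinates [0.5, 1.5, 2.5]
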
Example No. 40
def nearest_neighbour_indices(cube, sample_points):
    """
    Returns the indices to select the data value(s) closest to the given coordinate point values.

    The sample_points mapping does not have to include coordinate values corresponding to all data
    dimensions. Any dimensions unspecified will default to a full slice.

    For example:

        >>> cube = iris.load_cube(iris.sample_data_path('ostia_monthly.nc'))
        >>> iris.analysis.interpolate.nearest_neighbour_indices(cube, [('latitude', 0), ('longitude', 10)])
        (slice(None, None, None), 9, 12)
        >>> iris.analysis.interpolate.nearest_neighbour_indices(cube, [('latitude', 0)])
        (slice(None, None, None), 9, slice(None, None, None))

    Args:

    * cube:
        An :class:`iris.cube.Cube`.
    * sample_points
        A list of tuple pairs mapping coordinate instances or unique coordinate names in the cube to point values.

    Returns:
        The tuple of indices which will select the point in the cube closest to the supplied coordinate values.

    .. note::

        Nearest neighbour interpolation of multidimensional coordinates is not
        yet supported.

    """
    if isinstance(sample_points, dict):
        warnings.warn('Providing a dictionary to specify points is deprecated. Please provide a list of (coordinate, values) pairs.')
        sample_points = sample_points.items()

    if sample_points:
        try:
            coord, values = sample_points[0]
        except ValueError:
            raise ValueError('Sample points must be a list of (coordinate, value) pairs. Got %r.' % sample_points)

    points = []
    for coord, values in sample_points:
        if isinstance(coord, basestring):
            coord = cube.coord(coord)
        else:
            coord = cube.coord(coord)
        points.append((coord, values))
    sample_points = points

    # Build up a list of indices to span the cube.
    indices = [slice(None, None)] * cube.ndim
    
    # Build up a dictionary which maps the cube's data dimensions to a list (which will later
    # be populated by coordinates in the sample points list)
    dim_to_coord_map = {}
    for i in range(cube.ndim):
        dim_to_coord_map[i] = []

    # Iterate over all of the specifications provided by sample_points
    for coord, point in sample_points:
        data_dim = cube.coord_dims(coord)

        # If no data dimension then we don't need to make any modifications to indices.
        if not data_dim:
            continue
        elif len(data_dim) > 1:
            raise iris.exceptions.CoordinateMultiDimError("Nearest neighbour interpolation of multidimensional "
                                                          "coordinates is not supported.")
        data_dim = data_dim[0]

        dim_to_coord_map[data_dim].append(coord)

        #calculate the nearest neighbour
        min_index = coord.nearest_neighbour_index(point)

        if getattr(coord, 'circular', False):
            warnings.warn("Nearest neighbour on a circular coordinate may not be picking the nearest point.", DeprecationWarning)

        # If the dimension has already been interpolated then assert that the index from this coordinate
        # agrees with the index already calculated, otherwise we have a contradicting specification
        if indices[data_dim] != slice(None, None) and min_index != indices[data_dim]:
            raise ValueError('The coordinates provided (%s) over specify dimension %s.' %
                                        (', '.join([coord.name() for coord in dim_to_coord_map[data_dim]]), data_dim))

        indices[data_dim] = min_index

    return tuple(indices)
Example No. 41
 def test_mismatched_coord_systems(self):
     cube = low_res_4d()
     cube.coord("grid_longitude").coord_system = None
     with self.assertRaises(ValueError):
         project(cube, ROBINSON)
Example No. 42
 def custom_coord_callback(cube, field, filename):
     cube.coord('time').attributes['monty'] = 'python'
     cube.coord('time').attributes['brain'] = 'hurts'
Example No. 43
 def _cube_with_time_bounds(self):
     cube = self._cube_with_pressure()
     cube.coord("time").bounds = np.array([[0, 100]])
     return cube
Example No. 44
def timeseries_filter(cube, window, span,
                      filter_type='lowpass', filter_stats='sum'):
    """
    Apply a timeseries filter.

    Method borrowed from `iris example
    <https://scitools.org.uk/iris/docs/latest/examples/General/
    SOI_filtering.html?highlight=running%20mean>`_

    Apply each filter using the rolling_window method with the weights
    keyword argument. A weighted sum is required because the magnitudes of
    the weights are just as important as their relative sizes.

    See also the `iris rolling window
    <https://scitools.org.uk/iris/docs/v2.0/iris/iris/
    cube.html#iris.cube.Cube.rolling_window>`_

    Parameters
    ----------
    cube: iris.cube.Cube
        input cube.
    window: int
        The length of the filter window (in units of cube time coordinate).
    span: int
        Number of months/days (depending on data frequency) on which
        weights should be computed e.g. 2-yearly: span = 24 (2 x 12 months).
        Span should have same units as cube time coordinate.
    filter_type: str, optional
        Type of filter to be applied; default 'lowpass'.
        Available types: 'lowpass'.
    filter_stats: str, optional
        Type of statistic to aggregate on the rolling window; default 'sum'.
        Available operators: 'mean', 'median', 'std_dev', 'sum', 'min', 'max'

    Returns
    -------
    iris.cube.Cube
        cube time-filtered using 'rolling_window'.

    Raises
    ------
    iris.exceptions.CoordinateNotFoundError:
        Cube does not have time coordinate.
    NotImplementedError:
        If filter_type is not implemented.
    """
    try:
        cube.coord('time')
    except iris.exceptions.CoordinateNotFoundError:
        logger.error("Cube %s does not have time coordinate", cube)
        raise

    # Construct weights depending on frequency
    # TODO implement more filters!
    supported_filters = ['lowpass', ]
    if filter_type in supported_filters:
        if filter_type == 'lowpass':
            wgts = low_pass_weights(window, 1. / span)
    else:
        raise NotImplementedError(
            "Filter type {} not implemented, \
            please choose one of {}".format(filter_type,
                                            ", ".join(supported_filters)))

    # Apply filter
    aggregation_operator = get_iris_analysis_operation(filter_stats)
    cube = cube.rolling_window('time',
                               aggregation_operator,
                               len(wgts),
                               weights=wgts)

    return cube
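
The `low_pass_weights` helper called above is not shown in this listing; a possible implementation, following the Lanczos-window weights used in the iris SOI filtering example that the docstring cites (an assumption, not necessarily this module's definition), could look like:

import numpy as np

def low_pass_weights(window, cutoff):
    """Sketch of Lanczos low-pass filter weights (assumed implementation).

    window: length of the filter window.
    cutoff: cutoff frequency in inverse time steps.
    """
    order = ((window - 1) // 2) + 1
    nwts = 2 * order + 1
    weights = np.zeros([nwts])
    half = nwts // 2
    weights[half] = 2 * cutoff
    k = np.arange(1., half)
    sigma = np.sin(np.pi * k / half) * half / (np.pi * k)
    firstfactor = np.sin(2. * np.pi * cutoff * k) / (np.pi * k)
    weights[half - 1:0:-1] = firstfactor * sigma
    weights[half + 1:-1] = firstfactor * sigma
    return weights[1:-1]
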
Example No. 45
 def test_unmappable(self):
     cube = self.cube[0, 0]
     cube.coord('grid_longitude').standard_name = None
     iplt.contourf(cube)
     self.check_graphic()
Example No. 46
 def test_non_latlon(self):
     cube = self._load_basic()
     cube.coord(dimensions=[0]).coord_system = None
     saved_grib = iris.util.create_temp_filename(suffix='.grib2')
     self.assertRaises(iris.exceptions.TranslationError, iris.save, cube, saved_grib)
     os.remove(saved_grib)