Code Example #1
def _vertical_interpolate(cube, src_levels, levels, interpolation,
                          extrapolation):
    """Perform vertical interpolation."""
    # Determine the cube axis to use for vertical interpolation.
    z_axis, = cube.coord_dims(cube.coord(axis='z', dim_coords=True))

    # Broadcast the 1-D source vertical coordinate to fully describe
    # the spatial extent that will be interpolated.
    src_levels_broadcast = broadcast_to_shape(src_levels.points, cube.shape,
                                              cube.coord_dims(src_levels))

    # Force the mask onto the data as NaNs.
    if np.ma.is_masked(cube.data):
        cube.data[cube.data.mask] = np.nan

    # Now perform the actual vertical interpolation.
    new_data = stratify.interpolate(levels,
                                    src_levels_broadcast,
                                    cube.data,
                                    axis=z_axis,
                                    interpolation=interpolation,
                                    extrapolation=extrapolation)

    # Calculate the mask based on any NaN values in the interpolated data.
    mask = np.isnan(new_data)

    if np.any(mask):
        # Ensure that the data is masked appropriately.
        new_data = np.ma.array(new_data, mask=mask, fill_value=_MDI)

    # Construct the resulting cube with the interpolated data.
    return _create_cube(cube, new_data, src_levels, levels.astype(float))
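The broadcast step above is the interesting part: broadcast_to_shape expands the 1-D vertical coordinate to the full shape of the cube so that stratify.interpolate receives source levels aligned with the data. Below is a minimal sketch of that semantic in plain NumPy (broadcast_to_shape_sketch and the shapes are illustrative, not from the source; the real iris.util.broadcast_to_shape also handles masked arrays):

import numpy as np

def broadcast_to_shape_sketch(array, shape, dim_map):
    # Place each axis of `array` at the target position named in
    # `dim_map`, insert length-1 axes everywhere else, then broadcast.
    full = [1] * len(shape)
    for src_axis, tgt_axis in enumerate(dim_map):
        full[tgt_axis] = array.shape[src_axis]
    return np.broadcast_to(array.reshape(full), shape)

src_levels = np.array([1000.0, 850.0, 500.0])  # 1-D vertical coordinate
cube_shape = (4, 3, 5, 6)                      # e.g. (time, z, lat, lon)
levels_4d = broadcast_to_shape_sketch(src_levels, cube_shape, (1,))
assert levels_4d.shape == cube_shape
assert np.all(levels_4d[0, :, 0, 0] == src_levels)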
Code Example #2
def test_masked_degenerate(self):
    # Masked arrays can have degenerate masks too.
    a = np.random.random([2, 3])
    m = ma.array(a)
    b = broadcast_to_shape(m, (5, 3, 4, 2), (3, 1))
    for i in range(5):
        for j in range(4):
            self.assertMaskedArrayEqual(b[i, :, j, :].T, m)
Code Example #3
def test_added_dimensions_transpose(self):
    # Adding dimensions and having the dimensions of the input
    # transposed.
    a = np.random.random([2, 3])
    b = broadcast_to_shape(a, (5, 3, 4, 2), (3, 1))
    for i in range(5):
        for j in range(4):
            self.assertArrayEqual(b[i, :, j, :].T, a)
Code Example #4
def test_masked(self):
    # Masked arrays are also accepted.
    a = np.random.random([2, 3])
    m = ma.array(a, mask=[[0, 1, 0], [0, 1, 1]])
    b = broadcast_to_shape(m, (5, 3, 4, 2), (3, 1))
    for i in range(5):
        for j in range(4):
            self.assertMaskedArrayEqual(b[i, :, j, :].T, m)
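A hedged usage sketch of the behaviour this test checks (the array values here are illustrative): the mask travels with the data, so every slice over the new dimensions reproduces the original masked array.

import numpy as np
import numpy.ma as ma
from iris.util import broadcast_to_shape

m = ma.array(np.arange(6.0).reshape(2, 3), mask=[[0, 1, 0], [0, 1, 1]])
b = broadcast_to_shape(m, (5, 3, 4, 2), (3, 1))
# Dimension 0 of m lands on axis 3 and dimension 1 on axis 1, so a
# transposed (i, :, j, :) slice equals the original masked array.
assert ma.allequal(b[0, :, 0, :].T, m)
assert np.array_equal(b[0, :, 0, :].T.mask, m.mask)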
Code Example #5
def test_added_dimensions(self):
    # Adding two dimensions, one at the front and one in the middle
    # of the existing dimensions.
    a = np.random.random([2, 3])
    b = broadcast_to_shape(a, (5, 2, 4, 3), (1, 3))
    for i in range(5):
        for j in range(4):
            self.assertArrayEqual(b[i, :, j, :], a)
Code Example #6
def _compute_mean(data, weights):
    mapping = set(
        data.coord_dims('latitude') + data.coord_dims('longitude'))
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        return data.collapsed(
            ('latitude', 'longitude'),
            iris.analysis.MEAN,
            weights=broadcast_to_shape(weights, data.shape, mapping))
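The same pattern works outside Iris: area weights defined on the horizontal grid can be expanded to the data shape before a weighted reduction. A small sketch assuming hypothetical (time, lat, lon) shapes:

import numpy as np
from iris.util import broadcast_to_shape

data = np.random.random((4, 3, 5))      # hypothetical (time, lat, lon)
weights = np.random.random((3, 5))      # per-cell area weights on (lat, lon)
# lat/lon occupy data dimensions 1 and 2, mirroring `mapping` above.
weights_full = broadcast_to_shape(weights, data.shape, (1, 2))
mean = np.average(data, axis=(1, 2), weights=weights_full)
assert mean.shape == (4,)               # one weighted mean per time step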
Code Example #7
def coord_to_cube(cube, coord):
    """
    Convert coordinate points to a cube of the same dimension as the given cube.

    Parameters
    ----------
    cube: iris.cube.Cube
        Cube containing the coordinate to be broadcast.
    coord: str or iris.coords.Coord
        Coordinate to be broadcast.

    Returns
    -------
    iris.cube.Cube
        Cube of broadcast coordinate
    """
    if isinstance(coord, str):
        _coord = cube.coord(coord)
    else:
        _coord = coord
    dim_map = cube.coord_dims(_coord.name())
    _data = _coord.points
    if len(dim_map) > 0:
        _data = broadcast_to_shape(_data, cube.shape, dim_map)
        dc = [(c.copy(), cube.coord_dims(c)) for c in cube.dim_coords]
        ac = [(c.copy(), cube.coord_dims(c)) for c in cube.aux_coords]
        new_cube = iris.cube.Cube(
            data=_data,
            units=_coord.units,
            long_name=_coord.name(),
            dim_coords_and_dims=dc,
            aux_coords_and_dims=ac,
        )
    else:
        new_cube = iris.cube.Cube(
            data=_data, standard_name=_coord.name(), units=_coord.units)
    return new_cube
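A hedged usage sketch for coord_to_cube, built on a small hand-made cube (the coordinate names and shapes are illustrative assumptions):

import numpy as np
import iris.coords
import iris.cube

lat = iris.coords.DimCoord([10.0, 20.0], standard_name='latitude',
                           units='degrees')
lon = iris.coords.DimCoord([0.0, 5.0, 10.0], standard_name='longitude',
                           units='degrees')
cube = iris.cube.Cube(np.zeros((2, 3)), long_name='dummy',
                      dim_coords_and_dims=[(lat, 0), (lon, 1)])

lat_cube = coord_to_cube(cube, 'latitude')
# The latitude points are broadcast across longitude to cube.shape.
assert lat_cube.shape == cube.shape
assert np.all(lat_cube.data[:, 0] == lat.points)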
Code Example #8
def test_same_shape(self):
    # Broadcasting to the current shape should result in no change.
    a = np.random.random([2, 3])
    b = broadcast_to_shape(a, a.shape, (0, 1))
    self.assertArrayEqual(b, a)
Code Example #9
File: stats.py Project: js297/iris
def pearsonr(cube_a, cube_b, corr_coords=None, weights=None, mdtol=1.):
    """
    Calculate the Pearson's r correlation coefficient over specified
    dimensions.

    Args:

    * cube_a, cube_b (cubes):
        Cubes between which the correlation will be calculated.  The cubes
        should either be the same shape and have the same dimension coordinates
        or one cube should be broadcastable to the other.
    * corr_coords (str or list of str):
        The cube coordinate name(s) over which to calculate correlations. If no
        names are provided then correlation will be calculated over all common
        cube dimensions.
    * weights (numpy.ndarray, optional):
        Weights array of same shape as (the smaller of) cube_a and cube_b. Note
        that latitude/longitude area weights can be calculated using
        :func:`iris.analysis.cartography.area_weights`.
    * mdtol (float, optional):
        Tolerance of missing data. The missing data fraction is calculated
        based on the number of grid cells masked in both cube_a and cube_b. If
        this fraction exceeds mdtol, the returned value in the corresponding
        cell is masked. mdtol=0 means no missing data is tolerated while
        mdtol=1 means the resulting element will be masked if and only if all
        contributing elements are masked in cube_a or cube_b. Defaults to 1.

    Returns:
        A cube of the correlation between the two input cubes along the
        specified dimensions, at each point in the remaining dimensions of the
        cubes.

        For example, providing two time/altitude/latitude/longitude cubes and
        corr_coords of 'latitude' and 'longitude' will result in a
        time/altitude cube describing the latitude/longitude (i.e. pattern)
        correlation at each time/altitude point.

    Reference:
        http://www.statsoft.com/textbook/glosp.html#Pearson%20Correlation

    """

    # Assign larger cube to cube_1
    if cube_b.ndim > cube_a.ndim:
        cube_1 = cube_b
        cube_2 = cube_a
    else:
        cube_1 = cube_a
        cube_2 = cube_b

    dim_coords_1 = [coord.name() for coord in cube_1.dim_coords]
    dim_coords_2 = [coord.name() for coord in cube_2.dim_coords]
    common_dim_coords = list(set(dim_coords_1) & set(dim_coords_2))
    # If no coords are passed, default to all common dim coords of the cubes.
    if corr_coords is None:
        corr_coords = common_dim_coords

    # Broadcast weights to shape of cube_1 if necessary.
    if weights is None or cube_1.shape == cube_2.shape:
        weights_1 = weights
        weights_2 = weights
    else:
        if weights.shape != cube_2.shape:
            raise ValueError("weights array should have dimensions {}".format(
                cube_2.shape))
        dims_1_common = [
            i for i in range(cube_1.ndim)
            if dim_coords_1[i] in common_dim_coords
        ]
        weights_1 = broadcast_to_shape(weights, cube_1.shape, dims_1_common)
        weights_2 = weights

    # Calculate correlations.
    s1 = cube_1 - cube_1.collapsed(
        corr_coords, iris.analysis.MEAN, weights=weights_1)
    s2 = cube_2 - cube_2.collapsed(
        corr_coords, iris.analysis.MEAN, weights=weights_2)

    covar = (s1 * s2).collapsed(corr_coords,
                                iris.analysis.SUM,
                                weights=weights_1,
                                mdtol=mdtol)
    var_1 = (s1**2).collapsed(corr_coords,
                              iris.analysis.SUM,
                              weights=weights_1)
    var_2 = (s2**2).collapsed(corr_coords,
                              iris.analysis.SUM,
                              weights=weights_2)

    denom = iris.analysis.maths.apply_ufunc(np.sqrt,
                                            var_1 * var_2,
                                            new_unit=covar.units)
    corr_cube = covar / denom
    corr_cube.rename("Pearson's r")

    return corr_cube
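As a sanity check (not part of the source), the unweighted case with corr_coords covering all common dimensions reduces to ordinary Pearson correlation of the flattened arrays, matching the covar / sqrt(var_1 * var_2) construction above:

import numpy as np

a = np.random.random((3, 4))
b = np.random.random((3, 4))

s1 = a - a.mean()
s2 = b - b.mean()
r_manual = (s1 * s2).sum() / np.sqrt((s1 ** 2).sum() * (s2 ** 2).sum())

# np.corrcoef on the flattened data gives the same Pearson's r.
assert np.isclose(r_manual, np.corrcoef(a.ravel(), b.ravel())[0, 1])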
Code Example #10
File: stats.py Project: AntoinedDMO/iris
def pearsonr(cube_a, cube_b, corr_coords=None, weights=None, mdtol=1.,
             common_mask=False):
    """
    Calculate the Pearson's r correlation coefficient over specified
    dimensions.

    Args:

    * cube_a, cube_b (cubes):
        Cubes between which the correlation will be calculated.  The cubes
        should either be the same shape and have the same dimension coordinates
        or one cube should be broadcastable to the other.
    * corr_coords (str or list of str):
        The cube coordinate name(s) over which to calculate correlations. If no
        names are provided then correlation will be calculated over all common
        cube dimensions.
    * weights (numpy.ndarray, optional):
        Weights array of same shape as (the smaller of) cube_a and cube_b. Note
        that latitude/longitude area weights can be calculated using
        :func:`iris.analysis.cartography.area_weights`.
    * mdtol (float, optional):
        Tolerance of missing data. The missing data fraction is calculated
        based on the number of grid cells masked in both cube_a and cube_b. If
        this fraction exceeds mdtol, the returned value in the corresponding
        cell is masked. mdtol=0 means no missing data is tolerated while
        mdtol=1 means the resulting element will be masked if and only if all
        contributing elements are masked in cube_a or cube_b. Defaults to 1.
    * common_mask (bool):
        If True, applies a common mask to cube_a and cube_b so only cells which
        are unmasked in both cubes contribute to the calculation. If False, the
        variance for each cube is calculated from all available cells. Defaults
        to False.

    Returns:
        A cube of the correlation between the two input cubes along the
        specified dimensions, at each point in the remaining dimensions of the
        cubes.

        For example, providing two time/altitude/latitude/longitude cubes and
        corr_coords of 'latitude' and 'longitude' will result in a
        time/altitude cube describing the latitude/longitude (i.e. pattern)
        correlation at each time/altitude point.

    Reference:
        http://www.statsoft.com/textbook/glosp.html#Pearson%20Correlation

    """

    # Assign larger cube to cube_1
    if cube_b.ndim > cube_a.ndim:
        cube_1 = cube_b
        cube_2 = cube_a
    else:
        cube_1 = cube_a
        cube_2 = cube_b

    dim_coords_1 = [coord.name() for coord in cube_1.dim_coords]
    dim_coords_2 = [coord.name() for coord in cube_2.dim_coords]
    common_dim_coords = list(set(dim_coords_1) & set(dim_coords_2))
    # If no coords are passed, default to all common dim coords of the cubes.
    if corr_coords is None:
        corr_coords = common_dim_coords

    smaller_shape = cube_2.shape

    # Match up data masks if required.
    if common_mask:
        # Create a cube of 1's with a common mask.
        if ma.is_masked(cube_2.data):
            mask_cube = _ones_like(cube_2)
        else:
            mask_cube = 1.
        if ma.is_masked(cube_1.data):
            # Take a slice to avoid unnecessary broadcasting of cube_2.
            slice_coords = [dim_coords_1[i] for i in range(cube_1.ndim) if
                            dim_coords_1[i] not in common_dim_coords and
                            np.array_equal(cube_1.data.mask.any(axis=i),
                                           cube_1.data.mask.all(axis=i))]
            cube_1_slice = next(cube_1.slices_over(slice_coords))
            mask_cube = _ones_like(cube_1_slice) * mask_cube
        # Apply common mask to data.
        if isinstance(mask_cube, iris.cube.Cube):
            cube_1 = cube_1 * mask_cube
            cube_2 = mask_cube * cube_2
            dim_coords_2 = [coord.name() for coord in cube_2.dim_coords]

    # Broadcast weights to shape of cubes if necessary.
    if weights is None or cube_1.shape == smaller_shape:
        weights_1 = weights
        weights_2 = weights
    else:
        if weights.shape != smaller_shape:
            raise ValueError("weights array should have dimensions {}".
                             format(smaller_shape))

        dims_1_common = [i for i in range(cube_1.ndim) if
                         dim_coords_1[i] in common_dim_coords]
        weights_1 = broadcast_to_shape(weights, cube_1.shape, dims_1_common)
        if cube_2.shape != smaller_shape:
            dims_2_common = [i for i in range(cube_2.ndim) if
                             dim_coords_2[i] in common_dim_coords]
            weights_2 = broadcast_to_shape(weights, cube_2.shape,
                                           dims_2_common)
        else:
            weights_2 = weights

    # Calculate correlations.
    s1 = cube_1 - cube_1.collapsed(corr_coords, iris.analysis.MEAN,
                                   weights=weights_1)
    s2 = cube_2 - cube_2.collapsed(corr_coords, iris.analysis.MEAN,
                                   weights=weights_2)

    covar = (s1*s2).collapsed(corr_coords, iris.analysis.SUM,
                              weights=weights_1, mdtol=mdtol)
    var_1 = (s1**2).collapsed(corr_coords, iris.analysis.SUM,
                              weights=weights_1)
    var_2 = (s2**2).collapsed(corr_coords, iris.analysis.SUM,
                              weights=weights_2)

    denom = iris.analysis.maths.apply_ufunc(np.sqrt, var_1*var_2,
                                            new_unit=covar.units)
    corr_cube = covar / denom
    corr_cube.rename("Pearson's r")

    return corr_cube
Code Example #11
File: stats.py Project: lauradomar/iris
def pearsonr(cube_a,
             cube_b,
             corr_coords=None,
             weights=None,
             mdtol=1.,
             common_mask=False):
    """
    Calculate the Pearson's r correlation coefficient over specified
    dimensions.

    Args:

    * cube_a, cube_b (cubes):
        Cubes between which the correlation will be calculated.  The cubes
        should either be the same shape and have the same dimension coordinates
        or one cube should be broadcastable to the other.
    * corr_coords (str or list of str):
        The cube coordinate name(s) over which to calculate correlations. If no
        names are provided then correlation will be calculated over all common
        cube dimensions.
    * weights (numpy.ndarray, optional):
        Weights array of same shape as (the smaller of) cube_a and cube_b. Note
        that latitude/longitude area weights can be calculated using
        :func:`iris.analysis.cartography.area_weights`.
    * mdtol (float, optional):
        Tolerance of missing data. The missing data fraction is calculated
        based on the number of grid cells masked in both cube_a and cube_b. If
        this fraction exceeds mdtol, the returned value in the corresponding
        cell is masked. mdtol=0 means no missing data is tolerated while
        mdtol=1 means the resulting element will be masked if and only if all
        contributing elements are masked in cube_a or cube_b. Defaults to 1.
    * common_mask (bool):
        If True, applies a common mask to cube_a and cube_b so only cells which
        are unmasked in both cubes contribute to the calculation. If False, the
        variance for each cube is calculated from all available cells. Defaults
        to False.

    Returns:
        A cube of the correlation between the two input cubes along the
        specified dimensions, at each point in the remaining dimensions of the
        cubes.

        For example, providing two time/altitude/latitude/longitude cubes and
        corr_coords of 'latitude' and 'longitude' will result in a
        time/altitude cube describing the latitude/longitude (i.e. pattern)
        correlation at each time/altitude point.

    Reference:
        http://www.statsoft.com/textbook/glosp.html#Pearson%20Correlation

    This operation is non-lazy.

    """

    # Assign larger cube to cube_1
    if cube_b.ndim > cube_a.ndim:
        cube_1 = cube_b
        cube_2 = cube_a
    else:
        cube_1 = cube_a
        cube_2 = cube_b

    dim_coords_1 = [coord.name() for coord in cube_1.dim_coords]
    dim_coords_2 = [coord.name() for coord in cube_2.dim_coords]
    common_dim_coords = list(set(dim_coords_1) & set(dim_coords_2))
    # If no coords are passed, default to all common dim coords of the cubes.
    if corr_coords is None:
        corr_coords = common_dim_coords

    smaller_shape = cube_2.shape

    # Match up data masks if required.
    if common_mask:
        # Create a cube of 1's with a common mask.
        if ma.is_masked(cube_2.data):
            mask_cube = _ones_like(cube_2)
        else:
            mask_cube = 1.
        if ma.is_masked(cube_1.data):
            # Take a slice to avoid unnecessary broadcasting of cube_2.
            slice_coords = [
                dim_coords_1[i] for i in range(cube_1.ndim)
                if dim_coords_1[i] not in common_dim_coords and np.array_equal(
                    cube_1.data.mask.any(axis=i), cube_1.data.mask.all(axis=i))
            ]
            cube_1_slice = next(cube_1.slices_over(slice_coords))
            mask_cube = _ones_like(cube_1_slice) * mask_cube
        # Apply common mask to data.
        if isinstance(mask_cube, iris.cube.Cube):
            cube_1 = cube_1 * mask_cube
            cube_2 = mask_cube * cube_2
            dim_coords_2 = [coord.name() for coord in cube_2.dim_coords]

    # Broadcast weights to shape of cubes if necessary.
    if weights is None or cube_1.shape == smaller_shape:
        weights_1 = weights
        weights_2 = weights
    else:
        if weights.shape != smaller_shape:
            raise ValueError("weights array should have dimensions {}".format(
                smaller_shape))

        dims_1_common = [
            i for i in range(cube_1.ndim)
            if dim_coords_1[i] in common_dim_coords
        ]
        weights_1 = broadcast_to_shape(weights, cube_1.shape, dims_1_common)
        if cube_2.shape != smaller_shape:
            dims_2_common = [
                i for i in range(cube_2.ndim)
                if dim_coords_2[i] in common_dim_coords
            ]
            weights_2 = broadcast_to_shape(weights, cube_2.shape,
                                           dims_2_common)
        else:
            weights_2 = weights

    # Calculate correlations.
    s1 = cube_1 - cube_1.collapsed(
        corr_coords, iris.analysis.MEAN, weights=weights_1)
    s2 = cube_2 - cube_2.collapsed(
        corr_coords, iris.analysis.MEAN, weights=weights_2)

    covar = (s1 * s2).collapsed(corr_coords,
                                iris.analysis.SUM,
                                weights=weights_1,
                                mdtol=mdtol)
    var_1 = (s1**2).collapsed(corr_coords,
                              iris.analysis.SUM,
                              weights=weights_1)
    var_2 = (s2**2).collapsed(corr_coords,
                              iris.analysis.SUM,
                              weights=weights_2)

    denom = iris.analysis.maths.apply_ufunc(np.sqrt,
                                            var_1 * var_2,
                                            new_unit=covar.units)
    corr_cube = covar / denom
    corr_cube.rename("Pearson's r")

    return corr_cube
Code Example #12
File: stats.py Project: ghislainp/iris
def pearsonr(cube_a, cube_b, corr_coords=None, weights=None, mdtol=1.):
    """
    Calculate the Pearson's r correlation coefficient over specified
    dimensions.

    Args:

    * cube_a, cube_b (cubes):
        Cubes between which the correlation will be calculated.  The cubes
        should either be the same shape and have the same dimension coordinates
        or one cube should be broadcastable to the other.
    * corr_coords (str or list of str):
        The cube coordinate name(s) over which to calculate correlations. If no
        names are provided then correlation will be calculated over all common
        cube dimensions.
    * weights (numpy.ndarray, optional):
        Weights array of same shape as (the smaller of) cube_a and cube_b. Note
        that latitude/longitude area weights can be calculated using
        :func:`iris.analysis.cartography.area_weights`.
    * mdtol (float, optional):
        Tolerance of missing data. The missing data fraction is calculated
        based on the number of grid cells masked in both cube_a and cube_b. If
        this fraction exceeds mdtol, the returned value in the corresponding
        cell is masked. mdtol=0 means no missing data is tolerated while
        mdtol=1 means the resulting element will be masked if and only if all
        contributing elements are masked in cube_a or cube_b. Defaults to 1.

    Returns:
        A cube of the correlation between the two input cubes along the
        specified dimensions, at each point in the remaining dimensions of the
        cubes.

        For example, providing two time/altitude/latitude/longitude cubes and
        corr_coords of 'latitude' and 'longitude' will result in a
        time/altitude cube describing the latitude/longitude (i.e. pattern)
        correlation at each time/altitude point.

    Reference:
        http://www.statsoft.com/textbook/glosp.html#Pearson%20Correlation

    """

    # Assign larger cube to cube_1
    if cube_b.ndim > cube_a.ndim:
        cube_1 = cube_b
        cube_2 = cube_a
    else:
        cube_1 = cube_a
        cube_2 = cube_b

    dim_coords_1 = [coord.name() for coord in cube_1.dim_coords]
    dim_coords_2 = [coord.name() for coord in cube_2.dim_coords]
    common_dim_coords = list(set(dim_coords_1) & set(dim_coords_2))
    # If no coords are passed, default to all common dim coords of the cubes.
    if corr_coords is None:
        corr_coords = common_dim_coords

    # Broadcast weights to shape of cube_1 if necessary.
    if weights is None or cube_1.shape == cube_2.shape:
        weights_1 = weights
        weights_2 = weights
    else:
        if weights.shape != cube_2.shape:
            raise ValueError("weights array should have dimensions {}".
                             format(cube_2.shape))
        dims_1_common = [i for i in range(cube_1.ndim) if
                         dim_coords_1[i] in common_dim_coords]
        weights_1 = broadcast_to_shape(weights, cube_1.shape, dims_1_common)
        weights_2 = weights

    # Calculate correlations.
    s1 = cube_1 - cube_1.collapsed(corr_coords, iris.analysis.MEAN,
                                   weights=weights_1)
    s2 = cube_2 - cube_2.collapsed(corr_coords, iris.analysis.MEAN,
                                   weights=weights_2)

    covar = (s1*s2).collapsed(corr_coords, iris.analysis.SUM,
                              weights=weights_1, mdtol=mdtol)
    var_1 = (s1**2).collapsed(corr_coords, iris.analysis.SUM,
                              weights=weights_1)
    var_2 = (s2**2).collapsed(corr_coords, iris.analysis.SUM,
                              weights=weights_2)

    denom = iris.analysis.maths.apply_ufunc(np.sqrt, var_1*var_2,
                                            new_unit=covar.units)
    corr_cube = covar / denom
    corr_cube.rename("Pearson's r")

    return corr_cube