Example #1
import numpy.ma as ma

import iris.analysis


def _math_op_common(cube, operation_function, new_unit, new_dtype=None,
                    in_place=False):
    # `_assert_is_cube` is a private helper defined alongside this function
    # in iris.analysis.maths.
    _assert_is_cube(cube)

    if in_place:
        new_cube = cube
        if cube.has_lazy_data():
            new_cube.data = operation_function(cube.lazy_data())
        else:
            try:
                operation_function(cube.data, out=cube.data)
            except TypeError:
                # Non ufunc function
                operation_function(cube.data)
    else:
        new_cube = cube.copy(data=operation_function(cube.core_data()))

    # If the result of the operation is scalar and masked, we need to fix up
    # the dtype
    if new_dtype is not None \
            and not new_cube.has_lazy_data() \
            and new_cube.data.shape == () \
            and ma.is_masked(new_cube.data):
        new_cube.data = ma.masked_array(0, 1, dtype=new_dtype)

    iris.analysis.clear_phenomenon_identity(new_cube)
    new_cube.units = new_unit
    return new_cube
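A minimal usage sketch for the helper above. `_math_op_common` is a private helper inside iris.analysis.maths, so its exact signature can vary between Iris versions; the cube, the partially applied NumPy ufunc and the printed result below are illustrative assumptions rather than part of the original example.

import functools

import numpy as np
from iris.cube import Cube
from iris.analysis.maths import _math_op_common

# Partially apply a ufunc so it becomes a one-argument operation on the data.
double = functools.partial(np.multiply, 2)

cube = Cube(np.arange(4.0), units='K')
doubled = _math_op_common(cube, double, new_unit=cube.units)
print(doubled.data)  # [0. 2. 4. 6.]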
Example #2
import dask.array as da


def _compute_anomalies(cube, reference, period, seasons):
    # `_get_period_coord` is a private helper from the same preprocessor module.
    cube_coord = _get_period_coord(cube, period, seasons)
    ref_coord = _get_period_coord(reference, period, seasons)

    data = cube.core_data()
    cube_time = cube.coord('time')
    ref = {}
    for ref_slice in reference.slices_over(ref_coord):
        ref[ref_slice.coord(ref_coord).points[0]] = ref_slice.core_data()

    cube_coord_dim = cube.coord_dims(cube_coord)[0]
    slicer = [slice(None)] * len(data.shape)
    new_data = []
    for i in range(cube_time.shape[0]):
        slicer[cube_coord_dim] = i
        new_data.append(data[tuple(slicer)] - ref[cube_coord.points[i]])
    data = da.stack(new_data, axis=cube_coord_dim)
    cube = cube.copy(data)
    cube.remove_coord(cube_coord)
    return cube
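The loop above pairs every time step with the reference slice for its period (month number, season number or day of year) and subtracts it. A minimal sketch of the same pattern with plain NumPy arrays, using made-up values, where month numbers stand in for the period coordinate:

import numpy as np

data = np.arange(6.0)                  # six monthly values
months = np.array([1, 2, 3, 1, 2, 3])  # period key for each time step
ref = {1: 0.5, 2: 1.5, 3: 2.5}         # per-period reference means
anom = np.stack([data[i] - ref[months[i]] for i in range(len(data))])
print(anom)  # [-0.5 -0.5 -0.5  2.5  2.5  2.5]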
Example #3
import numpy.ma as ma

import iris.cube


def _math_op_common(
    cube,
    operation_function,
    new_unit,
    new_dtype=None,
    in_place=False,
    skeleton_cube=False,
):
    # `_assert_is_cube` and `_sanitise_metadata` (used below) are private
    # helpers defined alongside this function.
    _assert_is_cube(cube)

    if in_place and not skeleton_cube:
        if cube.has_lazy_data():
            cube.data = operation_function(cube.lazy_data())
        else:
            try:
                operation_function(cube.data, out=cube.data)
            except TypeError:
                # Non-ufunc function
                operation_function(cube.data)
        new_cube = cube
    else:
        data = operation_function(cube.core_data())
        if skeleton_cube:
            # Simply wrap the resultant data in a cube, as no
            # cube metadata is required by the caller.
            new_cube = iris.cube.Cube(data)
        else:
            new_cube = cube.copy(data)

    # If the result of the operation is scalar and masked, we need to fix up the dtype.
    if (new_dtype is not None and not new_cube.has_lazy_data()
            and new_cube.data.shape == () and ma.is_masked(new_cube.data)):
        new_cube.data = ma.masked_array(0, 1, dtype=new_dtype)

    _sanitise_metadata(new_cube, new_unit)

    return new_cube
Example #4
import copy

import dask.array as da


def anomalies(cube,
              period,
              reference=None,
              standardize=False,
              seasons=('DJF', 'MAM', 'JJA', 'SON')):
    """Compute anomalies using a mean with the specified granularity.

    Computes anomalies based on daily, monthly, seasonal or yearly means for
    the full available period.

    Parameters
    ----------
    cube: iris.cube.Cube
        Input cube.

    period: str
        Period to compute the statistic over.
        Available periods: 'full', 'season', 'seasonal', 'monthly', 'month',
        'mon', 'daily', 'day'.

    reference: dict, optional, default: None
        Period of time to use as a reference; the given keyword arguments are
        passed on to the 'extract_time' preprocessor function.
        If None, all available data is used as a reference.

    standardize: bool, optional
        If True, standardized anomalies are calculated.

    seasons: list or tuple of str, optional
        Seasons to use if needed. Defaults to ('DJF', 'MAM', 'JJA', 'SON').

    Returns
    -------
    iris.cube.Cube
        Anomalies cube
    """
    if reference is None:
        reference_cube = cube
    else:
        reference_cube = extract_time(cube, **reference)
    reference = climate_statistics(reference_cube,
                                   period=period,
                                   seasons=seasons)
    if period in ['full']:
        metadata = copy.deepcopy(cube.metadata)
        cube = cube - reference
        cube.metadata = metadata
        if standardize:
            cube_stddev = climate_statistics(cube,
                                             operator='std_dev',
                                             period=period,
                                             seasons=seasons)
            cube = cube / cube_stddev
            cube.units = '1'
        return cube

    cube = _compute_anomalies(cube, reference, period, seasons)

    # standardize the results if requested
    if standardize:
        cube_stddev = climate_statistics(cube,
                                         operator='std_dev',
                                         period=period)
        tdim = cube.coord_dims('time')[0]
        reps = cube.shape[tdim] / cube_stddev.shape[tdim]
        if not reps % 1 == 0:
            raise ValueError(
                "Cannot safely apply preprocessor to this dataset, "
                "since the full time period of this dataset is not "
                f"a multiple of the period '{period}'")
        cube.data = cube.core_data() / da.concatenate(
            [cube_stddev.core_data() for _ in range(int(reps))], axis=tdim)
        cube.units = '1'
    return cube
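The `anomalies` function above appears to come from ESMValCore's preprocessor. Assuming that, a hedged usage sketch follows, where `cube` is taken to be an existing iris.cube.Cube with a 'time' coordinate and the reference years are arbitrary examples of the 'extract_time' keywords:

from esmvalcore.preprocessor import anomalies

# Monthly anomalies relative to a 1981-2010 reference period (illustrative values).
monthly_anomalies = anomalies(
    cube,
    period='month',
    reference={
        'start_year': 1981, 'start_month': 1, 'start_day': 1,
        'end_year': 2010, 'end_month': 12, 'end_day': 31,
    },
    standardize=False,
)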