Exemplo n.º 1
0
    def __init__(self, projection=None):
        """
        Nearest-neighbour regridding scheme that applies
        scipy.interpolate.griddata to projected unstructured data.

        .. deprecated:: 3.2.0

            Scheduled for removal in a future release with no exact
            replacement planned.  Use
            :class:`iris.analysis.UnstructuredNearest` where possible; if
            you need this exact functionality, contact the Iris Developers
            to discuss retaining it (which could include reversing the
            deprecation).

        Optional Args:

        * projection: `cartopy.crs instance`
            Projection in which the scipy calculation is performed.  When
            None (the default), a PlateCarree projection is used.

        """
        self.projection = projection
        warn_deprecated(
            "iris.experimental.regrid.ProjectedUnstructuredNearest has been "
            "deprecated, and will be removed in a future release.  "
            "Please use 'iris.analysis.UnstructuredNearest' instead, where "
            "possible.  Consult the docstring for details."
        )
Exemplo n.º 2
0
def _netcdf_promote_warning():
    """Emit the deprecation notice for legacy NetCDF loading behaviour."""
    warn_deprecated(
        'NetCDF default loading behaviour currently does not expose '
        'variables which define reference surfaces for dimensionless '
        'vertical coordinates as independent Cubes. This behaviour is '
        'deprecated in favour of automatic promotion to Cubes. To switch '
        'to the new behaviour, set iris.FUTURE.netcdf_promote to True.')
Exemplo n.º 3
0
    def __init__(self, projection=None):
        """
        Linear regridding scheme that applies scipy.interpolate.griddata
        to projected unstructured data.

        .. deprecated:: 3.2.0

            Scheduled for removal in a future release; no replacement is
            currently planned.  If you make use of this functionality,
            contact the Iris Developers to discuss retaining it (which
            could include reversing the deprecation).

        Optional Args:

        * projection: `cartopy.crs instance`
            Projection in which the scipy calculation is performed.  When
            None (the default), a PlateCarree projection is used.

        """
        self.projection = projection
        warn_deprecated(
            "The class iris.experimental.regrid.ProjectedUnstructuredLinear "
            "has been deprecated, and will be removed in a future release.  "
            "Please consult the docstring for details."
        )
Exemplo n.º 4
0
    def __new__(cls, field_generator, field_generator_kwargs, converter,
                legacy_custom_rules=None):
        """
        Create a definition of a field-based Cube loader.

        Args:

        * field_generator
            Callable taking a filename as its first argument and
            returning an iterable of field objects.

        * field_generator_kwargs
            Extra keyword arguments passed on to `field_generator`.

        * converter
            Callable converting a field object into a Cube.

        Kwargs:

        * legacy_custom_rules
            Object with a callable `verify(cube, field)` attribute; a
            legacy hook for modifying Cubes during the load process.
            Default is None.

            .. deprecated:: 1.9

        """
        if legacy_custom_rules is not None:
            warn_deprecated('The `legacy_custom_rules` attribute is '
                            'deprecated.')
        members = (field_generator, field_generator_kwargs, converter,
                   legacy_custom_rules)
        return tuple.__new__(cls, members)
Exemplo n.º 5
0
def sample_data_path(*path_to_join):
    """
    Return the full path to a file within the iris sample data collection.

    .. note::

        This function is only for locating files in the iris sample data
        collection (installed separately from iris). It is not needed or
        appropriate for general file access.

    """
    target = os.path.join(*path_to_join)
    # Relative paths only: the argument is resolved inside the collection.
    if os.path.isabs(target):
        raise ValueError('Absolute paths, such as {!r}, are not supported.\n'
                         'NB. This function is only for locating files in the '
                         'iris sample data collection. It is not needed or '
                         'appropriate for general file access.'.format(target))
    if iris_sample_data is None:
        # Fall back to the deprecated config location.
        warn_deprecated("iris.config.SAMPLE_DATA_DIR was deprecated in "
                        "v1.10.0 and will be removed in a future Iris "
                        "release. Install the 'iris_sample_data' package.")
        target = os.path.join(iris.config.SAMPLE_DATA_DIR, target)
    else:
        target = os.path.join(iris_sample_data.path, target)
    # `target` may contain wildcards, so existence is checked via glob.
    if not glob.glob(target):
        raise ValueError('Sample data file(s) at {!r} not found.\n'
                         'NB. This function is only for locating files in the '
                         'iris sample data collection. It is not needed or '
                         'appropriate for general file access.'.format(target))
    return target
Exemplo n.º 6
0
    def __init__(self, conditions, actions):
        """
        Create instance methods from our conditions and actions.

        Args:

        * conditions
            Iterable of condition expressions.

        * actions
            Iterable of action expressions.

        Raises TypeError if either argument is not iterable.

        """
        if _enable_rules_deprecations:
            warn_deprecated(
                "the `iris.fileformats.rules.Rule class is deprecated.")
        # BUG FIX: concatenating a str with a type object raised a
        # confusing TypeError from the '+' itself; convert explicitly so
        # the intended message is raised instead.
        if not hasattr(conditions, '__iter__'):
            raise TypeError('Variable conditions should be iterable, got: ' +
                            str(type(conditions)))
        if not hasattr(actions, '__iter__'):
            raise TypeError('Variable actions should be iterable, got: ' +
                            str(type(actions)))

        self._conditions = conditions
        self._actions = actions
        self._exec_actions = []

        # Identifier derived from the rule content (order-sensitive).
        self.id = str(hash((tuple(self._conditions), tuple(self._actions))))

        # NOTE(review): this loop rewrites each element in place and looks
        # like a no-op for list inputs -- retained for compatibility.
        for i, condition in enumerate(conditions):
            self._conditions[i] = condition

        # Create the conditions method.
        self._create_conditions_method()

        # Create the action methods.
        for i, action in enumerate(self._actions):
            if not action:
                action = 'None'
            self._create_action_method(i, action)
Exemplo n.º 7
0
    def __new__(cls,
                field_generator,
                field_generator_kwargs,
                converter,
                legacy_custom_rules=None):
        """
        Create a definition of a field-based Cube loader.

        Args:

        * field_generator
            Callable taking a filename as its first argument and
            returning an iterable of field objects.

        * field_generator_kwargs
            Extra keyword arguments passed on to `field_generator`.

        * converter
            Callable converting a field object into a Cube.

        Kwargs:

        * legacy_custom_rules
            Object with a callable `verify(cube, field)` attribute; a
            legacy hook for modifying Cubes during the load process.
            Default is None.

            .. deprecated:: 1.9

        """
        if legacy_custom_rules is not None:
            warn_deprecated('The `legacy_custom_rules` attribute is '
                            'deprecated.')
        return tuple.__new__(
            cls,
            (field_generator, field_generator_kwargs, converter,
             legacy_custom_rules))
Exemplo n.º 8
0
Arquivo: cf.py Projeto: rcomer/iris
def _netcdf_promote_warning():
    """Issue the deprecation warning about NetCDF reference-surface
    promotion behaviour."""
    text = ('NetCDF default loading behaviour currently does not expose '
            'variables which define reference surfaces for '
            'dimensionless vertical coordinates as independent Cubes. '
            'This behaviour is deprecated in favour of automatic '
            'promotion to Cubes. To switch to the new behaviour, set '
            'iris.FUTURE.netcdf_promote to True.')
    warn_deprecated(text)
Exemplo n.º 9
0
def sample_data_path(*path_to_join):
    """
    Given the sample data resource, returns the full path to the file.

    .. note::

        This function is only for locating files in the iris sample data
        collection (installed separately from iris). It is not needed or
        appropriate for general file access.

    """
    # Shared suffix for both error messages below.
    note = ('NB. This function is only for locating files in the '
            'iris sample data collection. It is not needed or '
            'appropriate for general file access.')
    target = os.path.join(*path_to_join)
    if os.path.isabs(target):
        raise ValueError('Absolute paths, such as {!r}, are not '
                         'supported.\n'.format(target) + note)
    if iris_sample_data is not None:
        target = os.path.join(iris_sample_data.path, target)
    else:
        # Deprecated fallback location from iris.config.
        warn_deprecated("iris.config.SAMPLE_DATA_DIR was deprecated in "
                        "v1.10.0 and will be removed in a future Iris "
                        "release. Install the 'iris_sample_data' package.")
        target = os.path.join(iris.config.SAMPLE_DATA_DIR, target)
    # Wildcards are allowed, so use glob for the existence check.
    if not glob.glob(target):
        raise ValueError('Sample data file(s) at {!r} not '
                         'found.\n'.format(target) + note)
    return target
Exemplo n.º 10
0
    def __init__(self, conditions, actions):
        """
        Create instance methods from our conditions and actions.

        Args:

        * conditions
            Iterable of condition expressions.

        * actions
            Iterable of action expressions.

        Raises TypeError if either argument is not iterable.

        """
        if _enable_rules_deprecations:
            warn_deprecated(
                "the `iris.fileformats.rules.Rule class is deprecated.")
        # BUG FIX: '...' + type(x) concatenated a str with a type object,
        # raising a confusing TypeError from the '+' itself; format the
        # type into the message instead.
        if not hasattr(conditions, '__iter__'):
            raise TypeError('Variable conditions should be iterable, '
                            'got: {}'.format(type(conditions)))
        if not hasattr(actions, '__iter__'):
            raise TypeError('Variable actions should be iterable, '
                            'got: {}'.format(type(actions)))

        self._conditions = conditions
        self._actions = actions
        self._exec_actions = []

        # Identifier derived from the rule content (order-sensitive).
        self.id = str(hash((tuple(self._conditions), tuple(self._actions))))

        # NOTE(review): this loop rewrites each element in place and looks
        # like a no-op for list inputs -- retained for compatibility.
        for i, condition in enumerate(conditions):
            self._conditions[i] = condition

        # Create the conditions method.
        self._create_conditions_method()

        # Create the action methods.
        for i, action in enumerate(self._actions):
            if not action:
                action = 'None'
            self._create_action_method(i, action)
Exemplo n.º 11
0
def calculate_forecast_period(time, forecast_reference_time):
    """
    Return the forecast period in hours derived from time and
    forecast_reference_time scalar coordinates.

    .. deprecated:: 1.10

    """
    warn_deprecated("the `iris.fileformats.rules.calculate_forecast_period "
                    "method is deprecated.")

    npoints = time.points.size
    if npoints != 1:
        raise ValueError('Expected a time coordinate with a single point. '
                         '{!r} has {} points.'.format(time.name(), npoints))

    if not time.has_bounds():
        raise ValueError('Expected a time coordinate with bounds.')

    frt = forecast_reference_time
    if frt.points.size != 1:
        raise ValueError('Expected a forecast_reference_time coordinate with '
                         'a single point. {!r} has {} points.'
                         .format(frt.name(), frt.points.size))

    # Express the time units in hours, keeping the same epoch and calendar.
    origin = time.units.origin.replace(time.units.origin.split()[0], 'hours')
    hour_units = cf_units.Unit(origin, calendar=time.units.calendar)

    # Start and end of the period, in hours since a common epoch.
    end = time.units.convert(time.bounds[0, 1], hour_units)
    start = frt.units.convert(frt.points[0], hour_units)
    return end - start
Exemplo n.º 12
0
def calculate_forecast_period(time, forecast_reference_time):
    """
    Return the forecast period in hours derived from time and
    forecast_reference_time scalar coordinates.

    .. deprecated:: 1.10

    """
    warn_deprecated("the `iris.fileformats.rules.calculate_forecast_period "
                    "method is deprecated.")

    if time.points.size != 1:
        raise ValueError('Expected a time coordinate with a single point. '
                         '{!r} has {} points.'.format(time.name(),
                                                      time.points.size))
    if not time.has_bounds():
        raise ValueError('Expected a time coordinate with bounds.')

    frt = forecast_reference_time
    if frt.points.size != 1:
        raise ValueError('Expected a forecast_reference_time coordinate with '
                         'a single point. {!r} has {} points.'.format(
                             frt.name(), frt.points.size))

    # Rewrite the time unit so that it counts hours from the same epoch.
    origin_in_hours = time.units.origin.replace(
        time.units.origin.split()[0], 'hours')
    hours_unit = cf_units.Unit(origin_in_hours, calendar=time.units.calendar)

    # Start and end of the period, in hours since a common epoch.
    period_end = time.units.convert(time.bounds[0, 1], hours_unit)
    period_start = frt.units.convert(frt.points[0], hours_unit)

    return period_end - period_start
Exemplo n.º 13
0
Arquivo: maths.py Projeto: rcomer/iris
def _add_subtract_common(operation_function, operation_name, cube, other,
                         dim=None, ignore=True, in_place=False):
    """
    Shared implementation for cube addition and subtraction.

    Args:

    * operation_function
        Function performing the operation (e.g. numpy.subtract).
    * operation_name
        Public name of the operation (e.g. 'divide').
    * cube
        Cube whose data provides the first operand.
    * other
        Cube, coord, ndarray or number providing the second operand.

    Kwargs:

    * dim
        Dimension along which to apply `other` when it is a coordinate
        not found in `cube`.
    * ignore
        The value of this argument is ignored.

        .. deprecated:: 0.8
    * in_place
        Whether to apply the operation in place to `cube`/`cube.data`.

    """
    _assert_is_cube(cube)
    _assert_matching_units(cube, other, operation_name)

    coord_comp = None
    if isinstance(other, iris.cube.Cube):
        # Compare the coordinates of the two cubes.
        coord_comp = iris.analysis.coord_comparison(cube, other)

        # The `ignore` keyword no longer does anything: warn when set.
        if ignore is not True:
            warn_deprecated('The "ignore" keyword has been deprecated in '
                            'add/subtract. This functionality is now '
                            'automatic. The provided value to "ignore" has '
                            'been ignored, and has been automatically '
                            'calculated.')

        problem_groups = (coord_comp['ungroupable_and_dimensioned'] +
                          coord_comp['resamplable'])
        if problem_groups:
            names = ', '.join({grp.name() for grp in problem_groups})
            raise ValueError('This operation cannot be performed as there '
                             'are differing coordinates (%s) remaining '
                             'which cannot be ignored.' % names)

    new_cube = _binary_op_common(operation_function, operation_name, cube,
                                 other, cube.units, dim, in_place)

    if coord_comp:
        # Remove any coordinates marked as ignorable from the result.
        for coord_grp in coord_comp['ignorable']:
            if coord_grp[0]:
                new_cube.remove_coord(coord_grp[0])

    return new_cube
Exemplo n.º 14
0
def _add_subtract_common(operation_function, operation_name, cube, other,
                         dim=None, ignore=True, in_place=False):
    """
    Common implementation shared by cube addition and subtraction.

    Args:

    * operation_function
        Function performing the operation (e.g. numpy.subtract).
    * operation_name
        Public name of the operation (e.g. 'divide').
    * cube
        Cube whose data provides the first operand.
    * other
        Cube, coord, ndarray or number providing the second operand.

    Kwargs:

    * dim
        Dimension along which to apply `other` when it is a coordinate
        not found in `cube`.
    * ignore
        The value of this argument is ignored.

        .. deprecated:: 0.8
    * in_place
        Whether to apply the operation in place to `cube`/`cube.data`.

    """
    _assert_is_cube(cube)
    _assert_matching_units(cube, other, operation_name)

    if not isinstance(other, iris.cube.Cube):
        coord_comp = None
    else:
        # Coordinate comparison between the two operand cubes.
        coord_comp = iris.analysis.coord_comparison(cube, other)

        # `ignore` is obsolete; its value is never used.
        if ignore is not True:
            warn_deprecated('The "ignore" keyword has been deprecated in '
                            'add/subtract. This functionality is now '
                            'automatic. The provided value to "ignore" has '
                            'been ignored, and has been automatically '
                            'calculated.')

        blocking = (coord_comp['ungroupable_and_dimensioned'] +
                    coord_comp['resamplable'])
        if blocking:
            raise ValueError('This operation cannot be performed as there '
                             'are differing coordinates (%s) remaining '
                             'which cannot be ignored.'
                             % ', '.join({grp.name() for grp in blocking}))

    new_cube = _binary_op_common(operation_function, operation_name, cube,
                                 other, cube.units, dim, in_place)

    if coord_comp:
        # Strip out coordinates flagged as ignorable.
        removable = [grp[0] for grp in coord_comp['ignorable'] if grp[0]]
        for coord in removable:
            new_cube.remove_coord(coord)

    return new_cube
Exemplo n.º 15
0
def reset_load_rules():
    """
    Resets the GRIB load process to use only the standard conversion
    rules.

    .. deprecated:: 1.7

    """
    # Rule customisation was withdrawn; this call now only warns.
    warn_deprecated('reset_load_rules was deprecated in v1.7.')
Exemplo n.º 16
0
def reset_load_rules():
    """
    Resets the GRIB load process to use only the standard conversion rules.

    .. deprecated:: 1.7

    """
    # No-op apart from the deprecation warning.
    warn_deprecated('reset_load_rules was deprecated in v1.7.')
Exemplo n.º 17
0
 def __setattr__(self, name, value):
     """Reject unknown attributes; warn when a deprecated one is set."""
     if name in self.deprecated_options:
         warn_deprecated("the 'Future' object property {!r} is now "
                         "deprecated. Please remove code which uses "
                         "this.".format(name))
     if name not in self.__dict__:
         raise AttributeError(
             "'Future' object has no attribute {!r}".format(name))
     self.__dict__[name] = value
Exemplo n.º 18
0
 def __setattr__(self, name, value):
     """Only pre-existing attributes may be set; deprecated ones warn."""
     if name in self.deprecated_options:
         text = ("the 'Future' object property {!r} is now deprecated. "
                 "Please remove code which uses this.").format(name)
         warn_deprecated(text)
     if name not in self.__dict__:
         raise AttributeError(
             "'Future' object has no attribute {!r}".format(name))
     self.__dict__[name] = value
Exemplo n.º 19
0
 def __init__(self, coord, dims=None):
     """Hold a coordinate together with its cube dimension mapping."""
     warn_deprecated(
         "the `iris.fileformats.rules.CoordAndDims class is deprecated.")
     self.coord = coord
     # Normalise `dims` to a list: None -> [], scalar -> [scalar].
     if dims is None:
         self.dims = []
     elif isinstance(dims, list):
         self.dims = dims
     else:
         self.dims = [dims]
Exemplo n.º 20
0
 def __init__(self, coord, dims=None):
     """Pair a coordinate with the list of cube dimensions it spans."""
     warn_deprecated(
         "the `iris.fileformats.rules.CoordAndDims class is deprecated.")
     self.coord = coord
     if dims is None:
         dims = []
     # A bare scalar dimension is wrapped into a single-element list.
     self.dims = dims if isinstance(dims, list) else [dims]
Exemplo n.º 21
0
    def loadcubes_user_callback_wrapper(cube, field, filename):
        """Run legacy custom rules (if any), then the user callback."""
        # Custom rules are deprecated; warn whenever they are present.
        if loader.legacy_custom_rules:
            warn_deprecated('The `legacy_custom_rules` attribute of '
                            'the `loader` is deprecated.')
            loader.legacy_custom_rules.verify(cube, field)

        # Hand the (possibly modified) cube to the user's callback.
        return iris.io.run_callback(user_callback, cube, field, filename)
Exemplo n.º 22
0
    def loadcubes_user_callback_wrapper(cube, field, filename):
        """Apply deprecated legacy custom rules, then the user callback."""
        rules = loader.legacy_custom_rules
        if rules:
            warn_deprecated('The `legacy_custom_rules` attribute of '
                            'the `loader` is deprecated.')
            rules.verify(cube, field)

        # Then also run user-provided original callback function.
        return iris.io.run_callback(user_callback, cube, field, filename)
Exemplo n.º 23
0
def as_pairs(cube):
    """
    .. deprecated:: 1.10
    Please use :func:`iris.fileformats.grib.save_pairs_from_cube`
    for the same functionality.


    """
    # Thin deprecated alias for save_pairs_from_cube.
    warn_deprecated('as_pairs is deprecated in v1.10; please use '
                    'save_pairs_from_cube instead.')
    return save_pairs_from_cube(cube)
Exemplo n.º 24
0
def as_pairs(cube):
    """
    .. deprecated:: 1.10
    Please use :func:`iris.fileformats.grib.save_pairs_from_cube`
    for the same functionality.


    """
    # Delegate directly to the replacement function.
    warn_deprecated(
        'as_pairs is deprecated in v1.10; please use '
        'save_pairs_from_cube instead.')
    return save_pairs_from_cube(cube)
Exemplo n.º 25
0
    def loadcubes_user_callback_wrapper(cube, field, filename):
        """Apply deprecated legacy rules, then any user callback."""
        if loader.legacy_custom_rules:
            warn_deprecated('The `legacy_custom_rules` attribute of '
                            'the `loader` is deprecated.')
            loader.legacy_custom_rules.verify(cube, field)

        # Only invoke the user callback when one was supplied.
        if user_callback is None:
            return cube
        return user_callback(cube, field, filename)
Exemplo n.º 26
0
def _construct_midpoint_coord(coord, circular=None):
    """
    Return a coordinate of mid-points from the given coordinate. If the
    given coordinate has length n and the circular flag set then the
    result will be a coordinate of length n, otherwise the result will be
    of length n-1.

    """
    if circular and not hasattr(coord, 'circular'):
        raise ValueError('Cannot produce circular midpoint from a coord '
                         'without the circular attribute')

    if circular is None:
        circular = getattr(coord, 'circular', False)
    elif circular != getattr(coord, 'circular', False):
        warn_deprecated('circular flag and Coord.circular attribute do '
                        'not match')

    if coord.ndim != 1:
        raise iris.exceptions.CoordinateMultiDimError(coord)
    if coord.shape == (1,) and not circular:
        raise ValueError('Cannot take the midpoints of a single valued '
                         'coordinate.')

    # Calculate the delta of the coordinate
    # (this deals with circularity nicely).
    mid_point_coord = _construct_delta_coord(coord)

    # if the coord is circular then include the last one, else, just take 0:-1
    circular_slice = slice(0, -1 if not circular else None)

    if coord.bounds is not None:
        axis_delta = mid_point_coord.bounds
        mid_point_bounds = axis_delta * 0.5 + coord.bounds[circular_slice, :]
    else:
        mid_point_bounds = None

    # Get the deltas
    axis_delta = mid_point_coord.points
    # Add half of the deltas to the original points
    # if the coord is circular then include the last one, else, just take 0:-1
    mid_point_points = axis_delta * 0.5 + coord.points[circular_slice]

    # Try creating a coordinate of the same type as before, otherwise,
    # make an AuxCoord.
    try:
        mid_point_coord = coord.from_coord(coord).copy(mid_point_points,
                                                       mid_point_bounds)
    except ValueError:
        mid_point_coord = iris.coords.AuxCoord.from_coord(coord).copy(
            mid_point_points, mid_point_bounds)

    return mid_point_coord
Exemplo n.º 27
0
def _construct_midpoint_coord(coord, circular=None):
    """
    Return a coordinate of mid-points from the given coordinate. If the
    given coordinate has length n and the circular flag set then the
    result will be a coordinate of length n, otherwise the result will be
    of length n-1.

    """
    if circular and not hasattr(coord, 'circular'):
        raise ValueError('Cannot produce circular midpoint from a coord '
                         'without the circular attribute')

    if circular is None:
        circular = getattr(coord, 'circular', False)
    elif circular != getattr(coord, 'circular', False):
        warn_deprecated('circular flag and Coord.circular attribute do '
                        'not match')

    if coord.ndim != 1:
        raise iris.exceptions.CoordinateMultiDimError(coord)
    if coord.shape == (1, ) and not circular:
        raise ValueError('Cannot take the midpoints of a single valued '
                         'coordinate.')

    # Calculate the delta of the coordinate
    # (this deals with circularity nicely).
    mid_point_coord = _construct_delta_coord(coord)

    # if the coord is circular then include the last one, else, just take 0:-1
    circular_slice = slice(0, -1 if not circular else None)

    if coord.bounds is not None:
        axis_delta = mid_point_coord.bounds
        mid_point_bounds = axis_delta * 0.5 + coord.bounds[circular_slice, :]
    else:
        mid_point_bounds = None

    # Get the deltas
    axis_delta = mid_point_coord.points
    # Add half of the deltas to the original points
    # if the coord is circular then include the last one, else, just take 0:-1
    mid_point_points = axis_delta * 0.5 + coord.points[circular_slice]

    # Try creating a coordinate of the same type as before, otherwise,
    # make an AuxCoord.
    try:
        mid_point_coord = coord.from_coord(coord).copy(mid_point_points,
                                                       mid_point_bounds)
    except ValueError:
        mid_point_coord = iris.coords.AuxCoord.from_coord(coord).copy(
            mid_point_points, mid_point_bounds)

    return mid_point_coord
Exemplo n.º 28
0
    def loadcubes_user_callback_wrapper(cube, field, filename):
        """Verify via deprecated legacy rules, then run the user callback."""
        rules = loader.legacy_custom_rules
        if rules:
            warn_deprecated('The `legacy_custom_rules` attribute of '
                            'the `loader` is deprecated.')
            rules.verify(cube, field)

        # Pass through the user callback when provided, else the cube as-is.
        return cube if user_callback is None else user_callback(cube, field,
                                                                filename)
Exemplo n.º 29
0
    def __init__(self, grib_message, grib_fh=None, auto_regularise=True):
        # NOTE(review): the warning is emitted before the string on the
        # next-but-one line, so that string is a no-op expression
        # statement, not a real docstring.
        warn_deprecated('Deprecated at version 1.10')
        """Store the grib message and compute our extra keys."""
        self.grib_message = grib_message
        # Deferred (lazy) data loading is used when a file handle is given.
        deferred = grib_fh is not None

        # Store the file pointer and message length from the current
        # grib message before it's changed by calls to the grib-api.
        if deferred:
            # Note that, the grib-api has already read this message and
            # advanced the file pointer to the end of the message.
            offset = grib_fh.tell()
            message_length = gribapi.grib_get_long(grib_message, 'totalLength')

        if auto_regularise and _is_quasi_regular_grib(grib_message):
            warnings.warn('Regularising GRIB message.')
            if deferred:
                self._regularise_shape(grib_message)
            else:
                _regularise(grib_message)

        # Initialise the key-extension dictionary.
        # NOTE: this attribute *must* exist, or the the __getattr__ overload
        # can hit an infinite loop.
        self.extra_keys = {}
        self._confirm_in_scope()
        self._compute_extra_keys()

        # Calculate the data payload shape.
        # Default: a flat 1-D shape from the message's value count.
        shape = (gribapi.grib_get_long(grib_message, 'numberOfValues'),)

        if not self.gridType.startswith('reduced'):
            ni, nj = self.Ni, self.Nj
            j_fast = gribapi.grib_get_long(grib_message,
                                           'jPointsAreConsecutive')
            shape = (nj, ni) if j_fast == 0 else (ni, nj)

        if deferred:
            # Wrap the reference to the data payload within the data proxy
            # in order to support deferred data loading.
            # The byte offset requires to be reset back to the first byte
            # of this message. The file pointer offset is always at the end
            # of the current message due to the grib-api reading the message.
            # NOTE(review): np.zeros(.0).dtype obtains the default float
            # dtype; a float size argument is unusual -- confirm it is
            # accepted by the numpy version in use.
            proxy = GribDataProxy(shape, np.zeros(.0).dtype, np.nan,
                                  grib_fh.name,
                                  offset - message_length,
                                  auto_regularise)
            self._data = biggus.NumpyArrayAdapter(proxy)
        else:
            # Eager path: read the values straight from the message.
            self.data = _message_values(grib_message, shape)
Exemplo n.º 30
0
    def __init__(self, grib_message, grib_fh=None, auto_regularise=True):
        # NOTE(review): the warning precedes the string on the
        # next-but-one line, so that string is a no-op expression
        # statement, not a real docstring.
        warn_deprecated('Deprecated at version 1.10')
        """Store the grib message and compute our extra keys."""
        self.grib_message = grib_message
        # Deferred (lazy) data loading is used when a file handle is given.
        deferred = grib_fh is not None

        # Store the file pointer and message length from the current
        # grib message before it's changed by calls to the grib-api.
        if deferred:
            # Note that, the grib-api has already read this message and
            # advanced the file pointer to the end of the message.
            offset = grib_fh.tell()
            message_length = gribapi.grib_get_long(grib_message, 'totalLength')

        if auto_regularise and _is_quasi_regular_grib(grib_message):
            warnings.warn('Regularising GRIB message.')
            if deferred:
                self._regularise_shape(grib_message)
            else:
                _regularise(grib_message)

        # Initialise the key-extension dictionary.
        # NOTE: this attribute *must* exist, or the the __getattr__ overload
        # can hit an infinite loop.
        self.extra_keys = {}
        self._confirm_in_scope()
        self._compute_extra_keys()

        # Calculate the data payload shape.
        # Default: a flat 1-D shape from the message's value count.
        shape = (gribapi.grib_get_long(grib_message, 'numberOfValues'), )

        if not self.gridType.startswith('reduced'):
            ni, nj = self.Ni, self.Nj
            j_fast = gribapi.grib_get_long(grib_message,
                                           'jPointsAreConsecutive')
            shape = (nj, ni) if j_fast == 0 else (ni, nj)

        if deferred:
            # Wrap the reference to the data payload within the data proxy
            # in order to support deferred data loading.
            # The byte offset requires to be reset back to the first byte
            # of this message. The file pointer offset is always at the end
            # of the current message due to the grib-api reading the message.
            # NOTE(review): np.zeros(.0).dtype obtains the default float
            # dtype; a float size argument is unusual -- confirm it is
            # accepted by the numpy version in use.
            proxy = GribDataProxy(shape,
                                  np.zeros(.0).dtype, np.nan, grib_fh.name,
                                  offset - message_length, auto_regularise)
            self._data = biggus.NumpyArrayAdapter(proxy)
        else:
            # Eager path: read the values straight from the message.
            self.data = _message_values(grib_message, shape)
Exemplo n.º 31
0
def load_cubes(filenames, callback=None, auto_regularise=True):
    """
    Returns a generator of cubes from the given list of filenames.

    Args:

    * filenames (string/list):
        One or more GRIB filenames to load from.

    Kwargs:

    * callback (callable function):
        Function which can be passed on to :func:`iris.io.run_callback`.

    * auto_regularise (*True* | *False*):
        If *True*, any cube defined on a reduced grid will be interpolated
        to an equivalent regular grid. If *False*, any cube defined on a
        reduced grid will be loaded on the raw reduced grid with no shape
        information. If `iris.FUTURE.strict_grib_load` is `True` then this
        keyword has no effect, raw grids are always used. If the older GRIB
        loader is in use then the default behaviour is to interpolate cubes
        on a reduced grid to an equivalent regular grid.

        .. deprecated:: 1.8. Please use strict_grib_load and regrid instead.


    """
    if iris.FUTURE.strict_grib_load:
        # New-style loading: iterate GribMessage objects and convert them
        # with the strict (_load_convert) conversion rules.
        grib_loader = iris.fileformats.rules.Loader(
            GribMessage.messages_from_filename, {},
            iris.fileformats.grib._load_convert.convert)
    else:
        if auto_regularise is not None:
            # The old loader supports the auto_regularise keyword, but in
            # deprecation mode, so warn if it is found.
            # (Fixed missing space after "the" in the original message.)
            msg = ("the `auto_regularise` kwarg is deprecated and "
                   "will be removed in a future release. Resampling "
                   "quasi-regular grids on load will no longer be "
                   "available.  Resampling should be done on the "
                   "loaded cube instead using Cube.regrid.")
            warn_deprecated(msg)

        # Old-style loading: GribWrapper fields via grib_generator, converted
        # with the legacy load_rules.
        grib_loader = iris.fileformats.rules.Loader(
            grib_generator, {"auto_regularise": auto_regularise},
            iris.fileformats.grib.load_rules.convert)
    return iris.fileformats.rules.load_cubes(filenames, callback, grib_loader)
Exemplo n.º 32
0
    def __init__(self, filepath=None, rule_type=FunctionRule):
        """Create a new rule set, optionally adding rules from the specified file.

        The rule_type defaults to :class:`FunctionRule`,
        e.g for CM loading actions that return objects, such as *AuxCoord(...)*

        rule_type can also be set to :class:`ProcedureRule`
        e.g for PP saving actions that do not return anything, such as *pp.lbuser[3] = 16203*
        """
        if _enable_rules_deprecations:
            # Fixed the unbalanced backtick around the class name in the
            # original deprecation message.
            warn_deprecated(
                "the `iris.fileformats.rules.RulesContainer` class is "
                "deprecated.")
        # Rules are kept in order of addition; each entry is an instance of
        # self.rule_type.
        self._rules = []
        self.rule_type = rule_type
        if filepath is not None:
            self.import_rules(filepath)
Exemplo n.º 33
0
    def __init__(self, filepath=None, rule_type=FunctionRule):
        """Create a new rule set, optionally adding rules from the specified file.

        The rule_type defaults to :class:`FunctionRule`,
        e.g for CM loading actions that return objects, such as *AuxCoord(...)*

        rule_type can also be set to :class:`ProcedureRule`
        e.g for PP saving actions that do not return anything, such as *pp.lbuser[3] = 16203*
        """
        # NOTE(review): the message below is missing the closing backtick
        # after `RulesContainer` — confirm and fix upstream.
        if _enable_rules_deprecations:
            warn_deprecated(
                "the `iris.fileformats.rules.RulesContainer class is deprecated."
            )
        # Rules are stored in order of addition; each is an instance of
        # self.rule_type.
        self._rules = []
        self.rule_type = rule_type
        if filepath is not None:
            self.import_rules(filepath)
Exemplo n.º 34
0
def load_cubes(filenames, callback=None, auto_regularise=True):
    """
    Returns a generator of cubes from the given list of filenames.

    Args:

    * filenames (string/list):
        One or more GRIB filenames to load from.

    Kwargs:

    * callback (callable function):
        Function which can be passed on to :func:`iris.io.run_callback`.

    * auto_regularise (*True* | *False*):
        If *True*, any cube defined on a reduced grid will be interpolated
        to an equivalent regular grid. If *False*, any cube defined on a
        reduced grid will be loaded on the raw reduced grid with no shape
        information. If `iris.FUTURE.strict_grib_load` is `True` then this
        keyword has no effect, raw grids are always used. If the older GRIB
        loader is in use then the default behaviour is to interpolate cubes
        on a reduced grid to an equivalent regular grid.

        .. deprecated:: 1.8. Please use strict_grib_load and regrid instead.


    """
    if iris.FUTURE.strict_grib_load:
        # New-style loading: iterate GribMessage objects, converted by the
        # strict (_load_convert) conversion rules.
        grib_loader = iris.fileformats.rules.Loader(
            GribMessage.messages_from_filename, {},
            iris.fileformats.grib._load_convert.convert)
    else:
        if auto_regularise is not None:
            # The old loader supports the auto_regularise keyword, but in
            # deprecation mode, so warning if it is found.
            # NOTE(review): "the`auto_regularise`" is missing a space — fix
            # the message text upstream.
            msg = ('the`auto_regularise` kwarg is deprecated and '
                   'will be removed in a future release. Resampling '
                   'quasi-regular grids on load will no longer be '
                   'available.  Resampling should be done on the '
                   'loaded cube instead using Cube.regrid.')
            warn_deprecated(msg)

        # Old-style loading: GribWrapper fields via grib_generator, converted
        # with the legacy load_rules.
        grib_loader = iris.fileformats.rules.Loader(
            grib_generator, {'auto_regularise': auto_regularise},
            iris.fileformats.grib.load_rules.convert)
    return iris.fileformats.rules.load_cubes(filenames, callback, grib_loader)
Exemplo n.º 35
0
 def __setattr__(self, name, value):
     """Intercept attribute assignment to police deprecated 'Future' options.

     Deprecated options either raise an AttributeError (level 'error' when
     being disabled) or emit a deprecation warning; assignment to unknown
     attribute names is rejected outright.
     """
     if name in self.deprecated_options:
         level = self.deprecated_options[name]
         if level == 'error' and not value:
             # Disabling an 'error'-level option is no longer possible.
             error_template = ("setting the 'Future' property {prop!r} has been "
                               "deprecated to be removed in a future release, and "
                               "deprecated {prop!r} behaviour has been removed. "
                               "Please remove code that sets this property.")
             raise AttributeError(error_template.format(prop=name))
         # Any other assignment to a deprecated option just warns.
         warn_template = ("setting the 'Future' property {!r} is deprecated "
                          "and will be removed in a future release. "
                          "Please remove code that sets this property.")
         warn_deprecated(warn_template.format(name))
     if name not in self.__dict__:
         # Only pre-declared option attributes may be assigned.
         raise AttributeError(
             "'Future' object has no attribute {!r}".format(name))
     self.__dict__[name] = value
Exemplo n.º 36
0
 def __setattr__(self, name, value):
     # Intercept assignment to 'Future' options: deprecated options either
     # raise (level 'error' when being disabled) or emit a deprecation
     # warning; unknown attribute names are rejected.
     if name in self.deprecated_options:
         level = self.deprecated_options[name]
         if level == 'error' and not value:
             # Disabling an 'error'-level option is no longer possible.
             emsg = ("setting the 'Future' property {prop!r} has been "
                     "deprecated to be removed in a future release, and "
                     "deprecated {prop!r} behaviour has been removed. "
                     "Please remove code that sets this property.")
             raise AttributeError(emsg.format(prop=name))
         else:
             # Any other assignment to a deprecated option just warns.
             msg = ("setting the 'Future' property {!r} is deprecated "
                    "and will be removed in a future release. "
                    "Please remove code that sets this property.")
             warn_deprecated(msg.format(name))
     if name not in self.__dict__:
         # Only pre-declared option attributes may be assigned.
         msg = "'Future' object has no attribute {!r}".format(name)
         raise AttributeError(msg)
     self.__dict__[name] = value
Exemplo n.º 37
0
def grib_generator(filename, auto_regularise=True):
    """
    Returns a generator of :class:`~iris.fileformats.grib.GribWrapper`
    fields from the given filename.

    .. deprecated:: 1.10

    The function:
    :meth:`iris.fileformats.grib.message.GribMessage.messages_from_filename`
    provides an alternative means of obtaining GRIB messages from a file.

    Args:

    * filename (string):
        Name of the file to generate fields from.

    Kwargs:

    * auto_regularise (*True* | *False*):
        If *True*, any field defined on a reduced grid will be interpolated
        to an equivalent regular grid. If *False*, any field defined on a
        reduced grid will be loaded on the raw reduced grid with no shape
        information. The default behaviour is to interpolate fields on a
        reduced grid to an equivalent regular grid.

    """
    warn_deprecated('Deprecated at version 1.10')
    with open(filename, 'rb') as grib_fh:
        # Pull messages from the file until the grib-api reports exhaustion
        # (it returns None when no further message is available).
        for message in iter(lambda: gribapi.grib_new_from_file(grib_fh),
                            None):
            yield GribWrapper(message, grib_fh, auto_regularise)

            # Finished with the grib message - claimed by the ecmwf c
            # library, so hand the memory back.
            gribapi.grib_release(message)
Exemplo n.º 38
0
def grib_generator(filename, auto_regularise=True):
    """
    Returns a generator of :class:`~iris.fileformats.grib.GribWrapper`
    fields from the given filename.

    .. deprecated:: 1.10

    The function:
    :meth:`iris.fileformats.grib.message.GribMessage.messages_from_filename`
    provides alternative means of obtaining GRIB messages from a file.

    Args:

    * filename (string):
        Name of the file to generate fields from.

    Kwargs:

    * auto_regularise (*True* | *False*):
        If *True*, any field defined on a reduced grid will be interpolated
        to an equivalent regular grid. If *False*, any field defined on a
        reduced grid will be loaded on the raw reduced grid with no shape
        information. The default behaviour is to interpolate fields on a
        reduced grid to an equivalent regular grid.

    """
    warn_deprecated('Deprecated at version 1.10')
    with open(filename, 'rb') as grib_fh:
        # Read messages one at a time; grib_new_from_file returns None once
        # the file is exhausted.
        while True:
            grib_message = gribapi.grib_new_from_file(grib_fh)
            if grib_message is None:
                break

            grib_wrapper = GribWrapper(grib_message, grib_fh, auto_regularise)

            yield grib_wrapper

            # finished with the grib message - claimed by the ecmwf c library.
            gribapi.grib_release(grib_message)
Exemplo n.º 39
0
def log(*args, **kwargs):
    """Deprecated public wrapper which forwards to the private rules logger."""
    if _enable_rules_deprecations:
        msg = "The `iris.fileformats.rules.log()` method is deprecated."
        warn_deprecated(msg)
    return _log_rules(*args, **kwargs)
Exemplo n.º 40
0
def nearest_neighbour_indices(cube, sample_points):
    """
    Returns the indices to select the data value(s) closest to the given coordinate point values.

    The sample_points mapping does not have to include coordinate values corresponding to all data
    dimensions. Any dimensions unspecified will default to a full slice.

    For example:

        >>> cube = iris.load_cube(iris.sample_data_path('ostia_monthly.nc'))
        >>> iris.analysis.interpolate.nearest_neighbour_indices(cube, [('latitude', 0), ('longitude', 10)])
        (slice(None, None, None), 9, 12)
        >>> iris.analysis.interpolate.nearest_neighbour_indices(cube, [('latitude', 0)])
        (slice(None, None, None), 9, slice(None, None, None))

    Args:

    * cube:
        An :class:`iris.cube.Cube`.
    * sample_points
        A list of tuple pairs mapping coordinate instances or unique coordinate names in the cube to point values.

    Returns:
        The tuple of indices which will select the point in the cube closest to the supplied coordinate values.

    .. note::

        Nearest neighbour interpolation of multidimensional coordinates is not
        yet supported.

    .. deprecated:: 1.10

        The module :mod:`iris.analysis.interpolate` is deprecated.
        Please replace usage of
        :func:`iris.analysis.interpolate.nearest_neighbour_indices`
        with :meth:`iris.coords.Coord.nearest_neighbour_index`.

    """
    if isinstance(sample_points, dict):
        msg = ('Providing a dictionary to specify points is deprecated. '
               'Please provide a list of (coordinate, values) pairs.')
        warn_deprecated(msg)
        sample_points = list(sample_points.items())

    # Check that the first entry unpacks as a pair, to give an early, clear
    # error for malformed input.
    if sample_points:
        try:
            coord, values = sample_points[0]
        except ValueError:
            raise ValueError('Sample points must be a list of (coordinate, '
                             'value) pairs. Got %r.' % sample_points)

    # Resolve each coordinate name or instance to the cube's own coordinate.
    # (The original code had identical `if`/`else` branches here, both
    # calling cube.coord(); cube.coord() accepts either form.)
    sample_points = [(cube.coord(coord), values)
                     for coord, values in sample_points]

    # Build up a list of indices to span the cube; unspecified dimensions
    # remain full slices.
    indices = [slice(None, None)] * cube.ndim

    # Map each data dimension to the sample-point coordinates that index it,
    # for error reporting when a dimension is over-specified.
    dim_to_coord_map = {dim: [] for dim in range(cube.ndim)}

    # Iterate over all of the specifications provided by sample_points.
    for coord, point in sample_points:
        data_dim = cube.coord_dims(coord)

        # If no data dimension then we don't need to make any modifications
        # to indices (scalar coordinate).
        if not data_dim:
            continue
        elif len(data_dim) > 1:
            raise iris.exceptions.CoordinateMultiDimError(
                "Nearest neighbour interpolation of multidimensional "
                "coordinates is not supported.")
        data_dim = data_dim[0]

        dim_to_coord_map[data_dim].append(coord)

        # Calculate the nearest neighbour.
        min_index = coord.nearest_neighbour_index(point)

        if getattr(coord, 'circular', False):
            warnings.warn("Nearest neighbour on a circular coordinate may "
                          "not be picking the nearest point.",
                          DeprecationWarning)

        # If the dimension has already been interpolated then assert that the
        # index from this coordinate agrees with the index already
        # calculated, otherwise we have a contradicting specification.
        if indices[data_dim] != slice(None, None) and \
                min_index != indices[data_dim]:
            raise ValueError(
                'The coordinates provided (%s) over specify dimension %s.' %
                (', '.join([c.name()
                            for c in dim_to_coord_map[data_dim]]),
                 data_dim))

        indices[data_dim] = min_index

    return tuple(indices)
Exemplo n.º 41
0
def convert(grib):
    """
    Converts a GRIB message into the corresponding items of Cube metadata.

    Args:

    * grib:
        A :class:`~iris.fileformats.grib.GribWrapper` object.

    Returns:
        A :class:`iris.fileformats.rules.ConversionMetadata` object.

    """
    factories = []
    references = []
    standard_name = None
    long_name = None
    units = None
    attributes = {}
    cell_methods = []
    dim_coords_and_dims = []
    aux_coords_and_dims = []

    # Deprecation warning for this code path for edition 2 messages.
    if grib.edition == 2:
        msg = ('This GRIB loader is deprecated and will be removed in '
               'a future release.  Please consider using the new '
               'GRIB loader by setting the :class:`iris.Future` '
               'option `strict_grib_load` to True; e.g.:\n'
               'iris.FUTURE.strict_grib_load = True\n'
               'Please report issues you experience to:\n'
               'https://groups.google.com/forum/#!topic/scitools-iris-dev/'
               'lMsOusKNfaU')
        warn_deprecated(msg)

    # --- Horizontal grid coordinates -----------------------------------
    if grib.gridType == "reduced_gg":
        # A reduced Gaussian grid is unstructured: both lat and lon map onto
        # the single data dimension as auxiliary coordinates.
        aux_coords_and_dims.append(
            (AuxCoord(grib._y_points, grib._y_coord_name, units='degrees',
                      coord_system=grib._coord_system), 0))
        aux_coords_and_dims.append(
            (AuxCoord(grib._x_points, grib._x_coord_name, units='degrees',
                      coord_system=grib._coord_system), 0))

    if grib.gridType in ("regular_ll", "regular_gg", "rotated_ll") and \
            grib.jPointsAreConsecutive in (0, 1):
        # jPointsAreConsecutive == 0 means y varies slowest (y dim 0),
        # == 1 means y varies fastest (y dim 1).
        y_dim = grib.jPointsAreConsecutive
        x_dim = 1 - y_dim
        dim_coords_and_dims.append(
            (DimCoord(grib._y_points, grib._y_coord_name, units='degrees',
                      coord_system=grib._coord_system), y_dim))
        dim_coords_and_dims.append(
            (DimCoord(grib._x_points, grib._x_coord_name, units='degrees',
                      coord_system=grib._coord_system,
                      circular=grib._x_circular), x_dim))

    if grib.gridType in ["polar_stereographic", "lambert"]:
        # Projected grids use projection (x, y) coordinates in metres.
        dim_coords_and_dims.append(
            (DimCoord(grib._y_points, grib._y_coord_name, units="m",
                      coord_system=grib._coord_system), 0))
        dim_coords_and_dims.append(
            (DimCoord(grib._x_points, grib._x_coord_name, units="m",
                      coord_system=grib._coord_system), 1))

    # --- Phenomenon identification -------------------------------------
    if grib.edition == 1 and grib.table2Version < 128 and \
            grib._cf_data is None:
        # Well-known WMO table 2 parameters (no CF translation available).
        known_params = {11: ("air_temperature", "kelvin"),
                        33: ("x_wind", "m s-1"),
                        34: ("y_wind", "m s-1")}
        if grib.indicatorOfParameter in known_params:
            standard_name, units = known_params[grib.indicatorOfParameter]

    if grib.edition == 1 and grib._cf_data is not None:
        standard_name = grib._cf_data.standard_name
        long_name = grib._cf_data.standard_name or grib._cf_data.long_name
        units = grib._cf_data.units

    if grib.edition == 1 and grib.table2Version >= 128 and \
            grib._cf_data is None:
        # Unrecognised local (centre-specific) parameter.
        long_name = ("UNKNOWN LOCAL PARAM " +
                     str(grib.indicatorOfParameter) + "." +
                     str(grib.table2Version))
        units = "???"

    if grib.edition == 1 and grib.table2Version == 1 and \
            grib.indicatorOfParameter >= 128:
        long_name = ("UNKNOWN LOCAL PARAM " +
                     str(grib.indicatorOfParameter) + "." +
                     str(grib.table2Version))
        units = "???"

    if grib.edition == 2 and grib._cf_data is not None:
        standard_name = grib._cf_data.standard_name
        long_name = grib._cf_data.long_name
        units = grib._cf_data.units

    # --- Time coordinates and cell methods -----------------------------
    if grib.edition == 1 and grib._phenomenonDateTime != -1.0:
        # Instantaneous (unbounded) validity time.
        aux_coords_and_dims.append(
            (DimCoord(points=grib.startStep,
                      standard_name='forecast_period',
                      units=grib._forecastTimeUnit), None))
        aux_coords_and_dims.append(
            (DimCoord(points=grib.phenomenon_points('hours'),
                      standard_name='time',
                      units=Unit('hours since epoch',
                                 CALENDAR_GREGORIAN)), None))

    def add_bounded_time_coords(aux_coords_and_dims, grib):
        # Mid-point forecast_period and time coordinates, with bounds
        # spanning the statistical processing period.
        t_bounds = grib.phenomenon_bounds('hours')
        period = Unit('hours').convert(t_bounds[1] - t_bounds[0],
                                       grib._forecastTimeUnit)
        aux_coords_and_dims.append((
            DimCoord(standard_name='forecast_period',
                     units=grib._forecastTimeUnit,
                     points=grib._forecastTime + 0.5 * period,
                     bounds=[grib._forecastTime, grib._forecastTime + period]),
            None))
        aux_coords_and_dims.append((
            DimCoord(standard_name='time',
                     units=Unit('hours since epoch', CALENDAR_GREGORIAN),
                     points=0.5 * (t_bounds[0] + t_bounds[1]),
                     bounds=t_bounds),
            None))

    if grib.edition == 1:
        # GRIB1 timeRangeIndicator -> cell method name (None for indicator 2,
        # which defines time bounds but no statistical processing).
        tri_cell_methods = {2: None, 3: "mean", 4: "sum", 5: "_difference",
                            51: "mean", 113: "mean", 114: "sum",
                            115: "mean", 116: "sum", 117: "mean",
                            118: "_covariance", 123: "mean", 124: "sum",
                            125: "standard_deviation"}
        if grib.timeRangeIndicator in tri_cell_methods:
            add_bounded_time_coords(aux_coords_and_dims, grib)
            method = tri_cell_methods[grib.timeRangeIndicator]
            if method is not None:
                cell_methods.append(CellMethod(method, coords="time"))

    if grib.edition == 2 and grib.productDefinitionTemplateNumber == 0:
        # Instantaneous product: point forecast_period and time.
        aux_coords_and_dims.append(
            (DimCoord(points=Unit(grib._forecastTimeUnit).convert(
                          np.int32(grib._forecastTime), "hours"),
                      standard_name='forecast_period', units="hours"), None))
        aux_coords_and_dims.append(
            (DimCoord(points=grib.phenomenon_points('hours'),
                      standard_name='time',
                      units=Unit('hours since epoch',
                                 CALENDAR_GREGORIAN)), None))

    if grib.edition == 2 and \
            grib.productDefinitionTemplateNumber in (8, 9):
        add_bounded_time_coords(aux_coords_and_dims, grib)

    if grib.edition == 2 and grib.productDefinitionTemplateNumber == 8:
        # GRIB2 code table 4.10 (type of statistical processing) -> cell
        # method name.  NOTE: entry 7 was previously the misspelt
        # "_convariance"; code table 4.10 defines it as covariance, matching
        # the edition-1 timeRangeIndicator 118 handling above.
        stat_cell_methods = {0: "mean", 1: "sum", 2: "maximum", 3: "minimum",
                             4: "_difference", 5: "_root_mean_square",
                             6: "standard_deviation", 7: "_covariance",
                             8: "_difference", 9: "_ratio"}
        method = stat_cell_methods.get(grib.typeOfStatisticalProcessing)
        if method is not None:
            cell_methods.append(CellMethod(method, coords="time"))

    # --- Vertical coordinates ------------------------------------------
    if grib.edition == 1 and grib.levelType == 'pl':
        aux_coords_and_dims.append(
            (DimCoord(points=grib.level, long_name="pressure",
                      units="hPa"), None))

    if grib.edition == 1 and grib.levelType == 'sfc':
        if grib._cf_data is not None and \
                grib._cf_data.set_height is not None:
            # The CF translation dictates a fixed height for this phenomenon.
            aux_coords_and_dims.append(
                (DimCoord(points=grib._cf_data.set_height,
                          long_name="height", units="m",
                          attributes={'positive': 'up'}), None))
        elif grib.typeOfLevel == 'heightAboveGround':
            # Required for NCAR data.
            aux_coords_and_dims.append(
                (DimCoord(points=grib.level, long_name="height", units="m",
                          attributes={'positive': 'up'}), None))

    if grib.edition == 1 and grib.levelType == 'ml' and \
            hasattr(grib, 'pv'):
        # GRIB1 hybrid (model) levels: build a HybridPressureFactory from the
        # pv array (first half: level pressures, second half: sigma values).
        aux_coords_and_dims.append(
            (AuxCoord(grib.level, standard_name='model_level_number',
                      attributes={'positive': 'up'}), None))
        aux_coords_and_dims.append(
            (DimCoord(grib.pv[grib.level], long_name='level_pressure',
                      units='Pa'), None))
        aux_coords_and_dims.append(
            (AuxCoord(grib.pv[grib.numberOfCoordinatesValues // 2 +
                              grib.level], long_name='sigma'), None))
        factories.append(Factory(HybridPressureFactory,
                                 [{'long_name': 'level_pressure'},
                                  {'long_name': 'sigma'},
                                  Reference('surface_pressure')]))

    if grib.edition == 2 and \
            grib.typeOfFirstFixedSurface != grib.typeOfSecondFixedSurface:
        warnings.warn("Different vertical bound types not yet handled.")

    if grib.edition == 2 and \
            grib.typeOfFirstFixedSurface in (103, 100):
        # Type 103 is height above ground (m); type 100 is pressure (Pa).
        # Fixed surface values are scaled integers: value / 10**scaleFactor.
        first = (grib.scaledValueOfFirstFixedSurface /
                 (10.0 ** grib.scaleFactorOfFirstFixedSurface))
        if grib.typeOfFirstFixedSurface == 103:
            coord_kwargs = dict(standard_name="height", units="m")
        else:
            coord_kwargs = dict(long_name="pressure", units="Pa")
        if grib.typeOfSecondFixedSurface == 255:
            # A single surface (no second bound).
            aux_coords_and_dims.append(
                (DimCoord(points=first, **coord_kwargs), None))
        else:
            # A layer between two surfaces: mid-point with bounds.
            second = (grib.scaledValueOfSecondFixedSurface /
                      (10.0 ** grib.scaleFactorOfSecondFixedSurface))
            aux_coords_and_dims.append(
                (DimCoord(points=0.5 * (first + second),
                          bounds=[first, second], **coord_kwargs), None))

    if grib.edition == 2 and \
            grib.typeOfFirstFixedSurface in [105, 119] and \
            grib.numberOfCoordinatesValues > 0:
        # GRIB2 hybrid levels (105/119) with coefficients present in pv.
        aux_coords_and_dims.append(
            (AuxCoord(grib.scaledValueOfFirstFixedSurface,
                      standard_name='model_level_number',
                      attributes={'positive': 'up'}), None))
        aux_coords_and_dims.append(
            (DimCoord(grib.pv[grib.scaledValueOfFirstFixedSurface],
                      long_name='level_pressure', units='Pa'), None))
        aux_coords_and_dims.append(
            (AuxCoord(grib.pv[grib.numberOfCoordinatesValues // 2 +
                              grib.scaledValueOfFirstFixedSurface],
                      long_name='sigma'), None))
        factories.append(Factory(HybridPressureFactory,
                                 [{'long_name': 'level_pressure'},
                                  {'long_name': 'sigma'},
                                  Reference('surface_air_pressure')]))

    # --- Miscellaneous metadata ----------------------------------------
    if grib._originatingCentre != 'unknown':
        aux_coords_and_dims.append(
            (AuxCoord(points=grib._originatingCentre,
                      long_name='originating_centre', units='no_unit'),
             None))

    if grib.edition == 2 and grib.productDefinitionTemplateNumber == 1:
        aux_coords_and_dims.append(
            (DimCoord(points=grib.perturbationNumber,
                      long_name='ensemble_member', units='no_unit'), None))

    if grib.edition == 2 and \
            grib.productDefinitionTemplateNumber not in (0, 8):
        attributes["GRIB_LOAD_WARNING"] = (
            "unsupported GRIB%d ProductDefinitionTemplate: #4.%d" %
            (grib.edition, grib.productDefinitionTemplateNumber))

    if grib.edition == 2 and grib.centre == 'ecmf' and \
            grib.discipline == 0 and grib.parameterCategory == 3 and \
            grib.parameterNumber == 25 and \
            grib.typeOfFirstFixedSurface == 105:
        # ECMWF encodes ln(surface pressure); expose the exponentiated field
        # as the reference for hybrid-pressure reconstruction.
        references.append(ReferenceTarget(
            'surface_air_pressure',
            lambda cube: {'standard_name': 'surface_air_pressure',
                          'units': 'Pa',
                          'data': np.exp(cube.data)}))

    return ConversionMetadata(factories, references, standard_name, long_name,
                              units, attributes, cell_methods,
                              dim_coords_and_dims, aux_coords_and_dims)
Exemplo n.º 42
0
def log(*args, **kwargs):
    """Deprecated pass-through to the private rules logger."""
    # Emit the deprecation notice unless rules deprecations are disabled.
    if _enable_rules_deprecations:
        message = "The `iris.fileformats.rules.log()` method is deprecated."
        warn_deprecated(message)
    # Delegate to the real implementation, unchanged.
    return _log_rules(*args, **kwargs)
Exemplo n.º 43
0
def _nearest_neighbour_indices_ndcoords(cube, sample_points, cache=None):
    """
    See documentation for :func:`iris.analysis.interpolate.nearest_neighbour_indices`.

    'sample_points' is of the form [[coord-or-coord-name, point-value(s)]*].
    The lengths of all the point-values sequences must be equal.

    This function is adapted for points sampling a multi-dimensional coord,
    and can currently only do nearest neighbour interpolation.

    Because this function can be slow for multidimensional coordinates,
    a 'cache' dictionary can be provided by the calling code.

    """

    # Developer notes:
    # A "sample space cube" is made which only has the coords and dims we are sampling on.
    # We get the nearest neighbour using this sample space cube.

    # Dict-style sample points are still accepted, but deprecated: convert
    # them to the list-of-pairs form used below.
    if isinstance(sample_points, dict):
        msg = ('Providing a dictionary to specify points is deprecated. '
               'Please provide a list of (coordinate, values) pairs.')
        warn_deprecated(msg)
        sample_points = list(sample_points.items())

    # Early sanity check: the first entry must unpack as a (coord, value)
    # pair, otherwise the caller passed the wrong structure.
    if sample_points:
        try:
            coord, value = sample_points[0]
        except ValueError:
            raise ValueError('Sample points must be a list of (coordinate, value) pairs. Got %r.' % sample_points)

    # Convert names to coords in sample_point
    # Reformat sample point values for use in _cartesian_sample_points(), below.
    coord_values = []
    sample_point_coords = []
    sample_point_coord_names = []
    # Only genuine (dim or aux) cube coordinates may be sampled; derived
    # (factory-built) coordinates are rejected by the identity check below.
    ok_coord_ids = set(map(id, cube.dim_coords + cube.aux_coords))
    for coord, value in sample_points:
        coord = cube.coord(coord)
        if id(coord) not in ok_coord_ids:
            msg = ('Invalid sample coordinate {!r}: derived coordinates are'
                   ' not allowed.'.format(coord.name()))
            raise ValueError(msg)
        sample_point_coords.append(coord)
        sample_point_coord_names.append(coord.name())
        # Promote scalars to 1-d arrays so every value sequence has a length.
        value = np.array(value, ndmin=1)
        coord_values.append(value)

    coord_point_lens = np.array([len(value) for value in coord_values])
    if not np.all(coord_point_lens == coord_point_lens[0]):
        msg = 'All coordinates must have the same number of sample points.'
        raise ValueError(msg)

    coord_values = np.array(coord_values)

    # Which dims are we sampling?
    sample_dims = set()
    for coord in sample_point_coords:
        for dim in cube.coord_dims(coord):
            sample_dims.add(dim)
    sample_dims = sorted(list(sample_dims))

    # Extract a sub cube that lives in just the sampling space.
    sample_space_slice = [0] * cube.ndim
    for sample_dim in sample_dims:
        sample_space_slice[sample_dim] = slice(None, None)
    sample_space_slice = tuple(sample_space_slice)
    sample_space_cube = cube[sample_space_slice]

    #...with just the sampling coords
    for coord in sample_space_cube.coords():
        if not coord.name() in sample_point_coord_names:
            sample_space_cube.remove_coord(coord)

    # Order the sample point coords according to the sample space cube coords
    sample_space_coord_names = [coord.name() for coord in sample_space_cube.coords()]
    new_order = [sample_space_coord_names.index(name) for name in sample_point_coord_names]
    coord_values = np.array([coord_values[i] for i in new_order])
    sample_point_coord_names = [sample_point_coord_names[i] for i in new_order]

    sample_space_coords = sample_space_cube.dim_coords + sample_space_cube.aux_coords
    sample_space_coords_and_dims = [(coord, sample_space_cube.coord_dims(coord)) for coord in sample_space_coords]

    if cache is not None and cube in cache:
        # Reuse the kd-tree built on a previous call for this cube.
        kdtree = cache[cube]
    else:
        # Create a "sample space position" for each datum: sample_space_data_positions[coord_index][datum_index]
        sample_space_data_positions = np.empty((len(sample_space_coords_and_dims), sample_space_cube.data.size), dtype=float)
        for d, ndi in enumerate(np.ndindex(sample_space_cube.data.shape)):
            for c, (coord, coord_dims) in enumerate(sample_space_coords_and_dims):
                # Index of this datum along this coordinate (could be nD).
                keys = tuple(ndi[ind] for ind in coord_dims) if coord_dims else slice(None, None)
                # Position of this datum along this coordinate.
                sample_space_data_positions[c][d] = coord.points[keys]

        # Convert to cartesian coordinates. Flatten for kdtree compatibility.
        cartesian_space_data_coords = _cartesian_sample_points(sample_space_data_positions, sample_point_coord_names)

        # Create a kdtree for the nearest-distance lookup to these 3d points.
        kdtree = scipy.spatial.cKDTree(cartesian_space_data_coords)
        # This can find the nearest datum point to any given target point,
        # which is the goal of this function.

    # Update cache
    if cache is not None:
        cache[cube] = kdtree

    # Convert the sample points to cartesian (3d) coords.
    # If there is no latlon within the coordinate there will be no change.
    # Otherwise, geographic latlon is replaced with cartesian xyz.
    cartesian_sample_points = _cartesian_sample_points(
        coord_values, sample_point_coord_names)

    # Use kdtree to get the nearest sourcepoint index for each target point.
    _, datum_index_lists = kdtree.query(cartesian_sample_points)

    # Convert flat indices back into multidimensional sample-space indices.
    sample_space_dimension_indices = np.unravel_index(
        datum_index_lists, sample_space_cube.data.shape)
    # Convert this from "pointwise list of index arrays for each dimension",
    # to "list of cube indices for each point".
    sample_space_ndis = np.array(sample_space_dimension_indices).transpose()

    # For the returned result, we must convert these indices into the source
    # (sample-space) cube, to equivalent indices into the target 'cube'.

    # Make a result array: (cube.ndim * <index>), per sample point.
    n_points = coord_values.shape[-1]
    main_cube_slices = np.empty((n_points, cube.ndim), dtype=object)
    # Initialise so all unused indices are ":".
    main_cube_slices[:] = slice(None)

    # Move result indices according to the source (sample) and target (cube)
    # dimension mappings.
    for sample_coord, sample_coord_dims in sample_space_coords_and_dims:
        # Find the coord in the main cube
        main_coord = cube.coord(sample_coord.name())
        main_coord_dims = cube.coord_dims(main_coord)
        # Fill nearest-point data indices for each coord dimension.
        for sample_i, main_i in zip(sample_coord_dims, main_coord_dims):
            main_cube_slices[:, main_i] = sample_space_ndis[:, sample_i]

    # Return as a list of **tuples** : required for correct indexing usage.
    result = [tuple(inds) for inds in main_cube_slices]
    return result
Exemplo n.º 44
0
def load(filenames, callback=None):
    """
    Load structured FieldsFiles and PP files.

    Args:

    * filenames:
        One or more filenames.


    Kwargs:

    * callback:
        A modifier/filter function. Please see the module documentation
        for :mod:`iris`.

        .. note::

            Unlike the standard :func:`iris.load` operation, the callback is
            applied to the final result cubes, not individual input fields.

    Returns:
        An :class:`iris.cube.CubeList`.


    This is a streamlined load operation, to be used only on fieldsfiles or PP
    files whose fields repeat regularly over the same vertical levels and
    times. The results aim to be equivalent to those generated by
    :func:`iris.load`, but the operation is substantially faster for input that
    is structured.

    The structured input files should conform to the following requirements:

    *  the file must contain fields for all possible combinations of the
       vertical levels and time points found in the file.

    *  the fields must occur in a regular repeating order within the file.

       (For example: a sequence of fields for NV vertical levels, repeated
       for NP different forecast periods, repeated for NT different forecast
       times).

    *  all other metadata must be identical across all fields of the same
       phenomenon.

    Each group of fields with the same values of LBUSER4, LBUSER7 and LBPROC
    is identified as a separate phenomenon:  These groups are processed
    independently and returned as separate result cubes.

    .. note::

        Each input file is loaded independently.  Thus a single result cube can
        not combine data from multiple input files.

    .. note::

        The resulting time-related coordinates ('time', 'forecast_time' and
        'forecast_period') may be mapped to shared cube dimensions and in some
        cases can also be multidimensional.  However, the vertical level
        information *must* have a simple one-dimensional structure, independent
        of the time points, otherwise an error will be raised.

    .. note::

        Where input data does *not* have a fully regular arrangement, the
        corresponding result cube will have a single anonymous extra dimension
        which indexes over all the input fields.

        This can happen if, for example, some fields are missing; or have
        slightly different metadata; or appear out of order in the file.

    .. warning::

        Any non-regular metadata variation in the input should be strictly
        avoided, as not all irregularities are detected, which can cause
        erroneous results.


    """
    # Warn at call time as well, so direct callers always see the notice.
    warn_deprecated(
        "The module 'iris.experimental.fieldsfile' is deprecated. "
        "Please use the 'iris.fileformats.um.structured_um_loading' facility "
        "as a replacement."
        "\nA call to 'iris.experimental.fieldsfile.load' can be replaced with "
        "'iris.load_raw', within a 'structured_um_loading' context.")
    # Build a loader that collates structured fields and converts each
    # collation into cube metadata, then run it over all the input files.
    structured_loader = Loader(
        _collations_from_filename, {}, _convert_collation, None)
    raw_cubes = load_cubes(filenames, callback, structured_loader, None)
    return CubeList(raw_cubes)
Exemplo n.º 45
0
def curl(i_cube, j_cube, k_cube=None, ignore=None):
    r"""
    Calculate the 2-dimensional or 3-dimensional spherical or cartesian
    curl of the given vector of cubes.

    As well as the standard x and y coordinates, this function requires each
    cube to possess a vertical or z-like coordinate (representing some form
    of height or pressure).  This can be a scalar or dimension coordinate.

    Args:

    * i_cube
        The i cube of the vector to operate on
    * j_cube
        The j cube of the vector to operate on

    Kwargs:

    * k_cube
        The k cube of the vector to operate on
    * ignore
        This argument is not used.

        .. deprecated:: 0.8
            The coordinates to ignore are determined automatically.

    Return (i_cmpt_curl_cube, j_cmpt_curl_cube, k_cmpt_curl_cube)

    If the k-cube is not passed in then the 2-dimensional curl will
    be calculated, yielding the result: [None, None, k_cube].
    If the k-cube is passed in, the 3-dimensional curl will
    be calculated, returning 3 component cubes.

    All cubes passed in must have the same data units, and those units
    must be spatially-derived (e.g. 'm/s' or 'km/h').

    The calculation of curl is dependent on the type of
    :func:`~iris.coord_systems.CoordSystem` in the cube.
    If the :func:`~iris.coord_systems.CoordSystem` is either
    GeogCS or RotatedGeogCS, the spherical curl will be calculated; otherwise
    the cartesian curl will be calculated:

        Cartesian curl

            When cartesian calculus is used, i_cube is the u component,
            j_cube is the v component and k_cube is the w component.

            The Cartesian curl is defined as:

            .. math::

                \nabla\times \vec u =
                (\frac{\delta w}{\delta y} - \frac{\delta v}{\delta z})\vec a_i
                -
                (\frac{\delta w}{\delta x} - \frac{\delta u}{\delta z})\vec a_j
                +
                (\frac{\delta v}{\delta x} - \frac{\delta u}{\delta y})\vec a_k

        Spherical curl

            When spherical calculus is used, i_cube is the :math:`\phi` vector
            component (e.g. eastward), j_cube is the :math:`\theta` component
            (e.g. northward) and k_cube is the radial component.

            The spherical curl is defined as:

            .. math::

                \nabla\times \vec A = \frac{1}{r cos \theta}
                (\frac{\delta}{\delta \theta}
                (\vec A_\phi cos \theta) -
                \frac{\delta \vec A_\theta}{\delta \phi}) \vec r +
                \frac{1}{r}(\frac{1}{cos \theta}
                \frac{\delta \vec A_r}{\delta \phi} -
                \frac{\delta}{\delta r} (r \vec A_\phi))\vec \theta +
                \frac{1}{r}
                (\frac{\delta}{\delta r}(r \vec A_\theta) -
                \frac{\delta \vec A_r}{\delta \theta}) \vec \phi

            where phi is longitude, theta is latitude.

    """
    # 'ignore' is accepted for backwards compatibility only; it is discarded.
    if ignore is not None:
        ignore = None
        warn_deprecated('The ignore keyword to iris.analysis.calculus.curl '
                        'is deprecated, ignoring is now done automatically.')

    # Get the vector quantity names.
    # (i.e. ['easterly', 'northerly', 'vertical'])
    vector_quantity_names, phenomenon_name = \
        spatial_vectors_with_phenom_name(i_cube, j_cube, k_cube)

    # Drop k_cube from the comparison set when it was not supplied (2d case).
    cubes = filter(None, [i_cube, j_cube, k_cube])

    # get the names of all coords binned into useful comparison groups
    coord_comparison = iris.analysis.coord_comparison(*cubes)

    bad_coords = coord_comparison['ungroupable_and_dimensioned']
    if bad_coords:
        raise ValueError("Coordinates found in one cube that describe "
                         "a data dimension which weren't in the other "
                         "cube ({}), try removing this coordinate.".format(
                             ', '.join(group.name() for group in bad_coords)))

    bad_coords = coord_comparison['resamplable']
    if bad_coords:
        raise ValueError('Some coordinates are different ({}), consider '
                         'resampling.'.format(
                             ', '.join(group.name() for group in bad_coords)))

    ignore_string = ''
    if coord_comparison['ignorable']:
        # NOTE(review): 'bad_coords' here still holds the 'resamplable'
        # group, which is empty if we reached this point -- presumably
        # coord_comparison['ignorable'] was intended.  'ignore_string' is
        # also never used below; confirm before relying on it.
        ignore_string = ' (ignoring {})'.format(
            ', '.join(group.name() for group in bad_coords))

    # Get the dim_coord, or None if none exist, for the xyz dimensions
    # NOTE(review): Cube.coord() raises CoordinateNotFoundError rather than
    # returning None when the axis is absent -- the comment above looks stale.
    x_coord = i_cube.coord(axis='X')
    y_coord = i_cube.coord(axis='Y')
    z_coord = i_cube.coord(axis='Z')

    # Data dimension of the y coordinate within the input cubes.
    y_dim = i_cube.coord_dims(y_coord)[0]

    horiz_cs = i_cube.coord_system('CoordSystem')

    # Non-spherical coords?
    spherical_coords = isinstance(horiz_cs, (iris.coord_systems.GeogCS,
                                  iris.coord_systems.RotatedGeogCS))
    if not spherical_coords:

        # TODO Implement some mechanism for conforming to a common grid
        dj_dx = _curl_differentiate(j_cube, x_coord)
        prototype_diff = dj_dx

        # i curl component (dk_dy - dj_dz)
        dk_dy = _curl_differentiate(k_cube, y_coord)
        dk_dy = _curl_regrid(dk_dy, prototype_diff)
        dj_dz = _curl_differentiate(j_cube, z_coord)
        dj_dz = _curl_regrid(dj_dz, prototype_diff)

        # TODO Implement resampling in the vertical (which regridding
        # does not support).
        if dj_dz is not None and dj_dz.data.shape != prototype_diff.data.shape:
            dj_dz = _curl_change_z(dj_dz, z_coord, prototype_diff)

        i_cmpt = _curl_subtract(dk_dy, dj_dz)
        # Release intermediates as soon as possible to limit peak memory.
        dj_dz = dk_dy = None

        # j curl component (di_dz - dk_dx)
        di_dz = _curl_differentiate(i_cube, z_coord)
        di_dz = _curl_regrid(di_dz, prototype_diff)

        # TODO Implement resampling in the vertical (which regridding
        # does not support).
        if di_dz is not None and di_dz.data.shape != prototype_diff.data.shape:
            di_dz = _curl_change_z(di_dz, z_coord, prototype_diff)

        dk_dx = _curl_differentiate(k_cube, x_coord)
        dk_dx = _curl_regrid(dk_dx, prototype_diff)
        j_cmpt = _curl_subtract(di_dz, dk_dx)
        di_dz = dk_dx = None

        # k curl component ( dj_dx - di_dy)
        di_dy = _curl_differentiate(i_cube, y_coord)
        di_dy = _curl_regrid(di_dy, prototype_diff)
        # Since prototype_diff == dj_dx we don't need to recalculate dj_dx
#        dj_dx = _curl_differentiate(j_cube, x_coord)
#        dj_dx = _curl_regrid(dj_dx, prototype_diff)
        k_cmpt = _curl_subtract(dj_dx, di_dy)
        di_dy = dj_dx = None

        result = [i_cmpt, j_cmpt, k_cmpt]

    # Spherical coords (GeogCS or RotatedGeogCS).
    else:
        # A_\phi = i ; A_\theta = j ; A_\r = k
        # theta = lat ; phi = long ;
        # r_cmpt = 1 / (r * cos(lat)) *
        #    (d/dtheta (i_cube * sin(lat)) - d_j_cube_dphi)
        # phi_cmpt = 1/r * ( d/dr (r * j_cube) - d_k_cube_dtheta)
        # theta_cmpt = 1/r * ( 1/cos(lat) * d_k_cube_dphi - d/dr (r * i_cube)
        if y_coord.name() not in ['latitude', 'grid_latitude'] \
                or x_coord.name() not in ['longitude', 'grid_longitude']:
            raise ValueError('Expecting latitude as the y coord and '
                             'longitude as the x coord for spherical curl.')

        # Get the radius of the earth - and check for sphericity
        ellipsoid = horiz_cs
        if isinstance(horiz_cs, iris.coord_systems.RotatedGeogCS):
            ellipsoid = horiz_cs.ellipsoid
        if ellipsoid:
            # TODO: Add a test for this
            r = ellipsoid.semi_major_axis
            r_unit = cf_units.Unit("m")
            spherical = (ellipsoid.inverse_flattening == 0.0)
        else:
            r = DEFAULT_SPHERICAL_EARTH_RADIUS
            r_unit = DEFAULT_SPHERICAL_EARTH_RADIUS_UNIT
            spherical = True

        if not spherical:
            raise ValueError('Cannot take the curl over a non-spherical '
                             'ellipsoid.')

        # Work with copies in radians; the originals are left untouched.
        lon_coord = x_coord.copy()
        lat_coord = y_coord.copy()
        lon_coord.convert_units('radians')
        lat_coord.convert_units('radians')
        lat_cos_coord = _coord_cos(lat_coord)

        # TODO Implement some mechanism for conforming to a common grid
        temp = iris.analysis.maths.multiply(i_cube, lat_cos_coord, y_dim)
        dicos_dtheta = _curl_differentiate(temp, lat_coord)
        prototype_diff = dicos_dtheta

        # r curl component: 1 / (r * cos(lat)) * (d_j_cube_dphi - dicos_dtheta)
        # Since prototype_diff == dicos_dtheta we don't need to
        # recalculate dicos_dtheta.
        d_j_cube_dphi = _curl_differentiate(j_cube, lon_coord)
        d_j_cube_dphi = _curl_regrid(d_j_cube_dphi, prototype_diff)
        new_lat_coord = d_j_cube_dphi.coord(axis='Y')
        new_lat_cos_coord = _coord_cos(new_lat_coord)
        lat_dim = d_j_cube_dphi.coord_dims(new_lat_coord)[0]
        r_cmpt = iris.analysis.maths.divide(_curl_subtract(d_j_cube_dphi,
                                                           dicos_dtheta),
                                            r * new_lat_cos_coord, dim=lat_dim)
        r_cmpt.units = r_cmpt.units / r_unit
        d_j_cube_dphi = dicos_dtheta = None

        # phi curl component: 1/r * ( drj_dr - d_k_cube_dtheta)
        drj_dr = _curl_differentiate(r * j_cube, z_coord)
        if drj_dr is not None:
            drj_dr.units = drj_dr.units * r_unit
        drj_dr = _curl_regrid(drj_dr, prototype_diff)
        d_k_cube_dtheta = _curl_differentiate(k_cube, lat_coord)
        d_k_cube_dtheta = _curl_regrid(d_k_cube_dtheta, prototype_diff)
        if drj_dr is None and d_k_cube_dtheta is None:
            phi_cmpt = None
        else:
            phi_cmpt = 1/r * _curl_subtract(drj_dr, d_k_cube_dtheta)
            phi_cmpt.units = phi_cmpt.units / r_unit

        drj_dr = d_k_cube_dtheta = None

        # theta curl component: 1/r * ( 1/cos(lat) * d_k_cube_dphi - dri_dr )
        d_k_cube_dphi = _curl_differentiate(k_cube, lon_coord)
        d_k_cube_dphi = _curl_regrid(d_k_cube_dphi, prototype_diff)
        if d_k_cube_dphi is not None:
            d_k_cube_dphi = iris.analysis.maths.divide(d_k_cube_dphi,
                                                       lat_cos_coord)
        dri_dr = _curl_differentiate(r * i_cube, z_coord)
        if dri_dr is not None:
            dri_dr.units = dri_dr.units * r_unit
        dri_dr = _curl_regrid(dri_dr, prototype_diff)
        if d_k_cube_dphi is None and dri_dr is None:
            theta_cmpt = None
        else:
            theta_cmpt = 1/r * _curl_subtract(d_k_cube_dphi, dri_dr)
            theta_cmpt.units = theta_cmpt.units / r_unit
        d_k_cube_dphi = dri_dr = None

        result = [phi_cmpt, theta_cmpt, r_cmpt]

    # Name each non-None component after its direction and the phenomenon.
    for direction, cube in zip(vector_quantity_names, result):
        if cube is not None:
            cube.rename('%s curl of %s' % (direction, phenomenon_name))

    return result
Exemplo n.º 46
0
def rle_decode(data, lbrow, lbnpt, bmdi):
    """Deprecated wrapper: delegate RLE decoding to the old implementation."""
    warn_deprecated(_DEPRECATION_WARNING)
    unpacked = old_pp_packing.rle_decode(data, lbrow, lbnpt, bmdi)
    return unpacked
Exemplo n.º 47
0
.. deprecated:: 1.10
    :mod:`iris.fileformats.pp_packing` is deprecated.
    Please install mo_pack (https://github.com/SciTools/mo_pack) instead.
    This provides additional pack/unpacking functionality.

"""

# Shared deprecation message: emitted once at import time (below), and again
# by each wrapper function on every call.
_DEPRECATION_WARNING = (
    'Module "iris.fileformats.pp_packing" is deprecated.  '
    'Please install mo_pack (https://github.com/SciTools/mo_pack) instead.  '
    'This provides additional pack/unpacking functionality.')


# Emit a deprecation warning when anyone tries to import this.
# For quiet, can still use _old_pp_packing instead, as fileformats.pp does.
warn_deprecated(_DEPRECATION_WARNING)


# Define simple wrappers for functions in pp_packing.
# N.B. signatures must match the originals !
def wgdos_unpack(data, lbrow, lbnpt, bmdi):
    """Deprecated wrapper around the original WGDOS unpacking routine."""
    warn_deprecated(_DEPRECATION_WARNING)
    unpacked = old_pp_packing.wgdos_unpack(data, lbrow, lbnpt, bmdi)
    return unpacked


def rle_decode(data, lbrow, lbnpt, bmdi):
    """Deprecated wrapper: warn, then delegate to the old implementation."""
    warn_deprecated(_DEPRECATION_WARNING)
    return old_pp_packing.rle_decode(data, lbrow, lbnpt, bmdi)


def _add_fixed_up_docstring(new_fn, original_fn):
Exemplo n.º 48
0
 def __init__(self, name, value):
     """Store the custom attribute's *name* and *value* (class deprecated)."""
     # Fix: close the backtick around the class path in the message.
     warn_deprecated(
         "the `iris.fileformats.rules.CmCustomAttribute` class is "
         "deprecated.")
     self.name = name
     self.value = value
.. deprecated:: 1.10

    This module has now been *deprecated*.
    Please use :mod:`iris.fileformats.um.structured_um_loading` instead.

"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip)  # noqa

import os

from iris._deprecation import warn_deprecated

# Issue a deprecation message when the module is loaded.
# Fix: the adjacent string literals previously concatenated without a
# space, producing "...structured_um_loadingas a replacement."
warn_deprecated("The module 'iris.experimental.fieldsfile' is deprecated. "
                "Please use iris.fileformats.um.structured_um_loading "
                "as a replacement.")

from iris.coords import DimCoord
from iris.cube import CubeList
from iris.exceptions import TranslationError
from iris.fileformats import FORMAT_AGENT
from iris.fileformats.um import um_to_pp
from iris.fileformats.pp import load as pp_load
from iris.fileformats.pp_rules import (_convert_time_coords,
                                       _convert_vertical_coords,
                                       _convert_scalar_realization_coords,
                                       _convert_scalar_pseudo_level_coords,
                                       _all_other_rules)
from iris.fileformats.rules import ConversionMetadata, Loader, load_cubes
from iris.fileformats.um._fast_load_structured_fields import \
def load(filenames, callback=None):
    """
    Load structured FieldsFiles and PP files.

    Args:

    * filenames:
        One or more filenames.


    Kwargs:

    * callback:
        A modifier/filter function. Please see the module documentation
        for :mod:`iris`.

        .. note::

            Unlike the standard :func:`iris.load` operation, the callback is
            applied to the final result cubes, not individual input fields.

    Returns:
        An :class:`iris.cube.CubeList`.


    This is a streamlined load operation, to be used only on fieldsfiles or PP
    files whose fields repeat regularly over the same vertical levels and
    times. The results aim to be equivalent to those generated by
    :func:`iris.load`, but the operation is substantially faster for input that
    is structured.

    The structured input files should conform to the following requirements:

    *  the file must contain fields for all possible combinations of the
       vertical levels and time points found in the file.

    *  the fields must occur in a regular repeating order within the file.

       (For example: a sequence of fields for NV vertical levels, repeated
       for NP different forecast periods, repeated for NT different forecast
       times).

    *  all other metadata must be identical across all fields of the same
       phenomenon.

    Each group of fields with the same values of LBUSER4, LBUSER7 and LBPROC
    is identified as a separate phenomenon:  These groups are processed
    independently and returned as separate result cubes.

    .. note::

        Each input file is loaded independently.  Thus a single result cube can
        not combine data from multiple input files.

    .. note::

        The resulting time-related coordinates ('time', 'forecast_time' and
        'forecast_period') may be mapped to shared cube dimensions and in some
        cases can also be multidimensional.  However, the vertical level
        information *must* have a simple one-dimensional structure, independent
        of the time points, otherwise an error will be raised.

    .. note::

        Where input data does *not* have a fully regular arrangement, the
        corresponding result cube will have a single anonymous extra dimension
        which indexes over all the input fields.

        This can happen if, for example, some fields are missing; or have
        slightly different metadata; or appear out of order in the file.

    .. warning::

        Any non-regular metadata variation in the input should be strictly
        avoided, as not all irregularities are detected, which can cause
        erroneous results.


    """
    # Warn at call time as well as at module import, so direct callers of
    # load() always see the deprecation notice.
    warn_deprecated(
        "The module 'iris.experimental.fieldsfile' is deprecated. "
        "Please use the 'iris.fileformats.um.structured_um_loading' facility "
        "as a replacement."
        "\nA call to 'iris.experimental.fieldsfile.load' can be replaced with "
        "'iris.load_raw', within a 'structured_um_loading' context.")
    # Collate structured fields from each file, convert each collation to
    # cube metadata, and gather the results into a CubeList.
    loader = Loader(_collations_from_filename, {}, _convert_collation, None)
    return CubeList(load_cubes(filenames, callback, loader, None))
Exemplo n.º 51
0
    This module has been deprecated. Please use lazy imports instead.

Provision of a service to handle missing packages at runtime.
Current just a very thin layer but gives the option to extend
handling as much as needed

"""

from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip)  # noqa

import sys

from iris._deprecation import warn_deprecated

# Emit the deprecation notice once, as soon as this module is imported.
warn_deprecated('iris.proxy is deprecated in Iris v1.9. Please use lazy '
                'imports instead.')


class FakeModule(object):
    """Stand-in for an unavailable module: any attribute access raises."""

    # Only the missing module's name is stored; no instance __dict__.
    __slots__ = ('_name', )

    def __init__(self, name):
        # Record the name for use in the AttributeError message.
        self._name = name

    def __setattr__(self, name, value):
        # Route through object so slot assignment works normally.
        object.__setattr__(self, name, value)

    def __getattr__(self, name):
        message = 'Module "{}" not available or not installed'.format(
            self._name)
        raise AttributeError(message)
Exemplo n.º 52
0
import cf_units
import numpy as np
import numpy.ma as ma
from osgeo import gdal, osr

import iris
from iris._deprecation import warn_deprecated
import iris.coord_systems

# Module-level deprecation notice, emitted once when this module is imported.
wmsg = (
    "iris.experimental.raster is deprecated since version 3.2, and will be "
    "removed in a future release. If you make use of this functionality, "
    "please contact the Iris Developers to discuss how to retain it (which may "
    "involve reversing the deprecation)."
)
warn_deprecated(wmsg)

# Map numpy dtype codes (kind + itemsize, e.g. "f8") to the corresponding
# GDAL raster data-type constants.
_GDAL_DATATYPES = {
    "i2": gdal.GDT_Int16,
    "i4": gdal.GDT_Int32,
    "u1": gdal.GDT_Byte,
    "u2": gdal.GDT_UInt16,
    "u4": gdal.GDT_UInt32,
    "f4": gdal.GDT_Float32,
    "f8": gdal.GDT_Float64,
}


def _gdal_write_array(
    x_min, x_step, y_max, y_step, coord_system, data, fname, ftype
):
Exemplo n.º 53
0
def export_geotiff(cube, fname):
    """
    Writes cube data to raster file format as a PixelIsArea GeoTiff image.

    .. deprecated:: 3.2.0

        This method is scheduled to be removed in a future release, and no
        replacement is currently planned.
        If you make use of this functionality, please contact the Iris
        Developers to discuss how to retain it (which could include reversing
        the deprecation).

    Args:
        * cube (Cube): The 2D regularly gridded cube slice to be exported.
                       The cube must have regular, contiguous bounds.
        * fname (string): Output file name.

    .. note::

        For more details on GeoTiff specification and PixelIsArea, see:
        http://www.remotesensing.org/geotiff/spec/geotiff2.5.html#2.5.2.2

    """
    wmsg = (
        "iris.experimental.raster.export_geotiff has been deprecated, and will "
        "be removed in a future release.  Please consult the docstring for "
        "details."
    )
    warn_deprecated(wmsg)

    if cube.ndim != 2:
        raise ValueError("The cube must be two dimensional.")

    coord_x = cube.coord(axis="X", dim_coords=True)
    coord_y = cube.coord(axis="Y", dim_coords=True)

    if coord_x.bounds is None or coord_y.bounds is None:
        raise ValueError(
            "Coordinates must have bounds, consider using " "guess_bounds()"
        )

    if (
        coord_x is None
        or coord_y is None
        or coord_x.coord_system != coord_y.coord_system
    ):
        raise ValueError("The X and Y coordinates must share a CoordSystem.")

    # Validate both horizontal coords, and record each one's (regular)
    # cell step from its bounds.
    xy_step = []
    for coord in [coord_x, coord_y]:
        name = coord.name()
        if coord.nbounds != 2:
            msg = "Coordinate {!r} must have two bounds " "per point.".format(
                name
            )
            raise ValueError(msg)
        if not (
            coord.units == cf_units.Unit("degrees")
            or coord.units.is_convertible("meters")
        ):
            raise ValueError(
                "Coordinate {!r} units must be either degrees or "
                "convertible to meters.".format(name)
            )
        if not coord.is_contiguous():
            raise ValueError(
                "Coordinate {!r} bounds must be " "contiguous.".format(name)
            )
        xy_step.append(np.diff(coord.bounds[0]))
        if not np.allclose(np.diff(coord.bounds), xy_step[-1]):
            msg = "Coordinate {!r} bounds must be regularly " "spaced.".format(
                name
            )
            raise ValueError(msg)

    # The X points must increase left-to-right for a GeoTiff raster.
    # Fix: the message previously used the stale loop variable 'name'
    # (always the Y coord's name) and lacked a space between
    # "monotonically" and "increasing".
    if coord_x.points[0] > coord_x.points[-1]:
        raise ValueError(
            "Coordinate {!r} x-points must be monotonically "
            "increasing.".format(coord_x.name())
        )

    data = cube.data

    # Make sure we have a YX data layout.
    # Fix: coord_dims() returns a tuple, so compare against (0,) --
    # comparing with the int 0 was always False, and the transpose never ran.
    if cube.coord_dims(coord_x) == (0,):
        data = data.T

    x_step, y_step = xy_step
    if y_step > 0:
        # Flip the data so North is at the top.
        data = data[::-1, :]
        y_step *= -1

    coord_system = coord_x.coord_system
    x_bounds = coord_x.bounds
    if isinstance(coord_system, iris.coord_systems.GeogCS):
        # Wrap longitudes above 180 back into [-180, 180], rolling the data
        # columns to match.
        big_indices = np.where(coord_x.points > 180)[0]
        n_big = len(big_indices)
        if n_big:
            data = np.roll(data, n_big, axis=1)
            x_bounds = x_bounds.copy()
            x_bounds[big_indices] -= 360

    x_min = np.min(x_bounds)
    y_max = np.max(coord_y.bounds)
    _gdal_write_array(
        x_min, x_step, y_max, y_step, coord_system, data, fname, "GTiff"
    )
Exemplo n.º 54
0
 def __init__(self, *args, **kwargs):
     """Construct as the base type; warn that this class is deprecated."""
     # Fix: close the backtick around the class path in the message.
     warn_deprecated(
         "the `iris.fileformats.rules.DebugString` class is deprecated.")
     super(DebugString, self).__init__(*args, **kwargs)
Exemplo n.º 55
0
def convert(grib):
    """
    Converts a GRIB message into the corresponding items of Cube metadata.

    Args:

    * grib:
        A :class:`~iris.fileformats.grib.GribWrapper` object.

    Returns:
        A :class:`iris.fileformats.rules.ConversionMetadata` object.

    """
    factories = []
    references = []
    standard_name = None
    long_name = None
    units = None
    attributes = {}
    cell_methods = []
    dim_coords_and_dims = []
    aux_coords_and_dims = []

    # Deprecation warning for this code path for edition 2 messages.
    if grib.edition == 2:
        msg = ('This GRIB loader is deprecated and will be removed in '
               'a future release.  Please consider using the new '
               'GRIB loader by setting the :class:`iris.Future` '
               'option `strict_grib_load` to True; e.g.:\n'
               'iris.FUTURE.strict_grib_load = True\n'
               'Please report issues you experience to:\n'
               'https://groups.google.com/forum/#!topic/scitools-iris-dev/'
               'lMsOusKNfaU')
        warn_deprecated(msg)

    # --- Horizontal grid coordinates ---------------------------------------

    if grib.gridType == "reduced_gg":
        # Reduced Gaussian grid: the data is one-dimensional, so latitude
        # and longitude are both auxiliary coordinates on dimension 0.
        aux_coords_and_dims.append(
            (AuxCoord(grib._y_points,
                      grib._y_coord_name,
                      units='degrees',
                      coord_system=grib._coord_system), 0))
        aux_coords_and_dims.append(
            (AuxCoord(grib._x_points,
                      grib._x_coord_name,
                      units='degrees',
                      coord_system=grib._coord_system), 0))

    if (grib.gridType in ("regular_ll", "regular_gg", "rotated_ll") and
            grib.jPointsAreConsecutive in (0, 1)):
        # All three lat-lon grid types build identical coordinates; only
        # the data layout differs: jPointsAreConsecutive == 0 means a
        # (y, x) layout, == 1 means (x, y).
        y_dim = grib.jPointsAreConsecutive
        x_dim = 1 - y_dim
        dim_coords_and_dims.append(
            (DimCoord(grib._y_points,
                      grib._y_coord_name,
                      units='degrees',
                      coord_system=grib._coord_system), y_dim))
        dim_coords_and_dims.append(
            (DimCoord(grib._x_points,
                      grib._x_coord_name,
                      units='degrees',
                      coord_system=grib._coord_system,
                      circular=grib._x_circular), x_dim))

    if grib.gridType in ("polar_stereographic", "lambert"):
        # Projected grids: x/y are in metres, never circular, (y, x) layout.
        dim_coords_and_dims.append(
            (DimCoord(grib._y_points,
                      grib._y_coord_name,
                      units="m",
                      coord_system=grib._coord_system), 0))
        dim_coords_and_dims.append(
            (DimCoord(grib._x_points,
                      grib._x_coord_name,
                      units="m",
                      coord_system=grib._coord_system), 1))

    # --- Phenomenon identification -----------------------------------------

    if grib.edition == 1:
        if grib._cf_data is not None:
            # A standard CF translation was found for this parameter.
            standard_name = grib._cf_data.standard_name
            long_name = grib._cf_data.standard_name or grib._cf_data.long_name
            units = grib._cf_data.units
        elif grib.table2Version < 128:
            # A handful of well-known WMO parameters handled directly.
            wmo_phenomena = {11: ("air_temperature", "kelvin"),
                             33: ("x_wind", "m s-1"),
                             34: ("y_wind", "m s-1")}
            if grib.indicatorOfParameter in wmo_phenomena:
                standard_name, units = \
                    wmo_phenomena[grib.indicatorOfParameter]
        else:
            # Untranslated local parameter table (table2Version >= 128).
            long_name = "UNKNOWN LOCAL PARAM " + str(
                grib.indicatorOfParameter) + "." + str(grib.table2Version)
            units = "???"
        if grib.table2Version == 1 and grib.indicatorOfParameter >= 128:
            # Local parameter number within the international table: always
            # flagged as unknown, even when a CF translation matched above
            # (this matches the original rule ordering).
            long_name = "UNKNOWN LOCAL PARAM " + str(
                grib.indicatorOfParameter) + "." + str(grib.table2Version)
            units = "???"

    if grib.edition == 2 and grib._cf_data is not None:
        standard_name = grib._cf_data.standard_name
        long_name = grib._cf_data.long_name
        units = grib._cf_data.units

    # --- Time coordinates and statistical cell methods ---------------------

    if grib.edition == 1 and grib._phenomenonDateTime != -1.0:
        # Instantaneous field: point-valued forecast_period and time.
        aux_coords_and_dims.append(
            (DimCoord(points=grib.startStep,
                      standard_name='forecast_period',
                      units=grib._forecastTimeUnit), None))
        aux_coords_and_dims.append(
            (DimCoord(points=grib.phenomenon_points('hours'),
                      standard_name='time',
                      units=Unit('hours since epoch',
                                 CALENDAR_GREGORIAN)), None))

    def add_bounded_time_coords(aux_coords_and_dims, grib):
        # Add forecast_period and time coordinates whose bounds span the
        # statistical processing period, with midpoint point values.
        t_bounds = grib.phenomenon_bounds('hours')
        period = Unit('hours').convert(t_bounds[1] - t_bounds[0],
                                       grib._forecastTimeUnit)
        aux_coords_and_dims.append(
            (DimCoord(standard_name='forecast_period',
                      units=grib._forecastTimeUnit,
                      points=grib._forecastTime + 0.5 * period,
                      bounds=[grib._forecastTime,
                              grib._forecastTime + period]), None))
        aux_coords_and_dims.append(
            (DimCoord(standard_name='time',
                      units=Unit('hours since epoch', CALENDAR_GREGORIAN),
                      points=0.5 * (t_bounds[0] + t_bounds[1]),
                      bounds=t_bounds), None))

    # Cell-method names keyed by GRIB1 timeRangeIndicator (code table 5).
    # Indicator 2 (product valid over a period) gets bounded time
    # coordinates but no cell method.
    grib1_time_methods = {
        3: "mean", 4: "sum", 5: "_difference", 51: "mean",
        113: "mean", 114: "sum", 115: "mean", 116: "sum",
        117: "mean", 118: "_covariance", 123: "mean", 124: "sum",
        125: "standard_deviation",
    }
    if grib.edition == 1:
        indicator = grib.timeRangeIndicator
        if indicator == 2 or indicator in grib1_time_methods:
            add_bounded_time_coords(aux_coords_and_dims, grib)
        if indicator in grib1_time_methods:
            cell_methods.append(CellMethod(grib1_time_methods[indicator],
                                           coords="time"))

    if grib.edition == 2 and grib.productDefinitionTemplateNumber == 0:
        # Instantaneous field (product definition template 4.0).
        aux_coords_and_dims.append(
            (DimCoord(points=Unit(grib._forecastTimeUnit).convert(
                np.int32(grib._forecastTime), "hours"),
                      standard_name='forecast_period',
                      units="hours"), None))
        aux_coords_and_dims.append(
            (DimCoord(points=grib.phenomenon_points('hours'),
                      standard_name='time',
                      units=Unit('hours since epoch',
                                 CALENDAR_GREGORIAN)), None))

    if grib.edition == 2 and grib.productDefinitionTemplateNumber in (8, 9):
        add_bounded_time_coords(aux_coords_and_dims, grib)

    # Cell-method names keyed by GRIB2 typeOfStatisticalProcessing
    # (code table 4.10), for statistical fields (template 4.8).
    grib2_statistic_methods = {
        0: "mean", 1: "sum", 2: "maximum", 3: "minimum",
        4: "_difference", 5: "_root_mean_square", 6: "standard_deviation",
        # NOTE: fixed a typo here -- the original said "_convariance",
        # inconsistent with the "_covariance" name used for GRIB1
        # timeRangeIndicator 118 above.
        7: "_covariance",
        8: "_difference", 9: "_ratio",
    }
    if grib.edition == 2 and grib.productDefinitionTemplateNumber == 8:
        method = grib2_statistic_methods.get(grib.typeOfStatisticalProcessing)
        if method is not None:
            cell_methods.append(CellMethod(method, coords="time"))

    # --- Vertical coordinates ----------------------------------------------

    if grib.edition == 1:
        if grib.levelType == 'pl':
            aux_coords_and_dims.append((DimCoord(points=grib.level,
                                                 long_name="pressure",
                                                 units="hPa"), None))
        elif grib.levelType == 'sfc':
            if (grib._cf_data is not None) and \
                    (grib._cf_data.set_height is not None):
                aux_coords_and_dims.append(
                    (DimCoord(points=grib._cf_data.set_height,
                              long_name="height",
                              units="m",
                              attributes={'positive': 'up'}), None))
            elif grib.typeOfLevel == 'heightAboveGround':  # required for NCAR
                aux_coords_and_dims.append(
                    (DimCoord(points=grib.level,
                              long_name="height",
                              units="m",
                              attributes={'positive': 'up'}), None))
        elif grib.levelType == 'ml' and hasattr(grib, 'pv'):
            # Hybrid pressure levels: model_level_number plus the
            # level_pressure/sigma pair consumed by HybridPressureFactory.
            aux_coords_and_dims.append(
                (AuxCoord(grib.level,
                          standard_name='model_level_number',
                          attributes={'positive': 'up'}), None))
            aux_coords_and_dims.append(
                (DimCoord(grib.pv[grib.level],
                          long_name='level_pressure',
                          units='Pa'), None))
            aux_coords_and_dims.append(
                (AuxCoord(grib.pv[grib.numberOfCoordinatesValues // 2 +
                                  grib.level],
                          long_name='sigma'), None))
            factories.append(
                Factory(HybridPressureFactory,
                        [{'long_name': 'level_pressure'},
                         {'long_name': 'sigma'},
                         Reference('surface_pressure')]))

    def scaled_surface_value(scaled_value, scale_factor):
        # GRIB2 encodes a surface value as scaled_value * 10**-scale_factor.
        return scaled_value / (10.0 ** scale_factor)

    if grib.edition == 2:
        if grib.typeOfFirstFixedSurface != grib.typeOfSecondFixedSurface:
            warnings.warn("Different vertical bound types not yet handled.")

        if grib.typeOfFirstFixedSurface in (100, 103):
            # 103: height above ground (m); 100: isobaric surface (Pa).
            first = scaled_surface_value(grib.scaledValueOfFirstFixedSurface,
                                         grib.scaleFactorOfFirstFixedSurface)
            if grib.typeOfFirstFixedSurface == 103:
                name_kwargs = {'standard_name': 'height'}
                coord_units = "m"
            else:
                name_kwargs = {'long_name': 'pressure'}
                coord_units = "Pa"
            if grib.typeOfSecondFixedSurface == 255:
                # Single surface: a simple point coordinate.
                aux_coords_and_dims.append(
                    (DimCoord(points=first,
                              units=coord_units,
                              **name_kwargs), None))
            else:
                # Bounded layer: midpoint value with explicit bounds.
                second = scaled_surface_value(
                    grib.scaledValueOfSecondFixedSurface,
                    grib.scaleFactorOfSecondFixedSurface)
                aux_coords_and_dims.append(
                    (DimCoord(points=0.5 * (first + second),
                              bounds=[first, second],
                              units=coord_units,
                              **name_kwargs), None))

        if (grib.typeOfFirstFixedSurface in [105, 119] and
                grib.numberOfCoordinatesValues > 0):
            # Hybrid level: model_level_number plus level_pressure/sigma
            # for the hybrid-pressure factory.
            aux_coords_and_dims.append(
                (AuxCoord(grib.scaledValueOfFirstFixedSurface,
                          standard_name='model_level_number',
                          attributes={'positive': 'up'}), None))
            aux_coords_and_dims.append(
                (DimCoord(grib.pv[grib.scaledValueOfFirstFixedSurface],
                          long_name='level_pressure',
                          units='Pa'), None))
            aux_coords_and_dims.append(
                (AuxCoord(grib.pv[grib.numberOfCoordinatesValues // 2 +
                                  grib.scaledValueOfFirstFixedSurface],
                          long_name='sigma'), None))
            factories.append(
                Factory(HybridPressureFactory,
                        [{'long_name': 'level_pressure'},
                         {'long_name': 'sigma'},
                         Reference('surface_air_pressure')]))

    # --- Miscellaneous metadata --------------------------------------------

    if grib._originatingCentre != 'unknown':
        aux_coords_and_dims.append((AuxCoord(points=grib._originatingCentre,
                                             long_name='originating_centre',
                                             units='no_unit'), None))

    if grib.edition == 2 and grib.productDefinitionTemplateNumber == 1:
        aux_coords_and_dims.append((DimCoord(points=grib.perturbationNumber,
                                             long_name='ensemble_member',
                                             units='no_unit'), None))

    if (grib.edition == 2 and
            grib.productDefinitionTemplateNumber not in (0, 8)):
        # Record (rather than fail on) product templates that are only
        # partially interpreted by the rules above.
        attributes["GRIB_LOAD_WARNING"] = (
            "unsupported GRIB%d ProductDefinitionTemplate: #4.%d" %
            (grib.edition, grib.productDefinitionTemplateNumber))

    if (grib.edition == 2 and
            grib.centre == 'ecmf' and
            grib.discipline == 0 and
            grib.parameterCategory == 3 and
            grib.parameterNumber == 25 and
            grib.typeOfFirstFixedSurface == 105):
        # ECMWF logarithm-of-surface-pressure field: provide an
        # exponentiated reference for the hybrid-pressure factory.
        references.append(
            ReferenceTarget(
                'surface_air_pressure', lambda cube: {
                    'standard_name': 'surface_air_pressure',
                    'units': 'Pa',
                    'data': np.exp(cube.data)
                }))

    return ConversionMetadata(factories, references, standard_name, long_name,
                              units, attributes, cell_methods,
                              dim_coords_and_dims, aux_coords_and_dims)
Exemplo n.º 56
0
 def __init__(self, name, value):
     """Deprecated: warn, then store the custom attribute name and value."""
     # Fixed the unbalanced backtick in the message (was
     # "`iris.fileformats.rules.CmCustomAttribute class"), so the class
     # name renders correctly as inline code.
     warn_deprecated(
         "the `iris.fileformats.rules.CmCustomAttribute` class is "
         "deprecated.")
     self.name = name
     self.value = value
Exemplo n.º 57
0
def intersection_of_cubes(cube, other_cube):
    """
    Return the parts of two Cubes that lie on their common intersection.

    .. note:: Single-valued coordinates are ignored when determining the
        intersection.

    Args:

    * cube:
        An instance of :class:`iris.cube.Cube`.
    * other_cube:
        An instance of :class:`iris.cube.Cube`.

    Returns:
        A tuple of two :class:`iris.cube.Cube` instances, being the input
        cubes restricted to their mutual intersection.

    .. deprecated:: 3.2.0

       Instead use :meth:`iris.cube.CubeList.extract_overlapping`. For
       example, rather than calling

       .. code::

          cube1, cube2 = intersection_of_cubes(cube1, cube2)

       replace with

       .. code::

          cubes = CubeList([cube1, cube2])
          coords = ["latitude", "longitude"]    # Replace with relevant coords
          intersections = cubes.extract_overlapping(coords)
          cube1, cube2 = (intersections[0], intersections[1])

    """
    wmsg = (
        "iris.analysis.maths.intersection_of_cubes has been deprecated and will "
        "be removed, please use iris.cube.CubeList.extract_overlapping "
        "instead. See intersection_of_cubes docstring for more information.")
    warn_deprecated(wmsg)

    # This routine cannot handle multi-dimensional coordinates; report the
    # first offender (same visiting order as iterating both coord lists).
    bad_coords = [c for c in cube.coords() + other_cube.coords()
                  if c.ndim != 1]
    if bad_coords:
        raise iris.exceptions.CoordinateMultiDimError(bad_coords[0])

    comparison = iris.analysis._dimensional_metadata_comparison(cube,
                                                                other_cube)
    if comparison["ungroupable_and_dimensioned"]:
        raise ValueError("Cubes do not share all coordinates in common, "
                         "cannot intersect.")

    # Start from references to the originals; the subset() calls below
    # produce copies as required.
    result_self = cube
    result_other = other_cube

    for coord in cube.coords():
        matching_coord = other_cube.coord(coord)
        # Single-valued coordinates may legitimately differ; only intersect
        # multi-valued coordinates that do not already match.
        if coord.shape[0] > 1 and coord != matching_coord:
            common = coord.intersect(matching_coord)
            result_self = result_self.subset(common)
            result_other = result_other.subset(common)

    return result_self, result_other
Exemplo n.º 58
0
import scipy.interpolate

from iris._deprecation import warn_deprecated
from iris.analysis._interpolate_private import Linear1dExtrapolator
import iris.coord_systems as coord_systems
from iris.exceptions import TranslationError
# NOTE: careful here, to avoid circular imports (as iris imports grib)
from iris.fileformats.grib import grib_phenom_translation as gptx
from iris.fileformats.grib import _save_rules
import iris.fileformats.grib._load_convert
from iris.fileformats.grib.message import GribMessage
import iris.fileformats.grib.load_rules

# Issue a blanket deprecation for this module.
# (Fixed the doubled word in the message: it previously read
# "install the package 'iris_grib' package".)
warn_deprecated(
    "The module iris.fileformats.grib is deprecated since v1.10. "
    "Please install the 'iris_grib' package instead.")


__all__ = ['load_cubes', 'save_grib2', 'load_pairs_from_fields',
           'save_pairs_from_cube', 'save_messages', 'GribWrapper',
           'as_messages', 'as_pairs', 'grib_generator', 'reset_load_rules',
           'hindcast_workaround']


#: Set this flag to True to enable support of negative forecast periods
#: when loading and saving GRIB files.
#:
#: .. deprecated:: 1.10
hindcast_workaround = False
Exemplo n.º 59
0
def wgdos_unpack(data, lbrow, lbnpt, bmdi):
    """Deprecated shim: delegate WGDOS unpacking to the old implementation."""
    # Warn on every call, then hand straight off to the retained routine.
    warn_deprecated(_DEPRECATION_WARNING)
    unpacked = old_pp_packing.wgdos_unpack(data, lbrow, lbnpt, bmdi)
    return unpacked
Exemplo n.º 60
0
 def __init__(self, *args, **kwargs):
     """Deprecated: warn, then initialise exactly as the parent class."""
     # Fixed the unbalanced backtick in the message (was
     # "`iris.fileformats.rules.DebugString class"), so the class name
     # renders correctly as inline code.
     warn_deprecated(
         "the `iris.fileformats.rules.DebugString` class is deprecated.")
     super(DebugString, self).__init__(*args, **kwargs)